blob: e72d40b4aec743f4db9f5a27a81d25c2d5657010 [file] [log] [blame]
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001// Copyright 2012 the V8 project authors. All rights reserved.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
Steve Blocka7e24c12009-10-30 11:49:00 +00004
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005#if V8_TARGET_ARCH_X64
Leon Clarkef7060e22010-06-03 12:02:55 +01006
Ben Murdochb8a8cc12014-11-26 15:28:44 +00007#include "src/base/bits.h"
8#include "src/base/division-by-constant.h"
9#include "src/bootstrapper.h"
10#include "src/codegen.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000011#include "src/debug/debug.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000012#include "src/heap/heap.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000013#include "src/register-configuration.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000014#include "src/x64/assembler-x64.h"
15#include "src/x64/macro-assembler-x64.h"
Steve Blocka7e24c12009-10-30 11:49:00 +000016
17namespace v8 {
18namespace internal {
19
// Constructs a MacroAssembler emitting into |buffer| of |size| bytes.
// Starts with root-array addressing enabled (root_array_available_ = true);
// callers that cannot rely on kRootRegister must clear the flag themselves.
MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size,
                               CodeObjectRequired create_code_object)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      has_frame_(false),
      root_array_available_(true) {
  if (create_code_object == CodeObjectRequired::kYes) {
    // Placeholder handle (undefined); NOTE(review): presumably replaced with
    // the real Code object once the code is allocated -- confirm at call site.
    code_object_ =
        Handle<Object>::New(isolate()->heap()->undefined_value(), isolate());
  }
}
31
32
// Sentinel returned by RootRegisterDelta() when the target cannot (or must
// not) be addressed relative to kRootRegister.
static const int64_t kInvalidRootRegisterDelta = -1;
35
// Computes the displacement of |other| from the value held in kRootRegister
// (the roots array start plus kRootRegisterBias).  Returns
// kInvalidRootRegisterDelta when root-relative addressing is disallowed:
// with predictable_code_size(), targets outside the isolate are refused
// because the resulting displacement width could vary.
int64_t MacroAssembler::RootRegisterDelta(ExternalReference other) {
  if (predictable_code_size() &&
      (other.address() < reinterpret_cast<Address>(isolate()) ||
       other.address() >= reinterpret_cast<Address>(isolate() + 1))) {
    return kInvalidRootRegisterDelta;
  }
  // kRootRegister holds the roots array start offset by kRootRegisterBias.
  Address roots_register_value = kRootRegisterBias +
      reinterpret_cast<Address>(isolate()->heap()->roots_array_start());

  int64_t delta = kInvalidRootRegisterDelta;  // Bogus initialization.
  if (kPointerSize == kInt64Size) {
    delta = other.address() - roots_register_value;
  } else {
    // For x32, zero extend the address to 64-bit and calculate the delta.
    uint64_t o = static_cast<uint32_t>(
        reinterpret_cast<intptr_t>(other.address()));
    uint64_t r = static_cast<uint32_t>(
        reinterpret_cast<intptr_t>(roots_register_value));
    delta = o - r;
  }
  return delta;
}
58
59
// Returns an Operand addressing |target|.  Prefers a root-register-relative
// operand (no scratch register, no 64-bit immediate); falls back to
// materializing the address in |scratch| and returning (scratch + 0).
Operand MacroAssembler::ExternalOperand(ExternalReference target,
                                        Register scratch) {
  if (root_array_available_ && !serializer_enabled()) {
    int64_t delta = RootRegisterDelta(target);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      return Operand(kRootRegister, static_cast<int32_t>(delta));
    }
  }
  Move(scratch, target);
  return Operand(scratch, 0);
}
71
72
// Loads the pointer-sized value stored at external reference |source| into
// |destination|.  Tries root-relative addressing first, then the compact
// rax-specific encoding, then a generic load through kScratchRegister
// (which is clobbered on that path).
void MacroAssembler::Load(Register destination, ExternalReference source) {
  if (root_array_available_ && !serializer_enabled()) {
    int64_t delta = RootRegisterDelta(source);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      movp(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
      return;
    }
  }
  // Safe code.
  if (destination.is(rax)) {
    // rax has a dedicated short-form load-from-absolute-address encoding.
    load_rax(source);
  } else {
    Move(kScratchRegister, source);
    movp(destination, Operand(kScratchRegister, 0));
  }
}
89
90
// Stores |source| to the memory at external reference |destination|.
// Mirror image of Load(): root-relative store when possible, the compact
// rax encoding next, otherwise an indirect store via kScratchRegister
// (clobbered on that path).
void MacroAssembler::Store(ExternalReference destination, Register source) {
  if (root_array_available_ && !serializer_enabled()) {
    int64_t delta = RootRegisterDelta(destination);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      movp(Operand(kRootRegister, static_cast<int32_t>(delta)), source);
      return;
    }
  }
  // Safe code.
  if (source.is(rax)) {
    // rax has a dedicated short-form store-to-absolute-address encoding.
    store_rax(destination);
  } else {
    Move(kScratchRegister, destination);
    movp(Operand(kScratchRegister, 0), source);
  }
}
107
108
// Loads the address (not the contents) of external reference |source| into
// |destination|.  Uses leap off kRootRegister when the delta fits in 32
// bits; otherwise emits a full 64-bit immediate move.  LoadAddressSize()
// below models the size of exactly this code sequence.
void MacroAssembler::LoadAddress(Register destination,
                                 ExternalReference source) {
  if (root_array_available_ && !serializer_enabled()) {
    int64_t delta = RootRegisterDelta(source);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      leap(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
      return;
    }
  }
  // Safe code.
  Move(destination, source);
}
121
122
// Returns the number of bytes LoadAddress() would emit for |source|,
// without emitting anything.  Must be kept in sync with LoadAddress().
int MacroAssembler::LoadAddressSize(ExternalReference source) {
  if (root_array_available_ && !serializer_enabled()) {
    // This calculation depends on the internals of LoadAddress.
    // Its correctness is ensured by the asserts in the Call
    // instruction below.
    int64_t delta = RootRegisterDelta(source);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      // Operand is leap(scratch, Operand(kRootRegister, delta));
      // Opcodes : REX.W 8D ModRM Disp8/Disp32 - 4 or 7.
      int size = 4;
      if (!is_int8(static_cast<int32_t>(delta))) {
        size += 3;  // Need full four-byte displacement in lea.
      }
      return size;
    }
  }
  // Size of movp(destination, src);
  return Assembler::kMoveAddressIntoScratchRegisterInstructionLength;
}
142
143
// Pushes the address of external reference |source| onto the stack.
// Sign-extendable 32-bit addresses are pushed as an immediate (push imm32
// sign-extends to 64 bits); otherwise the address goes through
// kScratchRegister, which is clobbered.
void MacroAssembler::PushAddress(ExternalReference source) {
  int64_t address = reinterpret_cast<int64_t>(source.address());
  if (is_int32(address) && !serializer_enabled()) {
    if (emit_debug_code()) {
      // Zap the scratch register so debug code can't silently depend on it.
      Move(kScratchRegister, kZapValue, Assembler::RelocInfoNone());
    }
    Push(Immediate(static_cast<int32_t>(address)));
    return;
  }
  LoadAddress(kScratchRegister, source);
  Push(kScratchRegister);
}
156
157
// Loads the root-list entry |index| into |destination| via kRootRegister.
// The bias subtraction undoes kRootRegisterBias baked into the register.
void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  DCHECK(root_array_available_);
  movp(destination, Operand(kRootRegister,
                            (index << kPointerSizeLog2) - kRootRegisterBias));
}
163
164
// Loads roots[fixed_offset + variable_offset] into |destination|, where
// |variable_offset| is a register holding an element count (scaled by
// pointer size) and |fixed_offset| is a compile-time element offset.
void MacroAssembler::LoadRootIndexed(Register destination,
                                     Register variable_offset,
                                     int fixed_offset) {
  DCHECK(root_array_available_);
  movp(destination,
       Operand(kRootRegister,
               variable_offset, times_pointer_size,
               (fixed_offset << kPointerSizeLog2) - kRootRegisterBias));
}
174
175
// Stores |source| into root-list slot |index|.  Only roots explicitly
// whitelisted as writable after initialization may be stored to.
void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) {
  DCHECK(Heap::RootCanBeWrittenAfterInitialization(index));
  DCHECK(root_array_available_);
  movp(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias),
       source);
}
182
183
// Pushes the root-list entry |index| onto the stack (memory-to-stack push,
// no scratch register needed).
void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  DCHECK(root_array_available_);
  Push(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias));
}
188
189
// Compares register |with| against root-list entry |index| (sets flags).
void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  DCHECK(root_array_available_);
  cmpp(with, Operand(kRootRegister,
                     (index << kPointerSizeLog2) - kRootRegisterBias));
}
195
196
// Compares memory operand |with| against root-list entry |index|.
// x64 has no memory-to-memory compare, so the root value is staged in
// kScratchRegister first; |with| therefore must not use that register.
void MacroAssembler::CompareRoot(const Operand& with,
                                 Heap::RootListIndex index) {
  DCHECK(root_array_available_);
  DCHECK(!with.AddressUsesRegister(kScratchRegister));
  LoadRoot(kScratchRegister, index);
  cmpp(with, kScratchRegister);
}
204
205
// Records |addr| (a slot address) in the store buffer, then either returns
// (kReturnAtEnd) or falls through (kFallThroughAtEnd).  If the store buffer
// overflowed, the StoreBufferOverflowStub is called to drain it.
// Clobbers |scratch|.
void MacroAssembler::RememberedSetHelper(Register object,  // For debug tests.
                                         Register addr,
                                         Register scratch,
                                         SaveFPRegsMode save_fp,
                                         RememberedSetFinalAction and_then) {
  if (emit_debug_code()) {
    // The remembered set is only for old->new pointers, so in debug mode
    // verify the object actually lives in new space.
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
    int3();
    bind(&ok);
  }
  // Load store buffer top.
  LoadRoot(scratch, Heap::kStoreBufferTopRootIndex);
  // Store pointer to buffer.
  movp(Operand(scratch, 0), addr);
  // Increment buffer top.
  addp(scratch, Immediate(kPointerSize));
  // Write back new top of buffer.
  StoreRoot(scratch, Heap::kStoreBufferTopRootIndex);
  // Call stub on end of buffer.
  Label done;
  // Check for end of buffer: the overflow bit set in the (aligned) top
  // pointer signals the buffer is full.
  testp(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit));
  if (and_then == kReturnAtEnd) {
    Label buffer_overflowed;
    j(not_equal, &buffer_overflowed, Label::kNear);
    ret(0);
    bind(&buffer_overflowed);
  } else {
    DCHECK(and_then == kFallThroughAtEnd);
    j(equal, &done, Label::kNear);
  }
  StoreBufferOverflowStub store_buffer_overflow(isolate(), save_fp);
  CallStub(&store_buffer_overflow);
  if (and_then == kReturnAtEnd) {
    ret(0);
  } else {
    DCHECK(and_then == kFallThroughAtEnd);
    bind(&done);
  }
}
247
248
// Branches to |branch| depending on whether |object| lies in new space:
// condition |cc| is evaluated against the page's from-space/to-space flags
// (zero => not in new space, not_zero => in new space).  Clobbers |scratch|.
void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                Label* branch,
                                Label::Distance distance) {
  const int mask =
      (1 << MemoryChunk::IN_FROM_SPACE) | (1 << MemoryChunk::IN_TO_SPACE);
  CheckPageFlag(object, scratch, mask, cc, branch, distance);
}
258
259
// Write barrier for a store of |value| into the field at |offset| of
// |object|.  Computes the slot address into |dst| (clobbered) and forwards
// to RecordWrite.  In debug-code mode, |value| and |dst| are zapped on exit.
void MacroAssembler::RecordWriteField(
    Register object,
    int offset,
    Register value,
    Register dst,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done);
  }

  // Although the object register is tagged, the offset is relative to the
  // start of the object, so the offset must be a multiple of kPointerSize.
  DCHECK(IsAligned(offset, kPointerSize));

  leap(dst, FieldOperand(object, offset));
  if (emit_debug_code()) {
    // Verify the computed slot address is pointer-aligned.
    Label ok;
    testb(dst, Immediate((1 << kPointerSizeLog2) - 1));
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // The smi check was already done above, so tell RecordWrite to omit it.
  RecordWrite(object, dst, value, save_fp, remembered_set_action,
              OMIT_SMI_CHECK, pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    Move(value, kZapValue, Assembler::RelocInfoNone());
    Move(dst, kZapValue, Assembler::RelocInfoNone());
  }
}
303
304
// Write barrier for a store of |value| into element |index| (untagged, in
// elements) of FixedArray |object|.  |index| is clobbered: it is reused to
// hold the slot address.  In debug-code mode, |value| and |index| are
// zapped on exit.
void MacroAssembler::RecordWriteArray(
    Register object,
    Register value,
    Register index,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done);
  }

  // Array access: calculate the destination address. Index is not a smi.
  Register dst = index;
  leap(dst, Operand(object, index, times_pointer_size,
                    FixedArray::kHeaderSize - kHeapObjectTag));

  // Smi check already handled above.
  RecordWrite(object, dst, value, save_fp, remembered_set_action,
              OMIT_SMI_CHECK, pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    Move(value, kZapValue, Assembler::RelocInfoNone());
    Move(index, kZapValue, Assembler::RelocInfoNone());
  }
}
339
340
// Write barrier specialized for storing |map| into |object|'s map slot.
// Maps never live in new space, so only the incremental-marking half of the
// barrier is needed.  |dst| is clobbered (slot address); in debug-code mode
// |dst| and |map| are zapped on exit.
void MacroAssembler::RecordWriteForMap(Register object,
                                       Register map,
                                       Register dst,
                                       SaveFPRegsMode fp_mode) {
  DCHECK(!object.is(kScratchRegister));
  DCHECK(!object.is(map));
  DCHECK(!object.is(dst));
  DCHECK(!map.is(dst));
  AssertNotSmi(object);

  if (emit_debug_code()) {
    // Verify |map| really is a map (its own map is the meta map).
    // CompareMap may use kScratchRegister, so preserve |map| if it is it.
    Label ok;
    if (map.is(kScratchRegister)) pushq(map);
    CompareMap(map, isolate()->factory()->meta_map());
    if (map.is(kScratchRegister)) popq(map);
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // Without incremental marking there is nothing to record for a map store.
  if (!FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    // Verify the map has already been stored into the object's map slot.
    Label ok;
    if (map.is(kScratchRegister)) pushq(map);
    cmpp(map, FieldOperand(object, HeapObject::kMapOffset));
    if (map.is(kScratchRegister)) popq(map);
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // Compute the address.
  leap(dst, FieldOperand(object, HeapObject::kMapOffset));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;

  // A single check of the map's pages interesting flag suffices, since it is
  // only set during incremental collection, and then it's also guaranteed that
  // the from object's page's interesting flag is also set. This optimization
  // relies on the fact that maps can never be in new space.
  CheckPageFlag(map,
                map,  // Used as scratch.
                MemoryChunk::kPointersToHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

  RecordWriteStub stub(isolate(), object, map, dst, OMIT_REMEMBERED_SET,
                       fp_mode);
  CallStub(&stub);

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    Move(dst, kZapValue, Assembler::RelocInfoNone());
    Move(map, kZapValue, Assembler::RelocInfoNone());
  }
}
410
411
// General write barrier: records that |value| was stored into the slot at
// |address| inside |object|.  Skips all work when both page-flag checks say
// the store is uninteresting.  |address| and |value| are clobbered in
// debug-code mode (zapped); object/address/value must be distinct registers.
void MacroAssembler::RecordWrite(
    Register object,
    Register address,
    Register value,
    SaveFPRegsMode fp_mode,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  DCHECK(!object.is(value));
  DCHECK(!object.is(address));
  DCHECK(!value.is(address));
  AssertNotSmi(object);

  // With the remembered set omitted, the barrier only matters for
  // incremental marking; if that is off too, there is nothing to do.
  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    // Verify |address| really points at a slot currently holding |value|.
    Label ok;
    cmpp(value, Operand(address, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    // Skip barrier if writing a smi.
    JumpIfSmi(value, &done);
  }

  if (pointers_to_here_check_for_value != kPointersToHereAreAlwaysInteresting) {
    // Skip if the value's page is not interesting (not being marked).
    CheckPageFlag(value,
                  value,  // Used as scratch.
                  MemoryChunk::kPointersToHereAreInterestingMask,
                  zero,
                  &done,
                  Label::kNear);
  }

  // Skip if the object's page does not track outgoing pointers.
  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

  RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
                       fp_mode);
  CallStub(&stub);

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    Move(address, kZapValue, Assembler::RelocInfoNone());
    Move(value, kZapValue, Assembler::RelocInfoNone());
  }
}
480
// Write barrier for storing |code_entry| into |js_function|'s code-entry
// field.  Unlike RecordWrite, this calls a C function directly (the record-
// write-code-entry runtime hook) instead of a stub.  Register assignments
// are fixed (rdi/rcx/rax) to simplify the C call; |scratch| is clobbered.
void MacroAssembler::RecordWriteCodeEntryField(Register js_function,
                                               Register code_entry,
                                               Register scratch) {
  const int offset = JSFunction::kCodeEntryOffset;

  // The input registers are fixed to make calling the C write barrier function
  // easier.
  DCHECK(js_function.is(rdi));
  DCHECK(code_entry.is(rcx));
  DCHECK(scratch.is(rax));

  // Since a code entry (value) is always in old space, we don't need to update
  // remembered set. If incremental marking is off, there is nothing for us to
  // do.
  if (!FLAG_incremental_marking) return;

  AssertNotSmi(js_function);

  if (emit_debug_code()) {
    // Verify the field really holds |code_entry| already.
    Label ok;
    leap(scratch, FieldOperand(js_function, offset));
    cmpp(code_entry, Operand(scratch, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  CheckPageFlag(code_entry, scratch,
                MemoryChunk::kPointersToHereAreInterestingMask, zero, &done,
                Label::kNear);
  CheckPageFlag(js_function, scratch,
                MemoryChunk::kPointersFromHereAreInterestingMask, zero, &done,
                Label::kNear);

  // Save input registers.
  Push(js_function);
  Push(code_entry);

  const Register dst = scratch;
  leap(dst, FieldOperand(js_function, offset));

  // Save caller-saved registers.
  PushCallerSaved(kDontSaveFPRegs, js_function, code_entry);

  int argument_count = 3;
  PrepareCallCFunction(argument_count);

  // Load the argument registers.
  if (arg_reg_1.is(rcx)) {
    // Windows calling convention.
    DCHECK(arg_reg_2.is(rdx) && arg_reg_3.is(r8));

    movp(arg_reg_1, js_function);  // rcx gets rdi.
    movp(arg_reg_2, dst);          // rdx gets rax.
  } else {
    // AMD64 calling convention.
    DCHECK(arg_reg_1.is(rdi) && arg_reg_2.is(rsi) && arg_reg_3.is(rdx));

    // rdi is already loaded with js_function.
    movp(arg_reg_2, dst);  // rsi gets rax.
  }
  Move(arg_reg_3, ExternalReference::isolate_address(isolate()));

  {
    AllowExternalCallThatCantCauseGC scope(this);
    CallCFunction(
        ExternalReference::incremental_marking_record_write_code_entry_function(
            isolate()),
        argument_count);
  }

  // Restore caller-saved registers.
  PopCallerSaved(kDontSaveFPRegs, js_function, code_entry);

  // Restore input registers.
  Pop(code_entry);
  Pop(js_function);

  bind(&done);
}
Steve Block8defd9f2010-07-08 12:39:36 +0100565
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000566void MacroAssembler::Assert(Condition cc, BailoutReason reason) {
567 if (emit_debug_code()) Check(cc, reason);
Steve Blocka7e24c12009-10-30 11:49:00 +0000568}
569
570
// Debug-code check that |elements| has one of the fast elements backing-
// store maps (FixedArray, FixedDoubleArray, or copy-on-write FixedArray);
// aborts otherwise.  Emits nothing in release code.
void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    Label ok;
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedArrayMapRootIndex);
    j(equal, &ok, Label::kNear);
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedDoubleArrayMapRootIndex);
    j(equal, &ok, Label::kNear);
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedCOWArrayMapRootIndex);
    j(equal, &ok, Label::kNear);
    Abort(kJSObjectWithFastElementsMapHasSlowElements);
    bind(&ok);
  }
}
587
588
// Emits code that aborts with |reason| unless condition |cc| holds.
void MacroAssembler::Check(Condition cc, BailoutReason reason) {
  Label L;
  j(cc, &L, Label::kNear);
  Abort(reason);
  // Control will not return here.
  bind(&L);
}
596
597
// Emits a runtime check that rsp satisfies the platform's activation-frame
// alignment; hits int3 if misaligned.  Emits nothing when the required
// alignment is no stricter than natural pointer alignment.
void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = base::OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    Label alignment_as_expected;
    testp(rsp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected, Label::kNear);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}
611
612
// Jumps to |then_label| if |result| is zero while operand |op| is negative,
// i.e. the arithmetic produced -0 (which a Smi cannot represent).
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  testl(result, result);
  j(not_zero, &ok, Label::kNear);  // Non-zero result: cannot be -0.
  testl(op, op);
  j(sign, then_label);  // Zero result with negative operand: -0.
  bind(&ok);
}
623
624
// Emits code that aborts execution with |reason|, calling Runtime::kAbort
// with the reason pushed as a Smi.  In DEBUG builds the reason text is
// recorded as a comment, and --trap-on-abort emits a bare int3 instead.
void MacroAssembler::Abort(BailoutReason reason) {
#ifdef DEBUG
  const char* msg = GetBailoutReason(reason);
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }

  if (FLAG_trap_on_abort) {
    int3();
    return;
  }
#endif

  Move(kScratchRegister, Smi::FromInt(static_cast<int>(reason)),
       Assembler::RelocInfoNone());
  Push(kScratchRegister);

  if (!has_frame_) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    CallRuntime(Runtime::kAbort);
  } else {
    CallRuntime(Runtime::kAbort);
  }
  // Control will not return here.
  int3();
}
654
655
// Calls |stub|'s generated code, optionally attaching type-feedback |ast_id|.
void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) {
  DCHECK(AllowThisStubCall(stub));  // Calls are not allowed in some stubs.
  Call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
}
660
661
// Tail-calls |stub|'s generated code (jump, not call: no frame needed).
void MacroAssembler::TailCallStub(CodeStub* stub) {
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
}
665
666
Steve Blocka7e24c12009-10-30 11:49:00 +0000667void MacroAssembler::StubReturn(int argc) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000668 DCHECK(argc >= 1 && generating_stub());
Steve Blocka7e24c12009-10-30 11:49:00 +0000669 ret((argc - 1) * kPointerSize);
670}
671
672
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100673bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000674 return has_frame_ || !stub->SometimesSetsUpAFrame();
Steve Blocka7e24c12009-10-30 11:49:00 +0000675}
676
677
// Extracts the cached array index from string-hash-field |hash| into
// |index| as a Smi.
void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it does not conflict.
  DCHECK(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  if (!hash.is(index)) {
    movl(index, hash);
  }
  DecodeFieldToSmi<String::ArrayIndexValueBits>(index);
}
689
690
// Calls runtime function |f| with |num_arguments| stack arguments via the
// CEntryStub.  Clobbers rax (argument count) and rbx (entry address).
void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments,
                                 SaveFPRegsMode save_doubles) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  CHECK(f->nargs < 0 || f->nargs == num_arguments);

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  LoadAddress(rbx, ExternalReference(f, isolate()));
  CEntryStub ces(isolate(), f->result_size, save_doubles);
  CallStub(&ces);
}
708
709
// Calls the external reference |ext| with |num_arguments| stack arguments
// via a single-result CEntryStub.  Clobbers rax and rbx.
void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments) {
  Set(rax, num_arguments);
  LoadAddress(rbx, ext);

  CEntryStub stub(isolate(), 1);
  CallStub(&stub);
}
718
719
// Tail-calls runtime function |fid|.  For fixed-arity functions the argument
// count is loaded into rax here; variable-arity functions expect the caller
// to have set rax already.
void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid) {
  // ----------- S t a t e -------------
  //  -- rsp[0]                 : return address
  //  -- rsp[8]                 : argument num_arguments - 1
  //  ...
  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
  //
  //  For runtime functions with variable arguments:
  //  -- rax                    : number of arguments
  // -----------------------------------

  const Runtime::Function* function = Runtime::FunctionForId(fid);
  DCHECK_EQ(1, function->result_size);
  if (function->nargs >= 0) {
    Set(rax, function->nargs);
  }
  JumpToExternalReference(ExternalReference(fid, isolate()));
}
738
739
// Tail-calls the C entry runtime stub with |ext| as the target entry point.
void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  LoadAddress(rbx, ext);
  CEntryStub ces(isolate(), 1);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}
746
747
// Helper to spell a Register aggregate literal from its code enumerator.
#define REG(Name) \
  { Register::kCode_##Name }

// General purpose registers that PushCallerSaved/PopCallerSaved spill and
// restore (r12-r15 are deliberately absent; see the comment in
// PushCallerSaved).
static const Register saved_regs[] = {
  REG(rax), REG(rcx), REG(rdx), REG(rbx), REG(rbp), REG(rsi), REG(rdi), REG(r8),
  REG(r9), REG(r10), REG(r11)
};

#undef REG

// Number of entries in saved_regs.
static const int kNumberOfSavedRegs = sizeof(saved_regs) / sizeof(Register);
759
760
// Pushes all registers in saved_regs (except up to three exclusions) and,
// if |fp_mode| is kSaveFPRegs, spills every XMM register to a stack block.
// Must be paired with a PopCallerSaved call using the same arguments.
void MacroAssembler::PushCallerSaved(SaveFPRegsMode fp_mode,
                                     Register exclusion1,
                                     Register exclusion2,
                                     Register exclusion3) {
  // We don't allow a GC during a store buffer overflow so there is no need to
  // store the registers in any particular way, but we do have to store and
  // restore them.
  for (int i = 0; i < kNumberOfSavedRegs; i++) {
    Register reg = saved_regs[i];
    if (!reg.is(exclusion1) && !reg.is(exclusion2) && !reg.is(exclusion3)) {
      pushq(reg);
    }
  }
  // R12 to r15 are callee save on all platforms.
  if (fp_mode == kSaveFPRegs) {
    // Reserve one double-sized slot per XMM register, then spill them all.
    subp(rsp, Immediate(kDoubleSize * XMMRegister::kMaxNumRegisters));
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      Movsd(Operand(rsp, i * kDoubleSize), reg);
    }
  }
}
783
784
// Restores the registers saved by PushCallerSaved. Arguments must match the
// corresponding PushCallerSaved call exactly; restoration happens in the
// reverse order of saving.
void MacroAssembler::PopCallerSaved(SaveFPRegsMode fp_mode,
                                    Register exclusion1,
                                    Register exclusion2,
                                    Register exclusion3) {
  if (fp_mode == kSaveFPRegs) {
    // Reload all XMM registers from the stack block, then release it.
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      Movsd(reg, Operand(rsp, i * kDoubleSize));
    }
    addp(rsp, Immediate(kDoubleSize * XMMRegister::kMaxNumRegisters));
  }
  // Pop in reverse order of the pushes in PushCallerSaved.
  for (int i = kNumberOfSavedRegs - 1; i >= 0; i--) {
    Register reg = saved_regs[i];
    if (!reg.is(exclusion1) && !reg.is(exclusion2) && !reg.is(exclusion3)) {
      popq(reg);
    }
  }
}
803
804
// Single- to double-precision conversion. Emits the VEX (AVX) encoding when
// supported, otherwise the SSE2 encoding. In the register-register AVX form
// the source is passed twice so the result does not depend on dst's
// previous contents.
void MacroAssembler::Cvtss2sd(XMMRegister dst, XMMRegister src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vcvtss2sd(dst, src, src);
  } else {
    cvtss2sd(dst, src);
  }
}


// Memory-operand variant of Cvtss2sd.
void MacroAssembler::Cvtss2sd(XMMRegister dst, const Operand& src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vcvtss2sd(dst, dst, src);
  } else {
    cvtss2sd(dst, src);
  }
}


// Double- to single-precision conversion; AVX encoding when supported.
void MacroAssembler::Cvtsd2ss(XMMRegister dst, XMMRegister src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vcvtsd2ss(dst, src, src);
  } else {
    cvtsd2ss(dst, src);
  }
}


// Memory-operand variant of Cvtsd2ss.
void MacroAssembler::Cvtsd2ss(XMMRegister dst, const Operand& src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vcvtsd2ss(dst, dst, src);
  } else {
    cvtsd2ss(dst, src);
  }
}
843
844
// Converts the signed 32-bit integer in |src| to a double in |dst|. The
// destination is zeroed first because cvtsi2sd only writes the low lane,
// which would otherwise create a dependence on dst's stale upper bits.
void MacroAssembler::Cvtlsi2sd(XMMRegister dst, Register src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vxorpd(dst, dst, dst);
    vcvtlsi2sd(dst, dst, src);
  } else {
    xorpd(dst, dst);
    cvtlsi2sd(dst, src);
  }
}


// Memory-operand variant of Cvtlsi2sd.
void MacroAssembler::Cvtlsi2sd(XMMRegister dst, const Operand& src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vxorpd(dst, dst, dst);
    vcvtlsi2sd(dst, dst, src);
  } else {
    xorpd(dst, dst);
    cvtlsi2sd(dst, src);
  }
}


// Converts the signed 32-bit integer in |src| to a float in |dst|; dst is
// zeroed first for the same partial-update reason as Cvtlsi2sd.
void MacroAssembler::Cvtlsi2ss(XMMRegister dst, Register src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vxorps(dst, dst, dst);
    vcvtlsi2ss(dst, dst, src);
  } else {
    xorps(dst, dst);
    cvtlsi2ss(dst, src);
  }
}


// Memory-operand variant of Cvtlsi2ss.
void MacroAssembler::Cvtlsi2ss(XMMRegister dst, const Operand& src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vxorps(dst, dst, dst);
    vcvtlsi2ss(dst, dst, src);
  } else {
    xorps(dst, dst);
    cvtlsi2ss(dst, src);
  }
}
891
892
// Converts the signed 64-bit integer in |src| to a float in |dst|; the
// destination is zeroed first to avoid a dependence on its stale lanes.
void MacroAssembler::Cvtqsi2ss(XMMRegister dst, Register src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vxorps(dst, dst, dst);
    vcvtqsi2ss(dst, dst, src);
  } else {
    xorps(dst, dst);
    cvtqsi2ss(dst, src);
  }
}


// Memory-operand variant of Cvtqsi2ss.
void MacroAssembler::Cvtqsi2ss(XMMRegister dst, const Operand& src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vxorps(dst, dst, dst);
    vcvtqsi2ss(dst, dst, src);
  } else {
    xorps(dst, dst);
    cvtqsi2ss(dst, src);
  }
}


// Converts the signed 64-bit integer in |src| to a double in |dst|.
void MacroAssembler::Cvtqsi2sd(XMMRegister dst, Register src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vxorpd(dst, dst, dst);
    vcvtqsi2sd(dst, dst, src);
  } else {
    xorpd(dst, dst);
    cvtqsi2sd(dst, src);
  }
}


// Memory-operand variant of Cvtqsi2sd.
void MacroAssembler::Cvtqsi2sd(XMMRegister dst, const Operand& src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vxorpd(dst, dst, dst);
    vcvtqsi2sd(dst, dst, src);
  } else {
    xorpd(dst, dst);
    cvtqsi2sd(dst, src);
  }
}
939
940
// Converts the *unsigned* 64-bit integer in |src| to a float in |dst|.
// cvtqsi2ss only accepts signed input, so when the MSB is set the value is
// halved (with the discarded low bit OR-ed back in so rounding is based on
// the full value), converted, then doubled. Clobbers |src| and |tmp| on
// that path.
void MacroAssembler::Cvtqui2ss(XMMRegister dst, Register src, Register tmp) {
  Label msb_set_src;
  Label jmp_return;
  testq(src, src);
  j(sign, &msb_set_src, Label::kNear);
  // MSB clear: the value is representable as a signed 64-bit integer.
  Cvtqsi2ss(dst, src);
  jmp(&jmp_return, Label::kNear);
  bind(&msb_set_src);
  movq(tmp, src);
  shrq(src, Immediate(1));
  // Recover the least significant bit to avoid rounding errors.
  andq(tmp, Immediate(1));
  orq(src, tmp);
  Cvtqsi2ss(dst, src);
  // Double the converted half to get the original magnitude.
  addss(dst, dst);
  bind(&jmp_return);
}


// Unsigned 64-bit integer to double conversion; same halve/convert/double
// strategy as Cvtqui2ss. Clobbers |src| and |tmp| when the MSB is set.
void MacroAssembler::Cvtqui2sd(XMMRegister dst, Register src, Register tmp) {
  Label msb_set_src;
  Label jmp_return;
  testq(src, src);
  j(sign, &msb_set_src, Label::kNear);
  Cvtqsi2sd(dst, src);
  jmp(&jmp_return, Label::kNear);
  bind(&msb_set_src);
  movq(tmp, src);
  shrq(src, Immediate(1));
  // Keep the dropped low bit so the final rounding sees the full value.
  andq(tmp, Immediate(1));
  orq(src, tmp);
  Cvtqsi2sd(dst, src);
  addsd(dst, dst);
  bind(&jmp_return);
}
976
977
// Double to int32 conversion (cvtsd2si); AVX encoding when supported.
void MacroAssembler::Cvtsd2si(Register dst, XMMRegister src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vcvtsd2si(dst, src);
  } else {
    cvtsd2si(dst, src);
  }
}


// Truncating float to int32 conversion (cvttss2si).
void MacroAssembler::Cvttss2si(Register dst, XMMRegister src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vcvttss2si(dst, src);
  } else {
    cvttss2si(dst, src);
  }
}


// Memory-operand variant of Cvttss2si.
void MacroAssembler::Cvttss2si(Register dst, const Operand& src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vcvttss2si(dst, src);
  } else {
    cvttss2si(dst, src);
  }
}


// Truncating double to int32 conversion (cvttsd2si).
void MacroAssembler::Cvttsd2si(Register dst, XMMRegister src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vcvttsd2si(dst, src);
  } else {
    cvttsd2si(dst, src);
  }
}


// Memory-operand variant of Cvttsd2si.
void MacroAssembler::Cvttsd2si(Register dst, const Operand& src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vcvttsd2si(dst, src);
  } else {
    cvttsd2si(dst, src);
  }
}


// Truncating float to int64 conversion (cvttss2siq).
void MacroAssembler::Cvttss2siq(Register dst, XMMRegister src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vcvttss2siq(dst, src);
  } else {
    cvttss2siq(dst, src);
  }
}


// Memory-operand variant of Cvttss2siq.
void MacroAssembler::Cvttss2siq(Register dst, const Operand& src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vcvttss2siq(dst, src);
  } else {
    cvttss2siq(dst, src);
  }
}


// Truncating double to int64 conversion (cvttsd2siq).
void MacroAssembler::Cvttsd2siq(Register dst, XMMRegister src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vcvttsd2siq(dst, src);
  } else {
    cvttsd2siq(dst, src);
  }
}


// Memory-operand variant of Cvttsd2siq.
void MacroAssembler::Cvttsd2siq(Register dst, const Operand& src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vcvttsd2siq(dst, src);
  } else {
    cvttsd2siq(dst, src);
  }
}
1066
1067
// Loads a value of representation |r| from |src| into |dst|, sign- or
// zero-extending as the representation demands. Double values are not
// handled by this helper.
void MacroAssembler::Load(Register dst, const Operand& src, Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8()) {
    movsxbq(dst, src);  // Sign-extend byte.
  } else if (r.IsUInteger8()) {
    movzxbl(dst, src);  // Zero-extend byte.
  } else if (r.IsInteger16()) {
    movsxwq(dst, src);  // Sign-extend 16-bit word.
  } else if (r.IsUInteger16()) {
    movzxwl(dst, src);  // Zero-extend 16-bit word.
  } else if (r.IsInteger32()) {
    movl(dst, src);
  } else {
    movp(dst, src);  // Pointer-sized (tagged) load.
  }
}
1084
1085
// Stores |src| to |dst| using the width implied by representation |r|.
// For full pointer-sized stores, debug builds verify the smi/heap-object
// representation claim first. Double values are not handled.
void MacroAssembler::Store(const Operand& dst, Register src, Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8() || r.IsUInteger8()) {
    movb(dst, src);
  } else if (r.IsInteger16() || r.IsUInteger16()) {
    movw(dst, src);
  } else if (r.IsInteger32()) {
    movl(dst, src);
  } else {
    if (r.IsHeapObject()) {
      AssertNotSmi(src);
    } else if (r.IsSmi()) {
      AssertSmi(src);
    }
    movp(dst, src);
  }
}
1103
1104
// Loads the 64-bit immediate |x| into |dst|, picking the shortest encoding:
// xor for zero, a zero-extending movl or sign-extending movq for values
// that fit 32 bits, and a full 64-bit movq otherwise.
void MacroAssembler::Set(Register dst, int64_t x) {
  if (x == 0) {
    xorl(dst, dst);  // Writing the low 32 bits clears the full register.
  } else if (is_uint32(x)) {
    movl(dst, Immediate(static_cast<uint32_t>(x)));  // Zero-extended.
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));  // Sign-extended.
  } else {
    movq(dst, x);  // Full 64-bit immediate.
  }
}
1116
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001117
// Stores the pointer-sized immediate |x| to memory at |dst|. A 64-bit store
// can only encode a 32-bit sign-extended immediate, so wider values go
// through kScratchRegister.
void MacroAssembler::Set(const Operand& dst, intptr_t x) {
  if (kPointerSize == kInt64Size) {
    if (is_int32(x)) {
      movp(dst, Immediate(static_cast<int32_t>(x)));
    } else {
      Set(kScratchRegister, x);
      movp(dst, kScratchRegister);
    }
  } else {
    // 32-bit pointers: every intptr_t fits a direct immediate store.
    movp(dst, Immediate(static_cast<int32_t>(x)));
  }
}
1130
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001131
Steve Blocka7e24c12009-10-30 11:49:00 +00001132// ----------------------------------------------------------------------------
1133// Smi tagging, untagging and tag detection.
1134
// Returns true when |x| is wide enough that embedding it verbatim in the
// code stream should be avoided (see SafeMove/SafePush, which XOR such
// values with the JIT cookie).
bool MacroAssembler::IsUnsafeInt(const int32_t x) {
  static const int kMaxBits = 17;
  return !is_intn(x, kMaxBits);
}
1139
1140
// Moves the smi constant |src| into |dst|. Wide immediates are obfuscated
// by XOR-ing with the JIT cookie at code-generation time and un-XOR-ing at
// runtime, so attacker-chosen constants never appear verbatim in generated
// code (JIT-spray mitigation). Clobbers kScratchRegister in the 32-bit-smi
// path.
void MacroAssembler::SafeMove(Register dst, Smi* src) {
  DCHECK(!dst.is(kScratchRegister));
  if (IsUnsafeInt(src->value()) && jit_cookie() != 0) {
    if (SmiValuesAre32Bits()) {
      // JIT cookie can be converted to Smi.
      Move(dst, Smi::FromInt(src->value() ^ jit_cookie()));
      Move(kScratchRegister, Smi::FromInt(jit_cookie()));
      xorp(dst, kScratchRegister);
    } else {
      DCHECK(SmiValuesAre31Bits());
      int32_t value = static_cast<int32_t>(reinterpret_cast<intptr_t>(src));
      movp(dst, Immediate(value ^ jit_cookie()));
      // Second XOR restores the original tagged value at runtime.
      xorp(dst, Immediate(jit_cookie()));
    }
  } else {
    Move(dst, src);
  }
}
1159
1160
// Pushes the smi constant |src|, applying the same JIT-cookie XOR
// obfuscation as SafeMove for wide immediates; the pushed slot is un-XOR-ed
// in place. Clobbers kScratchRegister in the 32-bit-smi path.
void MacroAssembler::SafePush(Smi* src) {
  if (IsUnsafeInt(src->value()) && jit_cookie() != 0) {
    if (SmiValuesAre32Bits()) {
      // JIT cookie can be converted to Smi.
      Push(Smi::FromInt(src->value() ^ jit_cookie()));
      Move(kScratchRegister, Smi::FromInt(jit_cookie()));
      xorp(Operand(rsp, 0), kScratchRegister);
    } else {
      DCHECK(SmiValuesAre31Bits());
      int32_t value = static_cast<int32_t>(reinterpret_cast<intptr_t>(src));
      Push(Immediate(value ^ jit_cookie()));
      xorp(Operand(rsp, 0), Immediate(jit_cookie()));
    }
  } else {
    Push(src);
  }
}
1178
1179
// Materializes the smi constant |source| into kScratchRegister and returns
// that register. Callers must not assume kScratchRegister survives other
// macro operations.
Register MacroAssembler::GetSmiConstant(Smi* source) {
  STATIC_ASSERT(kSmiTag == 0);
  int value = source->value();
  if (value == 0) {
    // The zero smi is all-zero bits; xor is the shortest encoding.
    xorl(kScratchRegister, kScratchRegister);
    return kScratchRegister;
  }
  LoadSmiConstant(kScratchRegister, source);
  return kScratchRegister;
}
1190
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001191
// Loads the tagged smi constant |source| into |dst|.
void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
  STATIC_ASSERT(kSmiTag == 0);
  int value = source->value();
  if (value == 0) {
    // The zero smi is all-zero bits.
    xorl(dst, dst);
  } else {
    Move(dst, source, Assembler::RelocInfoNone());
  }
}
1201
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001202
// Tags the 32-bit integer in |src| as a smi in |dst| by shifting it into
// the smi payload position.
void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  if (!dst.is(src)) {
    movl(dst, src);
  }
  shlp(dst, Immediate(kSmiShift));
}
1210
1211
// Stores the untagged 32-bit integer |src| as a smi into the field |dst|.
// Debug builds first check that the field's low tag bit is clear (i.e. it
// already holds a smi). Clobbers kScratchRegister in the 31-bit-smi path.
void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
  if (emit_debug_code()) {
    testb(dst, Immediate(0x01));
    Label ok;
    j(zero, &ok, Label::kNear);
    Abort(kInteger32ToSmiFieldWritingToNonSmiLocation);
    bind(&ok);
  }

  if (SmiValuesAre32Bits()) {
    // The payload occupies the upper half of the field, so the value can be
    // written there directly without shifting.
    DCHECK(kSmiShift % kBitsPerByte == 0);
    movl(Operand(dst, kSmiShift / kBitsPerByte), src);
  } else {
    DCHECK(SmiValuesAre31Bits());
    Integer32ToSmi(kScratchRegister, src);
    movp(dst, kScratchRegister);
  }
}
1230
1231
// Computes (src + constant) in 32-bit arithmetic and tags the result as a
// smi in |dst|.
void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
                                                Register src,
                                                int constant) {
  if (dst.is(src)) {
    addl(dst, Immediate(constant));
  } else {
    // lea computes the sum without clobbering src.
    leal(dst, Operand(src, constant));
  }
  shlp(dst, Immediate(kSmiShift));
}
1242
1243
// Untags the smi in |src| into a 32-bit integer in |dst|.
void MacroAssembler::SmiToInteger32(Register dst, Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  if (!dst.is(src)) {
    movp(dst, src);
  }

  if (SmiValuesAre32Bits()) {
    // Payload is in the upper 32 bits; a logical shift extracts it.
    shrp(dst, Immediate(kSmiShift));
  } else {
    DCHECK(SmiValuesAre31Bits());
    // Arithmetic shift preserves the sign of the 31-bit payload.
    sarl(dst, Immediate(kSmiShift));
  }
}


// Untags the smi stored at |src| into a 32-bit integer in |dst|.
void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) {
  if (SmiValuesAre32Bits()) {
    // Load only the upper half of the field, which holds the payload.
    movl(dst, Operand(src, kSmiShift / kBitsPerByte));
  } else {
    DCHECK(SmiValuesAre31Bits());
    movl(dst, src);
    sarl(dst, Immediate(kSmiShift));
  }
}
1268
1269
// Untags the smi in |src| into a sign-extended 64-bit integer in |dst|.
void MacroAssembler::SmiToInteger64(Register dst, Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  if (!dst.is(src)) {
    movp(dst, src);
  }
  sarp(dst, Immediate(kSmiShift));
  if (kPointerSize == kInt32Size) {
    // Sign extend to 64-bit.
    movsxlq(dst, dst);
  }
}


// Untags the smi stored at |src| into a 64-bit integer in |dst|.
void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) {
  if (SmiValuesAre32Bits()) {
    // Sign-extending load of the payload half of the field.
    movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte));
  } else {
    DCHECK(SmiValuesAre31Bits());
    movp(dst, src);
    SmiToInteger64(dst, dst);
  }
}
1292
1293
// Sets the processor flags from a smi register (for a following
// conditional jump).
void MacroAssembler::SmiTest(Register src) {
  AssertSmi(src);
  testp(src, src);
}


// Compares two tagged smi registers.
void MacroAssembler::SmiCompare(Register smi1, Register smi2) {
  AssertSmi(smi1);
  AssertSmi(smi2);
  cmpp(smi1, smi2);
}


// Compares a smi register against a smi constant.
void MacroAssembler::SmiCompare(Register dst, Smi* src) {
  AssertSmi(dst);
  Cmp(dst, src);
}


// Compares a register against a smi constant. Unlike SmiCompare, does not
// assert that the register holds a smi. Clobbers kScratchRegister for
// non-zero constants.
void MacroAssembler::Cmp(Register dst, Smi* src) {
  DCHECK(!dst.is(kScratchRegister));
  if (src->value() == 0) {
    testp(dst, dst);
  } else {
    Register constant_reg = GetSmiConstant(src);
    cmpp(dst, constant_reg);
  }
}


// Compares a smi register against a smi in memory.
void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
  AssertSmi(dst);
  AssertSmi(src);
  cmpp(dst, src);
}


// Compares a smi in memory against a smi register.
void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
  AssertSmi(dst);
  AssertSmi(src);
  cmpp(dst, src);
}


// Compares a smi in memory against a smi constant.
void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
  AssertSmi(dst);
  if (SmiValuesAre32Bits()) {
    // Compare only the payload half of the field.
    cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
  } else {
    DCHECK(SmiValuesAre31Bits());
    cmpl(dst, Immediate(src));
  }
}


// Compares a memory operand against a smi constant without asserting that
// the operand holds a smi. Clobbers kScratchRegister.
void MacroAssembler::Cmp(const Operand& dst, Smi* src) {
  // The Operand cannot use the smi register.
  Register smi_reg = GetSmiConstant(src);
  DCHECK(!dst.AddressUsesRegister(smi_reg));
  cmpp(dst, smi_reg);
}
1355
1356
// Compares the smi field at |dst| against the untagged 32-bit integer in
// |src|. Clobbers kScratchRegister in the 31-bit-smi path.
void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) {
  if (SmiValuesAre32Bits()) {
    // The payload half of the field can be compared directly.
    cmpl(Operand(dst, kSmiShift / kBitsPerByte), src);
  } else {
    DCHECK(SmiValuesAre31Bits());
    SmiToInteger32(kScratchRegister, dst);
    cmpl(kScratchRegister, src);
  }
}
1366
1367
// Computes dst = (untagged value of smi src) * 2^power for a non-negative
// smi, folding the untag shift and the multiply into a single net shift.
void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
                                                           Register src,
                                                           int power) {
  DCHECK(power >= 0);
  DCHECK(power < 64);
  if (power == 0) {
    SmiToInteger64(dst, src);
    return;
  }
  if (!dst.is(src)) {
    movp(dst, src);
  }
  if (power < kSmiShift) {
    sarp(dst, Immediate(kSmiShift - power));
  } else if (power > kSmiShift) {
    shlp(dst, Immediate(power - kSmiShift));
  }
  // power == kSmiShift: the tagged value already equals the result.
}


// Computes dst = (untagged value of smi src) / 2^power for a non-negative
// smi; the untag and divide fold into one logical right shift. Only the
// in-place (dst == src) form is implemented.
void MacroAssembler::PositiveSmiDivPowerOfTwoToInteger32(Register dst,
                                                         Register src,
                                                         int power) {
  DCHECK((0 <= power) && (power < 32));
  if (dst.is(src)) {
    shrp(dst, Immediate(power + kSmiShift));
  } else {
    UNIMPLEMENTED();  // Not used.
  }
}
1398
1399
// Computes dst = src1 | src2 if both inputs are smis (the OR of two smis is
// itself a valid smi); otherwise jumps to |on_not_smis| leaving dst
// unchanged. May clobber kScratchRegister.
void MacroAssembler::SmiOrIfSmis(Register dst, Register src1, Register src2,
                                 Label* on_not_smis,
                                 Label::Distance near_jump) {
  if (dst.is(src1) || dst.is(src2)) {
    // Build the result in the scratch register first so dst (and thus a
    // source) is untouched if we bail out.
    DCHECK(!src1.is(kScratchRegister));
    DCHECK(!src2.is(kScratchRegister));
    movp(kScratchRegister, src1);
    orp(kScratchRegister, src2);
    JumpIfNotSmi(kScratchRegister, on_not_smis, near_jump);
    movp(dst, kScratchRegister);
  } else {
    movp(dst, src1);
    orp(dst, src2);
    JumpIfNotSmi(dst, on_not_smis, near_jump);
  }
}
1416
1417
// Returns the condition that holds when |src| is a smi.
Condition MacroAssembler::CheckSmi(Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}


// Memory-operand variant of CheckSmi.
Condition MacroAssembler::CheckSmi(const Operand& src) {
  STATIC_ASSERT(kSmiTag == 0);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}


// Returns the condition that holds when |src| is a non-negative smi.
// Clobbers kScratchRegister.
Condition MacroAssembler::CheckNonNegativeSmi(Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  // Test that both bits of the mask 0x8000000000000001 are zero.
  movp(kScratchRegister, src);
  // Rotating left by one moves the sign bit into bit 0, next to the tag
  // bit, so one testb covers both.
  rolp(kScratchRegister, Immediate(1));
  testb(kScratchRegister, Immediate(3));
  return zero;
}


// Returns the condition that holds when both registers are smis. May
// clobber kScratchRegister.
Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  STATIC_ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3);
  if (SmiValuesAre32Bits()) {
    // With tags 00 (smi) and 01 (heap object), the sum of the two low tag
    // bits is 00 only when both values are smis.
    leal(kScratchRegister, Operand(first, second, times_1, 0));
    testb(kScratchRegister, Immediate(0x03));
  } else {
    DCHECK(SmiValuesAre31Bits());
    // OR-ing the values sets the tag bit if either value is a heap object.
    movl(kScratchRegister, first);
    orl(kScratchRegister, second);
    testb(kScratchRegister, Immediate(kSmiTagMask));
  }
  return zero;
}
1458
1459
// Returns the condition that holds when both registers are non-negative
// smis. Clobbers kScratchRegister.
Condition MacroAssembler::CheckBothNonNegativeSmi(Register first,
                                                  Register second) {
  if (first.is(second)) {
    return CheckNonNegativeSmi(first);
  }
  // The OR of both values has its sign bit or tag bit set iff either value
  // is negative or a heap object; the rotate brings both bits into the low
  // two positions for a single test.
  movp(kScratchRegister, first);
  orp(kScratchRegister, second);
  rolp(kScratchRegister, Immediate(1));
  testl(kScratchRegister, Immediate(3));
  return zero;
}


// Returns the condition that holds when at least one of the registers is a
// smi. |scratch| may alias either input and is clobbered.
Condition MacroAssembler::CheckEitherSmi(Register first,
                                         Register second,
                                         Register scratch) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  // AND-ing the values clears the tag bit if either value is a smi.
  if (scratch.is(second)) {
    andl(scratch, first);
  } else {
    if (!scratch.is(first)) {
      movl(scratch, first);
    }
    andl(scratch, second);
  }
  testb(scratch, Immediate(kSmiTagMask));
  return zero;
}
1490
1491
// Returns the condition that holds when the signed 32-bit integer in |src|
// fits in a smi.
Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {
  if (SmiValuesAre32Bits()) {
    // A 32-bit integer value can always be converted to a smi.
    return always;
  } else {
    DCHECK(SmiValuesAre31Bits());
    // src - 0xc0000000 == src + 0x40000000 (mod 2^32) is non-negative
    // exactly when src is in the 31-bit smi range [-2^30, 2^30).
    cmpl(src, Immediate(0xc0000000));
    return positive;
  }
}


// Returns the condition that holds when the unsigned 32-bit integer in
// |src| fits in a smi.
Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
  if (SmiValuesAre32Bits()) {
    // An unsigned 32-bit integer value is valid as long as the high bit
    // is not set.
    testl(src, src);
    return positive;
  } else {
    DCHECK(SmiValuesAre31Bits());
    // Valid only when the top two bits are clear, i.e. the value is below
    // 2^30.
    testl(src, Immediate(0xc0000000));
    return zero;
  }
}
1516
1517
// Computes dst = src & kSmiTagMask, i.e. zero iff |src| is a smi.
void MacroAssembler::CheckSmiToIndicator(Register dst, Register src) {
  if (dst.is(src)) {
    andl(dst, Immediate(kSmiTagMask));
  } else {
    movl(dst, Immediate(kSmiTagMask));
    andl(dst, src);
  }
}


// Memory-operand variant of CheckSmiToIndicator. The operations are ordered
// so that dst is never overwritten while it is still needed to address
// |src|.
void MacroAssembler::CheckSmiToIndicator(Register dst, const Operand& src) {
  if (!(src.AddressUsesRegister(dst))) {
    movl(dst, Immediate(kSmiTagMask));
    andl(dst, src);
  } else {
    movl(dst, src);
    andl(dst, Immediate(kSmiTagMask));
  }
}
1537
1538
// Jumps to |on_valid| if the 32-bit integer in |src| fits in a smi.
void MacroAssembler::JumpIfValidSmiValue(Register src,
                                         Label* on_valid,
                                         Label::Distance near_jump) {
  Condition is_valid = CheckInteger32ValidSmiValue(src);
  j(is_valid, on_valid, near_jump);
}


// Jumps to |on_invalid| if the 32-bit integer in |src| does not fit a smi.
void MacroAssembler::JumpIfNotValidSmiValue(Register src,
                                            Label* on_invalid,
                                            Label::Distance near_jump) {
  Condition is_valid = CheckInteger32ValidSmiValue(src);
  j(NegateCondition(is_valid), on_invalid, near_jump);
}


// Jumps to |on_valid| if the unsigned 32-bit integer in |src| fits a smi.
void MacroAssembler::JumpIfUIntValidSmiValue(Register src,
                                             Label* on_valid,
                                             Label::Distance near_jump) {
  Condition is_valid = CheckUInteger32ValidSmiValue(src);
  j(is_valid, on_valid, near_jump);
}


// Jumps to |on_invalid| if the unsigned 32-bit integer in |src| does not
// fit a smi.
void MacroAssembler::JumpIfUIntNotValidSmiValue(Register src,
                                                Label* on_invalid,
                                                Label::Distance near_jump) {
  Condition is_valid = CheckUInteger32ValidSmiValue(src);
  j(NegateCondition(is_valid), on_invalid, near_jump);
}


// Jumps to |on_smi| if |src| holds a smi.
void MacroAssembler::JumpIfSmi(Register src,
                               Label* on_smi,
                               Label::Distance near_jump) {
  Condition smi = CheckSmi(src);
  j(smi, on_smi, near_jump);
}


// Jumps to |on_not_smi| if |src| does not hold a smi.
void MacroAssembler::JumpIfNotSmi(Register src,
                                  Label* on_not_smi,
                                  Label::Distance near_jump) {
  Condition smi = CheckSmi(src);
  j(NegateCondition(smi), on_not_smi, near_jump);
}


// Jumps to the label unless |src| is a non-negative smi. Clobbers
// kScratchRegister (via CheckNonNegativeSmi).
void MacroAssembler::JumpUnlessNonNegativeSmi(
    Register src, Label* on_not_smi_or_negative,
    Label::Distance near_jump) {
  Condition non_negative_smi = CheckNonNegativeSmi(src);
  j(NegateCondition(non_negative_smi), on_not_smi_or_negative, near_jump);
}


// Jumps to |on_equals| if the smi in |src| equals |constant|.
void MacroAssembler::JumpIfSmiEqualsConstant(Register src,
                                             Smi* constant,
                                             Label* on_equals,
                                             Label::Distance near_jump) {
  SmiCompare(src, constant);
  j(equal, on_equals, near_jump);
}


// Jumps to the label unless both registers hold smis. May clobber
// kScratchRegister (via CheckBothSmi).
void MacroAssembler::JumpIfNotBothSmi(Register src1,
                                      Register src2,
                                      Label* on_not_both_smi,
                                      Label::Distance near_jump) {
  Condition both_smi = CheckBothSmi(src1, src2);
  j(NegateCondition(both_smi), on_not_both_smi, near_jump);
}


// Jumps to the label unless both registers hold non-negative smis.
// Clobbers kScratchRegister (via CheckBothNonNegativeSmi).
void MacroAssembler::JumpUnlessBothNonNegativeSmi(Register src1,
                                                  Register src2,
                                                  Label* on_not_both_smi,
                                                  Label::Distance near_jump) {
  Condition both_smi = CheckBothNonNegativeSmi(src1, src2);
  j(NegateCondition(both_smi), on_not_both_smi, near_jump);
}
1620
1621
// dst = src + constant, where both operands are tagged Smis.
// No overflow check is emitted: use only when the sum is known to fit.
void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    // Adding zero: just move the value if needed.
    if (!dst.is(src)) {
      movp(dst, src);
    }
    return;
  } else if (dst.is(src)) {
    // In-place add; materialize the constant in a register first.
    DCHECK(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    addp(dst, constant_reg);
  } else {
    // Load the constant into dst, then add src, leaving src untouched.
    LoadSmiConstant(dst, constant);
    addp(dst, src);
  }
}
1637
1638
// Adds the Smi |constant| in place to the tagged Smi stored at memory
// operand |dst|. No overflow check is emitted.
void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
  if (constant->value() != 0) {
    if (SmiValuesAre32Bits()) {
      // 32-bit Smi payloads occupy the upper half of the word (shifted by
      // kSmiShift), so add the untagged value to the high 32 bits only.
      addl(Operand(dst, kSmiShift / kBitsPerByte),
           Immediate(constant->value()));
    } else {
      DCHECK(SmiValuesAre31Bits());
      // 31-bit Smis fit in an immediate; add the tagged constant directly.
      addp(dst, Immediate(constant));
    }
  }
}
1650
1651
// dst = src + constant with overflow handling selected by |constraints|:
// jumps to |bailout_label| on overflow (or on *no* overflow when
// kBailoutOnNoOverflow is set), optionally restoring the source register's
// value first when kPreserveSourceRegister is requested.
void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant,
                                    SmiOperationConstraints constraints,
                                    Label* bailout_label,
                                    Label::Distance near_jump) {
  if (constant->value() == 0) {
    // Adding zero can never overflow; just move the value if needed.
    if (!dst.is(src)) {
      movp(dst, src);
    }
  } else if (dst.is(src)) {
    DCHECK(!dst.is(kScratchRegister));
    LoadSmiConstant(kScratchRegister, constant);
    addp(dst, kScratchRegister);
    if (constraints & SmiOperationConstraint::kBailoutOnNoOverflow) {
      j(no_overflow, bailout_label, near_jump);
      DCHECK(constraints & SmiOperationConstraint::kPreserveSourceRegister);
      // Overflow path falls through here: undo the add so dst (== src)
      // regains its original value.
      subp(dst, kScratchRegister);
    } else if (constraints & SmiOperationConstraint::kBailoutOnOverflow) {
      if (constraints & SmiOperationConstraint::kPreserveSourceRegister) {
        Label done;
        j(no_overflow, &done, Label::kNear);
        // Restore src before taking the bailout.
        subp(dst, kScratchRegister);
        jmp(bailout_label, near_jump);
        bind(&done);
      } else {
        // Bailout if overflow without reserving src.
        j(overflow, bailout_label, near_jump);
      }
    } else {
      UNREACHABLE();
    }
  } else {
    // dst != src: src is never clobbered, so preservation is implicit.
    DCHECK(constraints & SmiOperationConstraint::kPreserveSourceRegister);
    DCHECK(constraints & SmiOperationConstraint::kBailoutOnOverflow);
    LoadSmiConstant(dst, constant);
    addp(dst, src);
    j(overflow, bailout_label, near_jump);
  }
}
1690
1691
// dst = src - constant, where both operands are tagged Smis.
// No overflow check is emitted.
void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    // Subtracting zero: just move the value if needed.
    if (!dst.is(src)) {
      movp(dst, src);
    }
  } else if (dst.is(src)) {
    // In-place subtract; materialize the constant in a register first.
    DCHECK(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    subp(dst, constant_reg);
  } else {
    if (constant->value() == Smi::kMinValue) {
      // kMinValue cannot be negated, so the add-the-negation trick below
      // does not apply.
      LoadSmiConstant(dst, constant);
      // Adding and subtracting the min-value gives the same result, it only
      // differs on the overflow bit, which we don't check here.
      addp(dst, src);
    } else {
      // Subtract by adding the negation, so src can stay untouched.
      LoadSmiConstant(dst, Smi::FromInt(-constant->value()));
      addp(dst, src);
    }
  }
}
1714
1715
// dst = src - constant with overflow handling selected by |constraints|:
// jumps to |bailout_label| on overflow (or on *no* overflow when
// kBailoutOnNoOverflow is set), optionally restoring the source register's
// value first when kPreserveSourceRegister is requested.
void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant,
                                    SmiOperationConstraints constraints,
                                    Label* bailout_label,
                                    Label::Distance near_jump) {
  if (constant->value() == 0) {
    // Subtracting zero can never overflow; just move the value if needed.
    if (!dst.is(src)) {
      movp(dst, src);
    }
  } else if (dst.is(src)) {
    DCHECK(!dst.is(kScratchRegister));
    LoadSmiConstant(kScratchRegister, constant);
    subp(dst, kScratchRegister);
    if (constraints & SmiOperationConstraint::kBailoutOnNoOverflow) {
      j(no_overflow, bailout_label, near_jump);
      DCHECK(constraints & SmiOperationConstraint::kPreserveSourceRegister);
      // Overflow path falls through here: undo the subtract so dst (== src)
      // regains its original value.
      addp(dst, kScratchRegister);
    } else if (constraints & SmiOperationConstraint::kBailoutOnOverflow) {
      if (constraints & SmiOperationConstraint::kPreserveSourceRegister) {
        Label done;
        j(no_overflow, &done, Label::kNear);
        // Restore src before taking the bailout.
        addp(dst, kScratchRegister);
        jmp(bailout_label, near_jump);
        bind(&done);
      } else {
        // Bailout if overflow without reserving src.
        j(overflow, bailout_label, near_jump);
      }
    } else {
      UNREACHABLE();
    }
  } else {
    // dst != src: src is never clobbered, so preservation is implicit.
    DCHECK(constraints & SmiOperationConstraint::kPreserveSourceRegister);
    DCHECK(constraints & SmiOperationConstraint::kBailoutOnOverflow);
    if (constant->value() == Smi::kMinValue) {
      // kMinValue cannot be negated; do a real subtract via the scratch
      // register instead of adding the negation.
      DCHECK(!dst.is(kScratchRegister));
      movp(dst, src);
      LoadSmiConstant(kScratchRegister, constant);
      subp(dst, kScratchRegister);
      j(overflow, bailout_label, near_jump);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(dst, Smi::FromInt(-(constant->value())));
      addp(dst, src);
      j(overflow, bailout_label, near_jump);
    }
  }
}
1763
1764
// dst = -src for a tagged Smi. Jumps to |on_smi_result| when the negation
// produced a valid Smi; falls through (with the original value preserved)
// when it did not — i.e. when src was 0 or Smi::kMinValue, both of which
// negate to themselves in two's complement.
void MacroAssembler::SmiNeg(Register dst,
                            Register src,
                            Label* on_smi_result,
                            Label::Distance near_jump) {
  if (dst.is(src)) {
    DCHECK(!dst.is(kScratchRegister));
    // Keep a backup so src can be restored on the fall-through path.
    movp(kScratchRegister, src);
    negp(dst);  // Low 32 bits are retained as zero by negation.
    // Test if result is zero or Smi::kMinValue.
    cmpp(dst, kScratchRegister);
    j(not_equal, on_smi_result, near_jump);
    movp(src, kScratchRegister);
  } else {
    movp(dst, src);
    negp(dst);
    cmpp(dst, src);
    // If the result is zero or Smi::kMinValue, negation failed to create a smi.
    j(not_equal, on_smi_result, near_jump);
  }
}
1785
1786
// Shared implementation of checked Smi addition; T is Register or Operand.
// Computes dst = src1 + src2 and jumps to |on_not_smi_result| on overflow,
// with src1's value restored on that path.
template<class T>
static void SmiAddHelper(MacroAssembler* masm,
                         Register dst,
                         Register src1,
                         T src2,
                         Label* on_not_smi_result,
                         Label::Distance near_jump) {
  if (dst.is(src1)) {
    // In-place add: on overflow, undo the add before bailing out so that
    // src1 (== dst) still holds its original value.
    Label done;
    masm->addp(dst, src2);
    masm->j(no_overflow, &done, Label::kNear);
    // Restore src1.
    masm->subp(dst, src2);
    masm->jmp(on_not_smi_result, near_jump);
    masm->bind(&done);
  } else {
    // dst is distinct from src1, so src1 is never clobbered.
    masm->movp(dst, src1);
    masm->addp(dst, src2);
    masm->j(overflow, on_not_smi_result, near_jump);
  }
}
1808
1809
// dst = src1 + src2 (tagged Smis), jumping to |on_not_smi_result| on
// overflow with src1 preserved. dst must not alias src2 (the helper's
// restore path would clobber it).
void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  DCHECK_NOT_NULL(on_not_smi_result);
  DCHECK(!dst.is(src2));
  SmiAddHelper<Register>(this, dst, src1, src2, on_not_smi_result, near_jump);
}
1819
1820
// dst = src1 + *src2 (memory operand), jumping to |on_not_smi_result| on
// overflow with src1 preserved. The operand must not address through dst,
// since dst is written before the operand is read again on the restore path.
void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            const Operand& src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  DCHECK_NOT_NULL(on_not_smi_result);
  DCHECK(!src2.AddressUsesRegister(dst));
  SmiAddHelper<Operand>(this, dst, src1, src2, on_not_smi_result, near_jump);
}
1830
1831
// dst = src1 + src2 (tagged Smis) without an overflow bailout.
// In debug code the no-overflow assumption is verified at runtime.
void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible.
  if (!dst.is(src1)) {
    if (emit_debug_code()) {
      // Perform a throw-away add in the scratch register just to check the
      // overflow flag.
      movp(kScratchRegister, src1);
      addp(kScratchRegister, src2);
      Check(no_overflow, kSmiAdditionOverflow);
    }
    // leap computes the sum without touching the flags.
    leap(dst, Operand(src1, src2, times_1, 0));
  } else {
    addp(dst, src2);
    Assert(no_overflow, kSmiAdditionOverflow);
  }
}
1849
1850
// Shared implementation of checked Smi subtraction; T is Register or
// Operand. Computes dst = src1 - src2 and jumps to |on_not_smi_result| on
// overflow, with src1's value restored on that path.
template<class T>
static void SmiSubHelper(MacroAssembler* masm,
                         Register dst,
                         Register src1,
                         T src2,
                         Label* on_not_smi_result,
                         Label::Distance near_jump) {
  if (dst.is(src1)) {
    // In-place subtract: on overflow, undo it before bailing out so that
    // src1 (== dst) still holds its original value.
    Label done;
    masm->subp(dst, src2);
    masm->j(no_overflow, &done, Label::kNear);
    // Restore src1.
    masm->addp(dst, src2);
    masm->jmp(on_not_smi_result, near_jump);
    masm->bind(&done);
  } else {
    // dst is distinct from src1, so src1 is never clobbered.
    masm->movp(dst, src1);
    masm->subp(dst, src2);
    masm->j(overflow, on_not_smi_result, near_jump);
  }
}
1872
1873
// dst = src1 - src2 (tagged Smis), jumping to |on_not_smi_result| on
// overflow with src1 preserved. dst must not alias src2.
void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  DCHECK_NOT_NULL(on_not_smi_result);
  DCHECK(!dst.is(src2));
  SmiSubHelper<Register>(this, dst, src1, src2, on_not_smi_result, near_jump);
}
1883
1884
// dst = src1 - *src2 (memory operand), jumping to |on_not_smi_result| on
// overflow with src1 preserved. The operand must not address through dst.
void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            const Operand& src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  DCHECK_NOT_NULL(on_not_smi_result);
  DCHECK(!src2.AddressUsesRegister(dst));
  SmiSubHelper<Operand>(this, dst, src1, src2, on_not_smi_result, near_jump);
}
1894
1895
// Shared implementation of unchecked Smi subtraction; T is Register or
// Operand. In debug code, asserts that no overflow occurred.
template<class T>
static void SmiSubNoOverflowHelper(MacroAssembler* masm,
                                   Register dst,
                                   Register src1,
                                   T src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible (e.g., subtracting two positive smis).
  if (!dst.is(src1)) {
    masm->movp(dst, src1);
  }
  masm->subp(dst, src2);
  masm->Assert(no_overflow, kSmiSubtractionOverflow);
}
1909
1910
// dst = src1 - src2 (tagged Smis) without an overflow bailout.
// dst must not alias src2, since dst may be written before src2 is read.
void MacroAssembler::SmiSub(Register dst, Register src1, Register src2) {
  DCHECK(!dst.is(src2));
  SmiSubNoOverflowHelper<Register>(this, dst, src1, src2);
}
1915
1916
// dst = src1 - *src2 (memory operand) without an overflow bailout.
void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            const Operand& src2) {
  SmiSubNoOverflowHelper<Operand>(this, dst, src1, src2);
}
1922
1923
// dst = src1 * src2 (tagged Smis). Jumps to |on_not_smi_result| when the
// product overflows the Smi range or would be a negative zero (zero product
// with one negative operand); src1 is restored on that path. Clobbers
// kScratchRegister.
void MacroAssembler::SmiMul(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  DCHECK(!dst.is(src2));
  DCHECK(!dst.is(kScratchRegister));
  DCHECK(!src1.is(kScratchRegister));
  DCHECK(!src2.is(kScratchRegister));

  if (dst.is(src1)) {
    Label failure, zero_correct_result;
    movp(kScratchRegister, src1);  // Create backup for later testing.
    SmiToInteger64(dst, src1);
    imulp(dst, src2);
    j(overflow, &failure, Label::kNear);

    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    Label correct_result;
    testp(dst, dst);
    j(not_zero, &correct_result, Label::kNear);

    // Product is zero: the sign of src1 ^ src2 tells whether one operand
    // was negative.
    movp(dst, kScratchRegister);
    xorp(dst, src2);
    // Result was positive zero.
    j(positive, &zero_correct_result, Label::kNear);

    bind(&failure);  // Reused failure exit, restores src1.
    movp(src1, kScratchRegister);
    jmp(on_not_smi_result, near_jump);

    bind(&zero_correct_result);
    Set(dst, 0);

    bind(&correct_result);
  } else {
    SmiToInteger64(dst, src1);
    imulp(dst, src2);
    j(overflow, on_not_smi_result, near_jump);
    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    Label correct_result;
    testp(dst, dst);
    j(not_zero, &correct_result, Label::kNear);
    // One of src1 and src2 is zero, the check whether the other is
    // negative.
    movp(kScratchRegister, src1);
    xorp(kScratchRegister, src2);
    j(negative, on_not_smi_result, near_jump);
    bind(&correct_result);
  }
}
1977
1978
// dst = src1 / src2 (tagged Smis). Jumps to |on_not_smi_result| when the
// quotient is not a Smi: zero divisor, nonzero remainder, a possible
// negative-zero result, or Smi::kMinValue divided by a negative value
// (over-approximating the kMinValue / -1 idiv trap). Uses rax/rdx for the
// division and kScratchRegister to restore src1 when it aliases rax.
void MacroAssembler::SmiDiv(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  DCHECK(!src1.is(kScratchRegister));
  DCHECK(!src2.is(kScratchRegister));
  DCHECK(!dst.is(kScratchRegister));
  DCHECK(!src2.is(rax));
  DCHECK(!src2.is(rdx));
  DCHECK(!src1.is(rdx));

  // Check for 0 divisor (result is +/-Infinity).
  testp(src2, src2);
  j(zero, on_not_smi_result, near_jump);

  if (src1.is(rax)) {
    // rax is about to be clobbered by the untagged dividend; keep a backup.
    movp(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  // We need to rule out dividing Smi::kMinValue by -1, since that would
  // overflow in idiv and raise an exception.
  // We combine this with negative zero test (negative zero only happens
  // when dividing zero by a negative number).

  // We overshoot a little and go to slow case if we divide min-value
  // by any negative value, not just -1.
  Label safe_div;
  testl(rax, Immediate(~Smi::kMinValue));
  j(not_zero, &safe_div, Label::kNear);
  testp(src2, src2);
  if (src1.is(rax)) {
    j(positive, &safe_div, Label::kNear);
    // Restore src1 before bailing out.
    movp(src1, kScratchRegister);
    jmp(on_not_smi_result, near_jump);
  } else {
    j(negative, on_not_smi_result, near_jump);
  }
  bind(&safe_div);

  SmiToInteger32(src2, src2);
  // Sign extend src1 into edx:eax.
  cdq();
  idivl(src2);
  // Re-tag the divisor, which was untagged for the division.
  Integer32ToSmi(src2, src2);
  // Check that the remainder is zero.
  testl(rdx, rdx);
  if (src1.is(rax)) {
    Label smi_result;
    j(zero, &smi_result, Label::kNear);
    // Nonzero remainder: restore src1 and bail out.
    movp(src1, kScratchRegister);
    jmp(on_not_smi_result, near_jump);
    bind(&smi_result);
  } else {
    j(not_zero, on_not_smi_result, near_jump);
  }
  if (!dst.is(src1) && src1.is(rax)) {
    movp(src1, kScratchRegister);
  }
  Integer32ToSmi(dst, rax);
}
2040
2041
// dst = src1 % src2 (tagged Smis). Jumps to |on_not_smi_result| for a zero
// divisor, for Smi::kMinValue % -1 (would trap in idiv), or when the result
// would be a negative zero (zero remainder with negative dividend). Uses
// rax/rdx for the division and kScratchRegister to restore src1 when it
// aliases rax.
void MacroAssembler::SmiMod(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  DCHECK(!dst.is(kScratchRegister));
  DCHECK(!src1.is(kScratchRegister));
  DCHECK(!src2.is(kScratchRegister));
  DCHECK(!src2.is(rax));
  DCHECK(!src2.is(rdx));
  DCHECK(!src1.is(rdx));
  DCHECK(!src1.is(src2));

  // Check for a zero divisor.
  testp(src2, src2);
  j(zero, on_not_smi_result, near_jump);

  if (src1.is(rax)) {
    // rax is about to be clobbered by the untagged dividend; keep a backup.
    movp(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  SmiToInteger32(src2, src2);

  // Test for the edge case of dividing Smi::kMinValue by -1 (will overflow).
  Label safe_div;
  cmpl(rax, Immediate(Smi::kMinValue));
  j(not_equal, &safe_div, Label::kNear);
  cmpl(src2, Immediate(-1));
  j(not_equal, &safe_div, Label::kNear);
  // Retag inputs and go slow case.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movp(src1, kScratchRegister);
  }
  jmp(on_not_smi_result, near_jump);
  bind(&safe_div);

  // Sign extend eax into edx:eax.
  cdq();
  idivl(src2);
  // Restore smi tags on inputs.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movp(src1, kScratchRegister);
  }
  // Check for a negative zero result. If the result is zero, and the
  // dividend is negative, go slow to return a floating point negative zero.
  Label smi_result;
  testl(rdx, rdx);
  j(not_zero, &smi_result, Label::kNear);
  testp(src1, src1);
  j(negative, on_not_smi_result, near_jump);
  bind(&smi_result);
  Integer32ToSmi(dst, rdx);
}
2096
2097
// dst = ~src as a tagged Smi: flips the payload bits while keeping the tag
// bits valid. Pre-sets the tag/padding bits via xor/lea so that the final
// notp leaves them zero. Clobbers kScratchRegister.
void MacroAssembler::SmiNot(Register dst, Register src) {
  DCHECK(!dst.is(kScratchRegister));
  DCHECK(!src.is(kScratchRegister));
  if (SmiValuesAre32Bits()) {
    // Set tag and padding bits before negating, so that they are zero
    // afterwards.
    movl(kScratchRegister, Immediate(~0));
  } else {
    DCHECK(SmiValuesAre31Bits());
    movl(kScratchRegister, Immediate(1));
  }
  if (dst.is(src)) {
    xorp(dst, kScratchRegister);
  } else {
    // lea computes src + mask without disturbing src.
    leap(dst, Operand(src, kScratchRegister, times_1, 0));
  }
  notp(dst);
}
2116
2117
// dst = src1 & src2 (bitwise AND of tagged Smis; the result is a valid Smi
// because the tag bits of both operands are identical).
void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
  DCHECK(!dst.is(src2));
  if (!dst.is(src1)) {
    movp(dst, src1);
  }
  andp(dst, src2);
}
2125
2126
// dst = src & constant (bitwise AND of a tagged Smi with a Smi constant).
void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    // AND with zero always yields zero.
    Set(dst, 0);
  } else if (dst.is(src)) {
    // In-place AND; materialize the constant in a register first.
    DCHECK(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    andp(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    andp(dst, src);
  }
}
2139
2140
// dst = src1 | src2 (bitwise OR of tagged Smis).
void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    // src2 must survive the move into dst below.
    DCHECK(!src1.is(src2));
    movp(dst, src1);
  }
  orp(dst, src2);
}
2148
2149
// dst = src | constant (bitwise OR of a tagged Smi with a Smi constant).
void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    // In-place OR; materialize the constant in a register first.
    DCHECK(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    orp(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    orp(dst, src);
  }
}
2160
Steve Block3ce2e202009-11-05 08:53:23 +00002161
// dst = src1 ^ src2 (bitwise XOR of tagged Smis; the identical tag bits
// cancel, leaving a validly tagged result).
void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    // src2 must survive the move into dst below.
    DCHECK(!src1.is(src2));
    movp(dst, src1);
  }
  xorp(dst, src2);
}
2169
2170
// dst = src ^ constant (bitwise XOR of a tagged Smi with a Smi constant).
void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    // In-place XOR; materialize the constant in a register first.
    DCHECK(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    xorp(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    xorp(dst, src);
  }
}
2181
2182
// dst = src >> shift_value (arithmetic shift of the Smi payload). Shifts
// the untag-and-payload together, then restores the tag position; an
// arithmetic right shift of a Smi always yields a valid Smi.
// Only the in-place (dst == src) form is implemented.
void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
                                                     Register src,
                                                     int shift_value) {
  DCHECK(is_uint5(shift_value));
  if (shift_value > 0) {
    if (dst.is(src)) {
      // Shift payload and tag together, then shift the payload back into
      // tagged position, zeroing the tag bits.
      sarp(dst, Immediate(shift_value + kSmiShift));
      shlp(dst, Immediate(kSmiShift));
    } else {
      UNIMPLEMENTED();  // Not used.
    }
  }
}
2196
2197
// dst = src << shift_value for a tagged Smi. With 32-bit Smi values the
// shift cannot leave the 64-bit word, so no bailout is taken; with 31-bit
// Smi values the result is range-checked and |on_not_smi_result| is taken
// on overflow.
void MacroAssembler::SmiShiftLeftConstant(Register dst,
                                          Register src,
                                          int shift_value,
                                          Label* on_not_smi_result,
                                          Label::Distance near_jump) {
  if (SmiValuesAre32Bits()) {
    if (!dst.is(src)) {
      movp(dst, src);
    }
    if (shift_value > 0) {
      // Shift amount specified by lower 5 bits, not six as the shl opcode.
      shlq(dst, Immediate(shift_value & 0x1f));
    }
  } else {
    DCHECK(SmiValuesAre31Bits());
    if (dst.is(src)) {
      UNIMPLEMENTED();  // Not used.
    } else {
      // Untag, shift as a 32-bit integer, verify the result still fits in
      // a Smi, then retag.
      SmiToInteger32(dst, src);
      shll(dst, Immediate(shift_value));
      JumpIfNotValidSmiValue(dst, on_not_smi_result, near_jump);
      Integer32ToSmi(dst, dst);
    }
  }
}
2223
2224
// dst = src >>> shift_value (logical shift of the Smi payload). For a zero
// shift of a negative Smi the unsigned reinterpretation is not a Smi, so
// |on_not_smi_result| is taken. Only the dst != src form is implemented.
void MacroAssembler::SmiShiftLogicalRightConstant(
    Register dst, Register src, int shift_value,
    Label* on_not_smi_result, Label::Distance near_jump) {
  // Logic right shift interprets its result as an *unsigned* number.
  if (dst.is(src)) {
    UNIMPLEMENTED();  // Not used.
  } else {
    if (shift_value == 0) {
      // A zero shift leaves the value unchanged, but a negative input is
      // not representable as an unsigned Smi.
      testp(src, src);
      j(negative, on_not_smi_result, near_jump);
    }
    if (SmiValuesAre32Bits()) {
      // Shift payload and tag together, then shift the payload back into
      // tagged position, zeroing the tag bits.
      movp(dst, src);
      shrp(dst, Immediate(shift_value + kSmiShift));
      shlp(dst, Immediate(kSmiShift));
    } else {
      DCHECK(SmiValuesAre31Bits());
      // Untag, shift, range-check the unsigned result, then retag.
      SmiToInteger32(dst, src);
      shrp(dst, Immediate(shift_value));
      JumpIfUIntNotValidSmiValue(dst, on_not_smi_result, near_jump);
      Integer32ToSmi(dst, dst);
    }
  }
}
2249
2250
// dst = src1 << src2 for tagged Smis, with the shift amount taken from
// src2. With 32-bit Smi values the shift is masked to 5 bits and cannot
// overflow the word, so no bailout is taken; with 31-bit Smi values the
// result is range-checked and |on_not_smi_result| is taken on overflow,
// with rcx restored via kScratchRegister when a source aliases it.
// Clobbers rcx (used as the shift-count register).
void MacroAssembler::SmiShiftLeft(Register dst,
                                  Register src1,
                                  Register src2,
                                  Label* on_not_smi_result,
                                  Label::Distance near_jump) {
  if (SmiValuesAre32Bits()) {
    DCHECK(!dst.is(rcx));
    if (!dst.is(src1)) {
      movp(dst, src1);
    }
    // Untag shift amount.
    SmiToInteger32(rcx, src2);
    // Shift amount specified by lower 5 bits, not six as the shl opcode.
    andp(rcx, Immediate(0x1f));
    shlq_cl(dst);
  } else {
    DCHECK(SmiValuesAre31Bits());
    DCHECK(!dst.is(kScratchRegister));
    DCHECK(!src1.is(kScratchRegister));
    DCHECK(!src2.is(kScratchRegister));
    DCHECK(!dst.is(src2));
    DCHECK(!dst.is(rcx));

    if (src1.is(rcx) || src2.is(rcx)) {
      // Back up rcx so it can be restored on the bailout path.
      movq(kScratchRegister, rcx);
    }
    if (dst.is(src1)) {
      UNIMPLEMENTED();  // Not used.
    } else {
      Label valid_result;
      SmiToInteger32(dst, src1);
      SmiToInteger32(rcx, src2);
      shll_cl(dst);
      JumpIfValidSmiValue(dst, &valid_result, Label::kNear);
      // As src1 or src2 could not be dst, we do not need to restore them for
      // clobbering dst.
      if (src1.is(rcx) || src2.is(rcx)) {
        if (src1.is(rcx)) {
          movq(src1, kScratchRegister);
        } else {
          movq(src2, kScratchRegister);
        }
      }
      jmp(on_not_smi_result, near_jump);
      bind(&valid_result);
      Integer32ToSmi(dst, dst);
    }
  }
}
2300
2301
// dst = src1 >>> src2 (logical right shift of tagged Smis, shift amount in
// src2). Jumps to |on_not_smi_result| when the unsigned result does not fit
// in a Smi, restoring rcx (via kScratchRegister) when a source aliases it.
// Clobbers rcx (used as the shift-count register).
void MacroAssembler::SmiShiftLogicalRight(Register dst,
                                          Register src1,
                                          Register src2,
                                          Label* on_not_smi_result,
                                          Label::Distance near_jump) {
  DCHECK(!dst.is(kScratchRegister));
  DCHECK(!src1.is(kScratchRegister));
  DCHECK(!src2.is(kScratchRegister));
  DCHECK(!dst.is(src2));
  DCHECK(!dst.is(rcx));
  if (src1.is(rcx) || src2.is(rcx)) {
    // Back up rcx so it can be restored on the bailout path.
    movq(kScratchRegister, rcx);
  }
  if (dst.is(src1)) {
    UNIMPLEMENTED();  // Not used.
  } else {
    Label valid_result;
    SmiToInteger32(dst, src1);
    SmiToInteger32(rcx, src2);
    shrl_cl(dst);
    JumpIfUIntValidSmiValue(dst, &valid_result, Label::kNear);
    // As src1 or src2 could not be dst, we do not need to restore them for
    // clobbering dst.
    if (src1.is(rcx) || src2.is(rcx)) {
      if (src1.is(rcx)) {
        movq(src1, kScratchRegister);
      } else {
        movq(src2, kScratchRegister);
      }
    }
    jmp(on_not_smi_result, near_jump);
    bind(&valid_result);
    Integer32ToSmi(dst, dst);
  }
}
2337
2338
// dst = src1 >> src2 (arithmetic right shift of tagged Smis, shift amount
// in src2). An arithmetic right shift always yields a valid Smi, so there
// is no bailout. Clobbers rcx (used as the shift-count register).
void MacroAssembler::SmiShiftArithmeticRight(Register dst,
                                             Register src1,
                                             Register src2) {
  DCHECK(!dst.is(kScratchRegister));
  DCHECK(!src1.is(kScratchRegister));
  DCHECK(!src2.is(kScratchRegister));
  DCHECK(!dst.is(rcx));

  // Untag the shift count into rcx, untag the operand, shift, and retag.
  SmiToInteger32(rcx, src2);
  if (!dst.is(src1)) {
    movp(dst, src1);
  }
  SmiToInteger32(dst, dst);
  sarl_cl(dst);
  Integer32ToSmi(dst, dst);
}
2355
2356
Ben Murdoch257744e2011-11-30 15:57:28 +00002357void MacroAssembler::SelectNonSmi(Register dst,
2358 Register src1,
2359 Register src2,
2360 Label* on_not_smis,
2361 Label::Distance near_jump) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002362 DCHECK(!dst.is(kScratchRegister));
2363 DCHECK(!src1.is(kScratchRegister));
2364 DCHECK(!src2.is(kScratchRegister));
2365 DCHECK(!dst.is(src1));
2366 DCHECK(!dst.is(src2));
Ben Murdoch257744e2011-11-30 15:57:28 +00002367 // Both operands must not be smis.
2368#ifdef DEBUG
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002369 Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
2370 Check(not_both_smis, kBothRegistersWereSmisInSelectNonSmi);
Ben Murdoch257744e2011-11-30 15:57:28 +00002371#endif
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002372 STATIC_ASSERT(kSmiTag == 0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002373 DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
Ben Murdoch257744e2011-11-30 15:57:28 +00002374 movl(kScratchRegister, Immediate(kSmiTagMask));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002375 andp(kScratchRegister, src1);
Ben Murdoch257744e2011-11-30 15:57:28 +00002376 testl(kScratchRegister, src2);
2377 // If non-zero then both are smis.
2378 j(not_zero, on_not_smis, near_jump);
2379
2380 // Exactly one operand is a smi.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002381 DCHECK_EQ(1, static_cast<int>(kSmiTagMask));
Ben Murdoch257744e2011-11-30 15:57:28 +00002382 // kScratchRegister still holds src1 & kSmiTag, which is either zero or one.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002383 subp(kScratchRegister, Immediate(1));
Ben Murdoch257744e2011-11-30 15:57:28 +00002384 // If src1 is a smi, then scratch register all 1s, else it is all 0s.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002385 movp(dst, src1);
2386 xorp(dst, src2);
2387 andp(dst, kScratchRegister);
Ben Murdoch257744e2011-11-30 15:57:28 +00002388 // If src1 is a smi, dst holds src1 ^ src2, else it is zero.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002389 xorp(dst, src1);
Ben Murdoch257744e2011-11-30 15:57:28 +00002390 // If src1 is a smi, dst is src2, else it is src1, i.e., the non-smi.
2391}
2392
2393
Steve Block3ce2e202009-11-05 08:53:23 +00002394SmiIndex MacroAssembler::SmiToIndex(Register dst,
2395 Register src,
2396 int shift) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002397 if (SmiValuesAre32Bits()) {
2398 DCHECK(is_uint6(shift));
2399 // There is a possible optimization if shift is in the range 60-63, but that
2400 // will (and must) never happen.
2401 if (!dst.is(src)) {
2402 movp(dst, src);
2403 }
2404 if (shift < kSmiShift) {
2405 sarp(dst, Immediate(kSmiShift - shift));
2406 } else {
2407 shlp(dst, Immediate(shift - kSmiShift));
2408 }
2409 return SmiIndex(dst, times_1);
Steve Block3ce2e202009-11-05 08:53:23 +00002410 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002411 DCHECK(SmiValuesAre31Bits());
2412 DCHECK(shift >= times_1 && shift <= (static_cast<int>(times_8) + 1));
2413 if (!dst.is(src)) {
2414 movp(dst, src);
2415 }
2416 // We have to sign extend the index register to 64-bit as the SMI might
2417 // be negative.
2418 movsxlq(dst, dst);
2419 if (shift == times_1) {
2420 sarq(dst, Immediate(kSmiShift));
2421 return SmiIndex(dst, times_1);
2422 }
2423 return SmiIndex(dst, static_cast<ScaleFactor>(shift - 1));
Steve Blocka7e24c12009-10-30 11:49:00 +00002424 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002425}
2426
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002427
Steve Blocka7e24c12009-10-30 11:49:00 +00002428SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
2429 Register src,
2430 int shift) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002431 if (SmiValuesAre32Bits()) {
2432 // Register src holds a positive smi.
2433 DCHECK(is_uint6(shift));
2434 if (!dst.is(src)) {
2435 movp(dst, src);
2436 }
2437 negp(dst);
2438 if (shift < kSmiShift) {
2439 sarp(dst, Immediate(kSmiShift - shift));
2440 } else {
2441 shlp(dst, Immediate(shift - kSmiShift));
2442 }
2443 return SmiIndex(dst, times_1);
Steve Block3ce2e202009-11-05 08:53:23 +00002444 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002445 DCHECK(SmiValuesAre31Bits());
2446 DCHECK(shift >= times_1 && shift <= (static_cast<int>(times_8) + 1));
2447 if (!dst.is(src)) {
2448 movp(dst, src);
2449 }
2450 negq(dst);
2451 if (shift == times_1) {
2452 sarq(dst, Immediate(kSmiShift));
2453 return SmiIndex(dst, times_1);
2454 }
2455 return SmiIndex(dst, static_cast<ScaleFactor>(shift - 1));
Steve Block3ce2e202009-11-05 08:53:23 +00002456 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002457}
2458
2459
Steve Block44f0eee2011-05-26 01:26:41 +01002460void MacroAssembler::AddSmiField(Register dst, const Operand& src) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002461 if (SmiValuesAre32Bits()) {
2462 DCHECK_EQ(0, kSmiShift % kBitsPerByte);
2463 addl(dst, Operand(src, kSmiShift / kBitsPerByte));
2464 } else {
2465 DCHECK(SmiValuesAre31Bits());
2466 SmiToInteger32(kScratchRegister, src);
2467 addl(dst, kScratchRegister);
2468 }
2469}
2470
2471
2472void MacroAssembler::Push(Smi* source) {
2473 intptr_t smi = reinterpret_cast<intptr_t>(source);
2474 if (is_int32(smi)) {
2475 Push(Immediate(static_cast<int32_t>(smi)));
2476 } else {
2477 Register constant = GetSmiConstant(source);
2478 Push(constant);
2479 }
2480}
2481
2482
2483void MacroAssembler::PushRegisterAsTwoSmis(Register src, Register scratch) {
2484 DCHECK(!src.is(scratch));
2485 movp(scratch, src);
2486 // High bits.
2487 shrp(src, Immediate(kPointerSize * kBitsPerByte - kSmiShift));
2488 shlp(src, Immediate(kSmiShift));
2489 Push(src);
2490 // Low bits.
2491 shlp(scratch, Immediate(kSmiShift));
2492 Push(scratch);
2493}
2494
2495
2496void MacroAssembler::PopRegisterAsTwoSmis(Register dst, Register scratch) {
2497 DCHECK(!dst.is(scratch));
2498 Pop(scratch);
2499 // Low bits.
2500 shrp(scratch, Immediate(kSmiShift));
2501 Pop(dst);
2502 shrp(dst, Immediate(kSmiShift));
2503 // High bits.
2504 shlp(dst, Immediate(kPointerSize * kBitsPerByte - kSmiShift));
2505 orp(dst, scratch);
2506}
2507
2508
2509void MacroAssembler::Test(const Operand& src, Smi* source) {
2510 if (SmiValuesAre32Bits()) {
2511 testl(Operand(src, kIntSize), Immediate(source->value()));
2512 } else {
2513 DCHECK(SmiValuesAre31Bits());
2514 testl(src, Immediate(source));
2515 }
2516}
2517
2518
2519// ----------------------------------------------------------------------------
2520
2521
Ben Murdoch257744e2011-11-30 15:57:28 +00002522void MacroAssembler::JumpIfNotString(Register object,
2523 Register object_map,
2524 Label* not_string,
2525 Label::Distance near_jump) {
2526 Condition is_smi = CheckSmi(object);
2527 j(is_smi, not_string, near_jump);
2528 CmpObjectType(object, FIRST_NONSTRING_TYPE, object_map);
2529 j(above_equal, not_string, near_jump);
2530}
2531
2532
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002533void MacroAssembler::JumpIfNotBothSequentialOneByteStrings(
2534 Register first_object, Register second_object, Register scratch1,
2535 Register scratch2, Label* on_fail, Label::Distance near_jump) {
Ben Murdoch257744e2011-11-30 15:57:28 +00002536 // Check that both objects are not smis.
2537 Condition either_smi = CheckEitherSmi(first_object, second_object);
2538 j(either_smi, on_fail, near_jump);
2539
2540 // Load instance type for both strings.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002541 movp(scratch1, FieldOperand(first_object, HeapObject::kMapOffset));
2542 movp(scratch2, FieldOperand(second_object, HeapObject::kMapOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00002543 movzxbl(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
2544 movzxbl(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));
2545
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002546 // Check that both are flat one-byte strings.
2547 DCHECK(kNotStringTag != 0);
2548 const int kFlatOneByteStringMask =
Ben Murdoch257744e2011-11-30 15:57:28 +00002549 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002550 const int kFlatOneByteStringTag =
2551 kStringTag | kOneByteStringTag | kSeqStringTag;
Ben Murdoch257744e2011-11-30 15:57:28 +00002552
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002553 andl(scratch1, Immediate(kFlatOneByteStringMask));
2554 andl(scratch2, Immediate(kFlatOneByteStringMask));
Ben Murdoch257744e2011-11-30 15:57:28 +00002555 // Interleave the bits to check both scratch1 and scratch2 in one test.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002556 DCHECK_EQ(0, kFlatOneByteStringMask & (kFlatOneByteStringMask << 3));
2557 leap(scratch1, Operand(scratch1, scratch2, times_8, 0));
Ben Murdoch257744e2011-11-30 15:57:28 +00002558 cmpl(scratch1,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002559 Immediate(kFlatOneByteStringTag + (kFlatOneByteStringTag << 3)));
Ben Murdoch257744e2011-11-30 15:57:28 +00002560 j(not_equal, on_fail, near_jump);
2561}
2562
2563
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002564void MacroAssembler::JumpIfInstanceTypeIsNotSequentialOneByte(
2565 Register instance_type, Register scratch, Label* failure,
Ben Murdoch257744e2011-11-30 15:57:28 +00002566 Label::Distance near_jump) {
2567 if (!scratch.is(instance_type)) {
2568 movl(scratch, instance_type);
2569 }
2570
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002571 const int kFlatOneByteStringMask =
Ben Murdoch257744e2011-11-30 15:57:28 +00002572 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
2573
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002574 andl(scratch, Immediate(kFlatOneByteStringMask));
2575 cmpl(scratch, Immediate(kStringTag | kSeqStringTag | kOneByteStringTag));
Ben Murdoch257744e2011-11-30 15:57:28 +00002576 j(not_equal, failure, near_jump);
2577}
2578
2579
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002580void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialOneByte(
2581 Register first_object_instance_type, Register second_object_instance_type,
2582 Register scratch1, Register scratch2, Label* on_fail,
Ben Murdoch257744e2011-11-30 15:57:28 +00002583 Label::Distance near_jump) {
2584 // Load instance type for both strings.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002585 movp(scratch1, first_object_instance_type);
2586 movp(scratch2, second_object_instance_type);
Ben Murdoch257744e2011-11-30 15:57:28 +00002587
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002588 // Check that both are flat one-byte strings.
2589 DCHECK(kNotStringTag != 0);
2590 const int kFlatOneByteStringMask =
Ben Murdoch257744e2011-11-30 15:57:28 +00002591 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002592 const int kFlatOneByteStringTag =
2593 kStringTag | kOneByteStringTag | kSeqStringTag;
Ben Murdoch257744e2011-11-30 15:57:28 +00002594
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002595 andl(scratch1, Immediate(kFlatOneByteStringMask));
2596 andl(scratch2, Immediate(kFlatOneByteStringMask));
Ben Murdoch257744e2011-11-30 15:57:28 +00002597 // Interleave the bits to check both scratch1 and scratch2 in one test.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002598 DCHECK_EQ(0, kFlatOneByteStringMask & (kFlatOneByteStringMask << 3));
2599 leap(scratch1, Operand(scratch1, scratch2, times_8, 0));
Ben Murdoch257744e2011-11-30 15:57:28 +00002600 cmpl(scratch1,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002601 Immediate(kFlatOneByteStringTag + (kFlatOneByteStringTag << 3)));
Ben Murdoch257744e2011-11-30 15:57:28 +00002602 j(not_equal, on_fail, near_jump);
2603}
2604
2605
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002606template<class T>
2607static void JumpIfNotUniqueNameHelper(MacroAssembler* masm,
2608 T operand_or_register,
2609 Label* not_unique_name,
2610 Label::Distance distance) {
2611 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
2612 Label succeed;
2613 masm->testb(operand_or_register,
2614 Immediate(kIsNotStringMask | kIsNotInternalizedMask));
2615 masm->j(zero, &succeed, Label::kNear);
2616 masm->cmpb(operand_or_register, Immediate(static_cast<uint8_t>(SYMBOL_TYPE)));
2617 masm->j(not_equal, not_unique_name, distance);
2618
2619 masm->bind(&succeed);
2620}
2621
2622
2623void MacroAssembler::JumpIfNotUniqueNameInstanceType(Operand operand,
2624 Label* not_unique_name,
2625 Label::Distance distance) {
2626 JumpIfNotUniqueNameHelper<Operand>(this, operand, not_unique_name, distance);
2627}
2628
2629
2630void MacroAssembler::JumpIfNotUniqueNameInstanceType(Register reg,
2631 Label* not_unique_name,
2632 Label::Distance distance) {
2633 JumpIfNotUniqueNameHelper<Register>(this, reg, not_unique_name, distance);
2634}
2635
Steve Block44f0eee2011-05-26 01:26:41 +01002636
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002637void MacroAssembler::Move(Register dst, Register src) {
2638 if (!dst.is(src)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002639 movp(dst, src);
Steve Block6ded16b2010-05-10 14:33:55 +01002640 }
Steve Block6ded16b2010-05-10 14:33:55 +01002641}
2642
2643
Steve Blocka7e24c12009-10-30 11:49:00 +00002644void MacroAssembler::Move(Register dst, Handle<Object> source) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002645 AllowDeferredHandleDereference smi_check;
Steve Blocka7e24c12009-10-30 11:49:00 +00002646 if (source->IsSmi()) {
Steve Block3ce2e202009-11-05 08:53:23 +00002647 Move(dst, Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00002648 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002649 MoveHeapObject(dst, source);
Steve Blocka7e24c12009-10-30 11:49:00 +00002650 }
2651}
2652
2653
2654void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002655 AllowDeferredHandleDereference smi_check;
Steve Blocka7e24c12009-10-30 11:49:00 +00002656 if (source->IsSmi()) {
Steve Block3ce2e202009-11-05 08:53:23 +00002657 Move(dst, Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00002658 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002659 MoveHeapObject(kScratchRegister, source);
2660 movp(dst, kScratchRegister);
Steve Blocka7e24c12009-10-30 11:49:00 +00002661 }
2662}
2663
2664
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002665void MacroAssembler::Move(XMMRegister dst, uint32_t src) {
2666 if (src == 0) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002667 Xorpd(dst, dst);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002668 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002669 unsigned pop = base::bits::CountPopulation32(src);
2670 DCHECK_NE(0u, pop);
2671 if (pop == 32) {
2672 Pcmpeqd(dst, dst);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002673 } else {
2674 movl(kScratchRegister, Immediate(src));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002675 Movq(dst, kScratchRegister);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002676 }
2677 }
2678}
2679
2680
2681void MacroAssembler::Move(XMMRegister dst, uint64_t src) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002682 if (src == 0) {
2683 Xorpd(dst, dst);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002684 } else {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002685 unsigned nlz = base::bits::CountLeadingZeros64(src);
2686 unsigned ntz = base::bits::CountTrailingZeros64(src);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002687 unsigned pop = base::bits::CountPopulation64(src);
2688 DCHECK_NE(0u, pop);
2689 if (pop == 64) {
2690 Pcmpeqd(dst, dst);
2691 } else if (pop + ntz == 64) {
2692 Pcmpeqd(dst, dst);
2693 Psllq(dst, ntz);
2694 } else if (pop + nlz == 64) {
2695 Pcmpeqd(dst, dst);
2696 Psrlq(dst, nlz);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002697 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002698 uint32_t lower = static_cast<uint32_t>(src);
2699 uint32_t upper = static_cast<uint32_t>(src >> 32);
2700 if (upper == 0) {
2701 Move(dst, lower);
2702 } else {
2703 movq(kScratchRegister, src);
2704 Movq(dst, kScratchRegister);
2705 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002706 }
2707 }
2708}
2709
2710
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002711void MacroAssembler::Movaps(XMMRegister dst, XMMRegister src) {
2712 if (CpuFeatures::IsSupported(AVX)) {
2713 CpuFeatureScope scope(this, AVX);
2714 vmovaps(dst, src);
2715 } else {
2716 movaps(dst, src);
2717 }
2718}
2719
2720
2721void MacroAssembler::Movapd(XMMRegister dst, XMMRegister src) {
2722 if (CpuFeatures::IsSupported(AVX)) {
2723 CpuFeatureScope scope(this, AVX);
2724 vmovapd(dst, src);
2725 } else {
2726 movapd(dst, src);
2727 }
2728}
2729
2730
2731void MacroAssembler::Movsd(XMMRegister dst, XMMRegister src) {
2732 if (CpuFeatures::IsSupported(AVX)) {
2733 CpuFeatureScope scope(this, AVX);
2734 vmovsd(dst, dst, src);
2735 } else {
2736 movsd(dst, src);
2737 }
2738}
2739
2740
2741void MacroAssembler::Movsd(XMMRegister dst, const Operand& src) {
2742 if (CpuFeatures::IsSupported(AVX)) {
2743 CpuFeatureScope scope(this, AVX);
2744 vmovsd(dst, src);
2745 } else {
2746 movsd(dst, src);
2747 }
2748}
2749
2750
2751void MacroAssembler::Movsd(const Operand& dst, XMMRegister src) {
2752 if (CpuFeatures::IsSupported(AVX)) {
2753 CpuFeatureScope scope(this, AVX);
2754 vmovsd(dst, src);
2755 } else {
2756 movsd(dst, src);
2757 }
2758}
2759
2760
2761void MacroAssembler::Movss(XMMRegister dst, XMMRegister src) {
2762 if (CpuFeatures::IsSupported(AVX)) {
2763 CpuFeatureScope scope(this, AVX);
2764 vmovss(dst, dst, src);
2765 } else {
2766 movss(dst, src);
2767 }
2768}
2769
2770
2771void MacroAssembler::Movss(XMMRegister dst, const Operand& src) {
2772 if (CpuFeatures::IsSupported(AVX)) {
2773 CpuFeatureScope scope(this, AVX);
2774 vmovss(dst, src);
2775 } else {
2776 movss(dst, src);
2777 }
2778}
2779
2780
2781void MacroAssembler::Movss(const Operand& dst, XMMRegister src) {
2782 if (CpuFeatures::IsSupported(AVX)) {
2783 CpuFeatureScope scope(this, AVX);
2784 vmovss(dst, src);
2785 } else {
2786 movss(dst, src);
2787 }
2788}
2789
2790
2791void MacroAssembler::Movd(XMMRegister dst, Register src) {
2792 if (CpuFeatures::IsSupported(AVX)) {
2793 CpuFeatureScope scope(this, AVX);
2794 vmovd(dst, src);
2795 } else {
2796 movd(dst, src);
2797 }
2798}
2799
2800
2801void MacroAssembler::Movd(XMMRegister dst, const Operand& src) {
2802 if (CpuFeatures::IsSupported(AVX)) {
2803 CpuFeatureScope scope(this, AVX);
2804 vmovd(dst, src);
2805 } else {
2806 movd(dst, src);
2807 }
2808}
2809
2810
2811void MacroAssembler::Movd(Register dst, XMMRegister src) {
2812 if (CpuFeatures::IsSupported(AVX)) {
2813 CpuFeatureScope scope(this, AVX);
2814 vmovd(dst, src);
2815 } else {
2816 movd(dst, src);
2817 }
2818}
2819
2820
2821void MacroAssembler::Movq(XMMRegister dst, Register src) {
2822 if (CpuFeatures::IsSupported(AVX)) {
2823 CpuFeatureScope scope(this, AVX);
2824 vmovq(dst, src);
2825 } else {
2826 movq(dst, src);
2827 }
2828}
2829
2830
2831void MacroAssembler::Movq(Register dst, XMMRegister src) {
2832 if (CpuFeatures::IsSupported(AVX)) {
2833 CpuFeatureScope scope(this, AVX);
2834 vmovq(dst, src);
2835 } else {
2836 movq(dst, src);
2837 }
2838}
2839
2840
2841void MacroAssembler::Movmskpd(Register dst, XMMRegister src) {
2842 if (CpuFeatures::IsSupported(AVX)) {
2843 CpuFeatureScope scope(this, AVX);
2844 vmovmskpd(dst, src);
2845 } else {
2846 movmskpd(dst, src);
2847 }
2848}
2849
2850
2851void MacroAssembler::Roundss(XMMRegister dst, XMMRegister src,
2852 RoundingMode mode) {
2853 if (CpuFeatures::IsSupported(AVX)) {
2854 CpuFeatureScope scope(this, AVX);
2855 vroundss(dst, dst, src, mode);
2856 } else {
2857 roundss(dst, src, mode);
2858 }
2859}
2860
2861
2862void MacroAssembler::Roundsd(XMMRegister dst, XMMRegister src,
2863 RoundingMode mode) {
2864 if (CpuFeatures::IsSupported(AVX)) {
2865 CpuFeatureScope scope(this, AVX);
2866 vroundsd(dst, dst, src, mode);
2867 } else {
2868 roundsd(dst, src, mode);
2869 }
2870}
2871
2872
2873void MacroAssembler::Sqrtsd(XMMRegister dst, XMMRegister src) {
2874 if (CpuFeatures::IsSupported(AVX)) {
2875 CpuFeatureScope scope(this, AVX);
2876 vsqrtsd(dst, dst, src);
2877 } else {
2878 sqrtsd(dst, src);
2879 }
2880}
2881
2882
2883void MacroAssembler::Sqrtsd(XMMRegister dst, const Operand& src) {
2884 if (CpuFeatures::IsSupported(AVX)) {
2885 CpuFeatureScope scope(this, AVX);
2886 vsqrtsd(dst, dst, src);
2887 } else {
2888 sqrtsd(dst, src);
2889 }
2890}
2891
2892
2893void MacroAssembler::Ucomiss(XMMRegister src1, XMMRegister src2) {
2894 if (CpuFeatures::IsSupported(AVX)) {
2895 CpuFeatureScope scope(this, AVX);
2896 vucomiss(src1, src2);
2897 } else {
2898 ucomiss(src1, src2);
2899 }
2900}
2901
2902
2903void MacroAssembler::Ucomiss(XMMRegister src1, const Operand& src2) {
2904 if (CpuFeatures::IsSupported(AVX)) {
2905 CpuFeatureScope scope(this, AVX);
2906 vucomiss(src1, src2);
2907 } else {
2908 ucomiss(src1, src2);
2909 }
2910}
2911
2912
2913void MacroAssembler::Ucomisd(XMMRegister src1, XMMRegister src2) {
2914 if (CpuFeatures::IsSupported(AVX)) {
2915 CpuFeatureScope scope(this, AVX);
2916 vucomisd(src1, src2);
2917 } else {
2918 ucomisd(src1, src2);
2919 }
2920}
2921
2922
2923void MacroAssembler::Ucomisd(XMMRegister src1, const Operand& src2) {
2924 if (CpuFeatures::IsSupported(AVX)) {
2925 CpuFeatureScope scope(this, AVX);
2926 vucomisd(src1, src2);
2927 } else {
2928 ucomisd(src1, src2);
2929 }
2930}
2931
2932
Steve Blocka7e24c12009-10-30 11:49:00 +00002933void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002934 AllowDeferredHandleDereference smi_check;
Steve Block3ce2e202009-11-05 08:53:23 +00002935 if (source->IsSmi()) {
Steve Block44f0eee2011-05-26 01:26:41 +01002936 Cmp(dst, Smi::cast(*source));
Steve Block3ce2e202009-11-05 08:53:23 +00002937 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002938 MoveHeapObject(kScratchRegister, source);
2939 cmpp(dst, kScratchRegister);
Steve Block3ce2e202009-11-05 08:53:23 +00002940 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002941}
2942
2943
2944void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002945 AllowDeferredHandleDereference smi_check;
Steve Blocka7e24c12009-10-30 11:49:00 +00002946 if (source->IsSmi()) {
Steve Block44f0eee2011-05-26 01:26:41 +01002947 Cmp(dst, Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00002948 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002949 MoveHeapObject(kScratchRegister, source);
2950 cmpp(dst, kScratchRegister);
Steve Blocka7e24c12009-10-30 11:49:00 +00002951 }
2952}
2953
2954
2955void MacroAssembler::Push(Handle<Object> source) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002956 AllowDeferredHandleDereference smi_check;
Steve Blocka7e24c12009-10-30 11:49:00 +00002957 if (source->IsSmi()) {
Steve Block3ce2e202009-11-05 08:53:23 +00002958 Push(Smi::cast(*source));
Steve Blocka7e24c12009-10-30 11:49:00 +00002959 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002960 MoveHeapObject(kScratchRegister, source);
2961 Push(kScratchRegister);
Steve Blocka7e24c12009-10-30 11:49:00 +00002962 }
2963}
2964
2965
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002966void MacroAssembler::MoveHeapObject(Register result,
2967 Handle<Object> object) {
2968 AllowDeferredHandleDereference using_raw_address;
2969 DCHECK(object->IsHeapObject());
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002970 if (isolate()->heap()->InNewSpace(*object)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002971 Handle<Cell> cell = isolate()->factory()->NewCell(object);
2972 Move(result, cell, RelocInfo::CELL);
2973 movp(result, Operand(result, 0));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002974 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002975 Move(result, object, RelocInfo::EMBEDDED_OBJECT);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002976 }
2977}
2978
2979
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002980void MacroAssembler::LoadGlobalCell(Register dst, Handle<Cell> cell) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002981 if (dst.is(rax)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002982 AllowDeferredHandleDereference embedding_raw_address;
2983 load_rax(cell.location(), RelocInfo::CELL);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002984 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002985 Move(dst, cell, RelocInfo::CELL);
2986 movp(dst, Operand(dst, 0));
Steve Block3ce2e202009-11-05 08:53:23 +00002987 }
2988}
2989
2990
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002991void MacroAssembler::CmpWeakValue(Register value, Handle<WeakCell> cell,
2992 Register scratch) {
2993 Move(scratch, cell, RelocInfo::EMBEDDED_OBJECT);
2994 cmpp(value, FieldOperand(scratch, WeakCell::kValueOffset));
2995}
2996
2997
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002998void MacroAssembler::GetWeakValue(Register value, Handle<WeakCell> cell) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002999 Move(value, cell, RelocInfo::EMBEDDED_OBJECT);
3000 movp(value, FieldOperand(value, WeakCell::kValueOffset));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003001}
3002
3003
3004void MacroAssembler::LoadWeakValue(Register value, Handle<WeakCell> cell,
3005 Label* miss) {
3006 GetWeakValue(value, cell);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003007 JumpIfSmi(value, miss);
3008}
3009
3010
Leon Clarkee46be812010-01-19 14:06:41 +00003011void MacroAssembler::Drop(int stack_elements) {
3012 if (stack_elements > 0) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003013 addp(rsp, Immediate(stack_elements * kPointerSize));
Leon Clarkee46be812010-01-19 14:06:41 +00003014 }
3015}
3016
3017
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003018void MacroAssembler::DropUnderReturnAddress(int stack_elements,
3019 Register scratch) {
3020 DCHECK(stack_elements > 0);
3021 if (kPointerSize == kInt64Size && stack_elements == 1) {
3022 popq(MemOperand(rsp, 0));
3023 return;
3024 }
3025
3026 PopReturnAddressTo(scratch);
3027 Drop(stack_elements);
3028 PushReturnAddressFrom(scratch);
Steve Blocka7e24c12009-10-30 11:49:00 +00003029}
3030
3031
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003032void MacroAssembler::Push(Register src) {
3033 if (kPointerSize == kInt64Size) {
3034 pushq(src);
3035 } else {
3036 // x32 uses 64-bit push for rbp in the prologue.
3037 DCHECK(src.code() != rbp.code());
3038 leal(rsp, Operand(rsp, -4));
3039 movp(Operand(rsp, 0), src);
3040 }
3041}
3042
3043
3044void MacroAssembler::Push(const Operand& src) {
3045 if (kPointerSize == kInt64Size) {
3046 pushq(src);
3047 } else {
3048 movp(kScratchRegister, src);
3049 leal(rsp, Operand(rsp, -4));
3050 movp(Operand(rsp, 0), kScratchRegister);
3051 }
3052}
3053
3054
3055void MacroAssembler::PushQuad(const Operand& src) {
3056 if (kPointerSize == kInt64Size) {
3057 pushq(src);
3058 } else {
3059 movp(kScratchRegister, src);
3060 pushq(kScratchRegister);
3061 }
3062}
3063
3064
3065void MacroAssembler::Push(Immediate value) {
3066 if (kPointerSize == kInt64Size) {
3067 pushq(value);
3068 } else {
3069 leal(rsp, Operand(rsp, -4));
3070 movp(Operand(rsp, 0), value);
3071 }
3072}
3073
3074
3075void MacroAssembler::PushImm32(int32_t imm32) {
3076 if (kPointerSize == kInt64Size) {
3077 pushq_imm32(imm32);
3078 } else {
3079 leal(rsp, Operand(rsp, -4));
3080 movp(Operand(rsp, 0), Immediate(imm32));
3081 }
3082}
3083
3084
3085void MacroAssembler::Pop(Register dst) {
3086 if (kPointerSize == kInt64Size) {
3087 popq(dst);
3088 } else {
3089 // x32 uses 64-bit pop for rbp in the epilogue.
3090 DCHECK(dst.code() != rbp.code());
3091 movp(dst, Operand(rsp, 0));
3092 leal(rsp, Operand(rsp, 4));
3093 }
3094}
3095
3096
3097void MacroAssembler::Pop(const Operand& dst) {
3098 if (kPointerSize == kInt64Size) {
3099 popq(dst);
3100 } else {
3101 Register scratch = dst.AddressUsesRegister(kScratchRegister)
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003102 ? kRootRegister : kScratchRegister;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003103 movp(scratch, Operand(rsp, 0));
3104 movp(dst, scratch);
3105 leal(rsp, Operand(rsp, 4));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003106 if (scratch.is(kRootRegister)) {
3107 // Restore kRootRegister.
3108 InitializeRootRegister();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003109 }
3110 }
3111}
3112
3113
3114void MacroAssembler::PopQuad(const Operand& dst) {
3115 if (kPointerSize == kInt64Size) {
3116 popq(dst);
3117 } else {
3118 popq(kScratchRegister);
3119 movp(dst, kScratchRegister);
3120 }
3121}
3122
3123
3124void MacroAssembler::LoadSharedFunctionInfoSpecialField(Register dst,
3125 Register base,
3126 int offset) {
3127 DCHECK(offset > SharedFunctionInfo::kLengthOffset &&
3128 offset <= SharedFunctionInfo::kSize &&
3129 (((offset - SharedFunctionInfo::kLengthOffset) / kIntSize) % 2 == 1));
3130 if (kPointerSize == kInt64Size) {
3131 movsxlq(dst, FieldOperand(base, offset));
3132 } else {
3133 movp(dst, FieldOperand(base, offset));
3134 SmiToInteger32(dst, dst);
3135 }
3136}
3137
3138
3139void MacroAssembler::TestBitSharedFunctionInfoSpecialField(Register base,
3140 int offset,
3141 int bits) {
3142 DCHECK(offset > SharedFunctionInfo::kLengthOffset &&
3143 offset <= SharedFunctionInfo::kSize &&
3144 (((offset - SharedFunctionInfo::kLengthOffset) / kIntSize) % 2 == 1));
3145 if (kPointerSize == kInt32Size) {
3146 // On x32, this field is represented by SMI.
3147 bits += kSmiShift;
3148 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003149 int byte_offset = bits / kBitsPerByte;
3150 int bit_in_byte = bits & (kBitsPerByte - 1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003151 testb(FieldOperand(base, offset + byte_offset), Immediate(1 << bit_in_byte));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003152}
3153
3154
Steve Blocka7e24c12009-10-30 11:49:00 +00003155void MacroAssembler::Jump(ExternalReference ext) {
Steve Block44f0eee2011-05-26 01:26:41 +01003156 LoadAddress(kScratchRegister, ext);
Steve Blocka7e24c12009-10-30 11:49:00 +00003157 jmp(kScratchRegister);
3158}
3159
3160
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003161void MacroAssembler::Jump(const Operand& op) {
3162 if (kPointerSize == kInt64Size) {
3163 jmp(op);
3164 } else {
3165 movp(kScratchRegister, op);
3166 jmp(kScratchRegister);
3167 }
3168}
3169
3170
Steve Blocka7e24c12009-10-30 11:49:00 +00003171void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003172 Move(kScratchRegister, destination, rmode);
Steve Blocka7e24c12009-10-30 11:49:00 +00003173 jmp(kScratchRegister);
3174}
3175
3176
3177void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
Steve Block3ce2e202009-11-05 08:53:23 +00003178 // TODO(X64): Inline this
3179 jmp(code_object, rmode);
Steve Blocka7e24c12009-10-30 11:49:00 +00003180}
3181
3182
Steve Block44f0eee2011-05-26 01:26:41 +01003183int MacroAssembler::CallSize(ExternalReference ext) {
3184 // Opcode for call kScratchRegister is: Rex.B FF D4 (three bytes).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003185 return LoadAddressSize(ext) +
3186 Assembler::kCallScratchRegisterInstructionLength;
Steve Block44f0eee2011-05-26 01:26:41 +01003187}
3188
3189
Steve Blocka7e24c12009-10-30 11:49:00 +00003190void MacroAssembler::Call(ExternalReference ext) {
Steve Block44f0eee2011-05-26 01:26:41 +01003191#ifdef DEBUG
3192 int end_position = pc_offset() + CallSize(ext);
3193#endif
3194 LoadAddress(kScratchRegister, ext);
Steve Blocka7e24c12009-10-30 11:49:00 +00003195 call(kScratchRegister);
Steve Block44f0eee2011-05-26 01:26:41 +01003196#ifdef DEBUG
3197 CHECK_EQ(end_position, pc_offset());
3198#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003199}
3200
3201
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003202void MacroAssembler::Call(const Operand& op) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003203 if (kPointerSize == kInt64Size && !CpuFeatures::IsSupported(ATOM)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003204 call(op);
3205 } else {
3206 movp(kScratchRegister, op);
3207 call(kScratchRegister);
3208 }
3209}
3210
3211
// Call an absolute address with the given relocation mode. The address is
// materialized in kScratchRegister. In debug builds, verifies the emitted
// sequence is exactly CallSize(destination) bytes long.
void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
#ifdef DEBUG
  int end_position = pc_offset() + CallSize(destination);
#endif
  Move(kScratchRegister, destination, rmode);
  call(kScratchRegister);
#ifdef DEBUG
  CHECK_EQ(pc_offset(), end_position);
#endif
}
3222
3223
// Call a code object directly (relocated call, no scratch register). |rmode|
// must identify a code target (or the code-age sequence); |ast_id| tags the
// call site for type feedback. Debug builds verify the emitted size matches
// CallSize(code_object).
void MacroAssembler::Call(Handle<Code> code_object,
                          RelocInfo::Mode rmode,
                          TypeFeedbackId ast_id) {
#ifdef DEBUG
  int end_position = pc_offset() + CallSize(code_object);
#endif
  DCHECK(RelocInfo::IsCodeTarget(rmode) ||
         rmode == RelocInfo::CODE_AGE_SEQUENCE);
  call(code_object, rmode, ast_id);
#ifdef DEBUG
  CHECK_EQ(end_position, pc_offset());
#endif
}
3237
3238
// Extract 32-bit lane |imm8| (0 or 1 only) of |src| into |dst|.
// Uses PEXTRD when SSE4.1 is available; otherwise emulates lane 1 with a
// 64-bit move plus shift.
void MacroAssembler::Pextrd(Register dst, XMMRegister src, int8_t imm8) {
  if (imm8 == 0) {
    // Lane 0 is just the low 32 bits.
    Movd(dst, src);
    return;
  }
  DCHECK_EQ(1, imm8);
  if (CpuFeatures::IsSupported(SSE4_1)) {
    CpuFeatureScope sse_scope(this, SSE4_1);
    pextrd(dst, src, imm8);
    return;
  }
  // No SSE4.1: copy the low 64 bits and shift lane 1 down into place.
  movq(dst, src);
  shrq(dst, Immediate(32));
}
3253
3254
3255void MacroAssembler::Pinsrd(XMMRegister dst, Register src, int8_t imm8) {
3256 if (CpuFeatures::IsSupported(SSE4_1)) {
3257 CpuFeatureScope sse_scope(this, SSE4_1);
3258 pinsrd(dst, src, imm8);
3259 return;
3260 }
3261 Movd(xmm0, src);
3262 if (imm8 == 1) {
3263 punpckldq(dst, xmm0);
3264 } else {
3265 DCHECK_EQ(0, imm8);
3266 Movss(dst, xmm0);
3267 }
3268}
3269
3270
// Insert the 32-bit value at memory operand |src| into lane |imm8| (0 or 1)
// of |dst|. Uses PINSRD when SSE4.1 is available; otherwise emulates via
// xmm0, which is clobbered.
void MacroAssembler::Pinsrd(XMMRegister dst, const Operand& src, int8_t imm8) {
  DCHECK(imm8 == 0 || imm8 == 1);
  if (CpuFeatures::IsSupported(SSE4_1)) {
    CpuFeatureScope sse_scope(this, SSE4_1);
    pinsrd(dst, src, imm8);
    return;
  }
  // Fallback: materialize the value in xmm0, then merge into the lane.
  Movd(xmm0, src);
  if (imm8 == 1) {
    punpckldq(dst, xmm0);  // Interleave low dwords: value becomes lane 1.
  } else {
    DCHECK_EQ(0, imm8);
    Movss(dst, xmm0);      // Replace lane 0 only.
  }
}
3286
3287
// Count leading zeros of the 32-bit value in |src|. Uses LZCNT when
// available; otherwise emulates it with BSR. The emulation also yields 32
// for src == 0 (BSR leaves dst undefined in that case, hence the fixup).
void MacroAssembler::Lzcntl(Register dst, Register src) {
  if (CpuFeatures::IsSupported(LZCNT)) {
    CpuFeatureScope scope(this, LZCNT);
    lzcntl(dst, src);
    return;
  }
  Label not_zero_src;
  bsrl(dst, src);                // dst = index of highest set bit.
  j(not_zero, &not_zero_src, Label::kNear);
  Set(dst, 63);  // 63^31 == 32
  bind(&not_zero_src);
  xorl(dst, Immediate(31));  // for x in [0..31], 31^x == 31 - x
}


// Memory-operand variant of Lzcntl above; identical semantics.
void MacroAssembler::Lzcntl(Register dst, const Operand& src) {
  if (CpuFeatures::IsSupported(LZCNT)) {
    CpuFeatureScope scope(this, LZCNT);
    lzcntl(dst, src);
    return;
  }
  Label not_zero_src;
  bsrl(dst, src);
  j(not_zero, &not_zero_src, Label::kNear);
  Set(dst, 63);  // 63^31 == 32
  bind(&not_zero_src);
  xorl(dst, Immediate(31));  // for x in [0..31], 31^x == 31 - x
}
3316
3317
// Count leading zeros of the 64-bit value in |src|. Uses LZCNT when
// available; otherwise emulates it with BSR. The emulation also yields 64
// for src == 0 (BSR leaves dst undefined in that case, hence the fixup).
void MacroAssembler::Lzcntq(Register dst, Register src) {
  if (CpuFeatures::IsSupported(LZCNT)) {
    CpuFeatureScope scope(this, LZCNT);
    lzcntq(dst, src);
    return;
  }
  Label not_zero_src;
  bsrq(dst, src);                // dst = index of highest set bit.
  j(not_zero, &not_zero_src, Label::kNear);
  Set(dst, 127);  // 127^63 == 64
  bind(&not_zero_src);
  xorl(dst, Immediate(63));  // for x in [0..63], 63^x == 63 - x
}


// Memory-operand variant of Lzcntq above; identical semantics.
void MacroAssembler::Lzcntq(Register dst, const Operand& src) {
  if (CpuFeatures::IsSupported(LZCNT)) {
    CpuFeatureScope scope(this, LZCNT);
    lzcntq(dst, src);
    return;
  }
  Label not_zero_src;
  bsrq(dst, src);
  j(not_zero, &not_zero_src, Label::kNear);
  Set(dst, 127);  // 127^63 == 64
  bind(&not_zero_src);
  xorl(dst, Immediate(63));  // for x in [0..63], 63^x == 63 - x
}
3346
3347
// Count trailing zeros of the 64-bit value in |src|. Uses TZCNT (BMI1) when
// available; otherwise emulates it with BSF plus a fixup for zero input.
void MacroAssembler::Tzcntq(Register dst, Register src) {
  if (CpuFeatures::IsSupported(BMI1)) {
    CpuFeatureScope scope(this, BMI1);
    tzcntq(dst, src);
    return;
  }
  Label not_zero_src;
  bsfq(dst, src);                // dst = index of lowest set bit.
  j(not_zero, &not_zero_src, Label::kNear);
  // Define the result of tzcnt(0) separately, because bsf(0) is undefined.
  Set(dst, 64);
  bind(&not_zero_src);
}


// Memory-operand variant of Tzcntq above; identical semantics.
void MacroAssembler::Tzcntq(Register dst, const Operand& src) {
  if (CpuFeatures::IsSupported(BMI1)) {
    CpuFeatureScope scope(this, BMI1);
    tzcntq(dst, src);
    return;
  }
  Label not_zero_src;
  bsfq(dst, src);
  j(not_zero, &not_zero_src, Label::kNear);
  // Define the result of tzcnt(0) separately, because bsf(0) is undefined.
  Set(dst, 64);
  bind(&not_zero_src);
}
3376
3377
// Count trailing zeros of the 32-bit value in |src|. Uses TZCNT (BMI1) when
// available; otherwise emulates it with BSF plus a fixup for zero input.
void MacroAssembler::Tzcntl(Register dst, Register src) {
  if (CpuFeatures::IsSupported(BMI1)) {
    CpuFeatureScope scope(this, BMI1);
    tzcntl(dst, src);
    return;
  }
  Label not_zero_src;
  bsfl(dst, src);                // dst = index of lowest set bit.
  j(not_zero, &not_zero_src, Label::kNear);
  Set(dst, 32);  // The result of tzcnt is 32 if src = 0.
  bind(&not_zero_src);
}


// Memory-operand variant of Tzcntl above; identical semantics.
void MacroAssembler::Tzcntl(Register dst, const Operand& src) {
  if (CpuFeatures::IsSupported(BMI1)) {
    CpuFeatureScope scope(this, BMI1);
    tzcntl(dst, src);
    return;
  }
  Label not_zero_src;
  bsfl(dst, src);
  j(not_zero, &not_zero_src, Label::kNear);
  Set(dst, 32);  // The result of tzcnt is 32 if src = 0.
  bind(&not_zero_src);
}
3404
3405
// Population count (number of set bits) of the 32-bit value in |src|.
// Requires the POPCNT feature; callers must have checked availability —
// there is deliberately no software fallback.
void MacroAssembler::Popcntl(Register dst, Register src) {
  if (CpuFeatures::IsSupported(POPCNT)) {
    CpuFeatureScope scope(this, POPCNT);
    popcntl(dst, src);
    return;
  }
  UNREACHABLE();
}


// Memory-operand variant of Popcntl above; same POPCNT requirement.
void MacroAssembler::Popcntl(Register dst, const Operand& src) {
  if (CpuFeatures::IsSupported(POPCNT)) {
    CpuFeatureScope scope(this, POPCNT);
    popcntl(dst, src);
    return;
  }
  UNREACHABLE();
}


// Population count of the 64-bit value in |src|; POPCNT required.
void MacroAssembler::Popcntq(Register dst, Register src) {
  if (CpuFeatures::IsSupported(POPCNT)) {
    CpuFeatureScope scope(this, POPCNT);
    popcntq(dst, src);
    return;
  }
  UNREACHABLE();
}


// Memory-operand variant of Popcntq above; same POPCNT requirement.
void MacroAssembler::Popcntq(Register dst, const Operand& src) {
  if (CpuFeatures::IsSupported(POPCNT)) {
    CpuFeatureScope scope(this, POPCNT);
    popcntq(dst, src);
    return;
  }
  UNREACHABLE();
}
3444
3445
// Push all general-purpose registers that must be preserved across a
// safepoint (12 of them), then reserve stack space so the frame covers
// kNumSafepointRegisters slots. rsp/rbp, the scratch register (r10) and
// the root register (r13) are intentionally not saved.
void MacroAssembler::Pushad() {
  Push(rax);
  Push(rcx);
  Push(rdx);
  Push(rbx);
  // Not pushing rsp or rbp.
  Push(rsi);
  Push(rdi);
  Push(r8);
  Push(r9);
  // r10 is kScratchRegister.
  Push(r11);
  Push(r12);
  // r13 is kRootRegister.
  Push(r14);
  Push(r15);
  STATIC_ASSERT(12 == kNumSafepointSavedRegisters);
  // Use lea for symmetry with Popad.
  int sp_delta =
      (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
  leap(rsp, Operand(rsp, -sp_delta));
}
3468
3469
// Restore the registers saved by Pushad, in exact reverse order, and release
// the extra slack slots first.
void MacroAssembler::Popad() {
  // Popad must not change the flags, so use lea instead of addq.
  int sp_delta =
      (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
  leap(rsp, Operand(rsp, sp_delta));
  Pop(r15);
  Pop(r14);
  Pop(r12);
  Pop(r11);
  Pop(r9);
  Pop(r8);
  Pop(rdi);
  Pop(rsi);
  Pop(rbx);
  Pop(rdx);
  Pop(rcx);
  Pop(rax);
}
3488
3489
// Discard the entire Pushad frame without restoring any registers.
void MacroAssembler::Dropad() {
  addp(rsp, Immediate(kNumSafepointRegisters * kPointerSize));
}
3493
3494
// Order general registers are pushed by Pushad:
// rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r12, r14, r15.
// Maps each register code to its index in the Pushad frame, or -1 for
// registers Pushad does not save (rsp, rbp, r10 = kScratchRegister,
// r13 = kRootRegister).
const int
MacroAssembler::kSafepointPushRegisterIndices[Register::kNumRegisters] = {
    0,   // rax
    1,   // rcx
    2,   // rdx
    3,   // rbx
    -1,  // rsp - not saved
    -1,  // rbp - not saved
    4,   // rsi
    5,   // rdi
    6,   // r8
    7,   // r9
    -1,  // r10 - kScratchRegister, not saved
    8,   // r11
    9,   // r12
    -1,  // r13 - kRootRegister, not saved
    10,  // r14
    11   // r15
};
3516
3517
// Store an immediate into |dst|'s slot in the Pushad/safepoint frame.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst,
                                                  const Immediate& imm) {
  movp(SafepointRegisterSlot(dst), imm);
}


// Store |src| into |dst|'s slot in the Pushad/safepoint frame.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
  movp(SafepointRegisterSlot(dst), src);
}


// Load |src|'s saved value from the Pushad/safepoint frame into |dst|.
void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  movp(dst, SafepointRegisterSlot(src));
}


// Stack operand addressing |reg|'s slot in the Pushad/safepoint frame,
// using the kSafepointPushRegisterIndices mapping.
Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return Operand(rsp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}
3537
3538
// Push a new stack handler (one pointer: the "next" link) and make it the
// current handler in the isolate's handler chain.
void MacroAssembler::PushStackHandler() {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);

  // Link the current handler as the next handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  Push(ExternalOperand(handler_address));

  // Set this new handler as the current one.
  movp(ExternalOperand(handler_address), rsp);
}
3551
3552
// Unlink the current stack handler: restore the saved "next" pointer as the
// isolate's current handler and drop the rest of the handler from the stack.
void MacroAssembler::PopStackHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  Pop(ExternalOperand(handler_address));
  addp(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}
3559
3560
// Plain return, popping no arguments.
void MacroAssembler::Ret() {
  ret(0);
}


// Return and drop |bytes_dropped| bytes of arguments from the stack.
// |scratch| is clobbered when the byte count does not fit the 16-bit
// immediate of the RET instruction.
void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
  if (is_uint16(bytes_dropped)) {
    ret(bytes_dropped);
  } else {
    // Too large for ret's immediate: pop the return address, adjust rsp
    // manually, push the return address back, then return.
    PopReturnAddressTo(scratch);
    addp(rsp, Immediate(bytes_dropped));
    PushReturnAddressFrom(scratch);
    ret(0);
  }
}
3576
3577
// Compare the top two x87 stack values into EFLAGS and pop both:
// fucomip compares st(0) with st(1) and pops once; fstp(0) pops the other.
void MacroAssembler::FCmp() {
  fucomip();
  fstp(0);
}
3582
3583
// Compare the instance type of |heap_object| against |type|, leaving the
// object's map in |map| as a side effect. Sets flags for a j(equal, ...).
void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  movp(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}


// Compare the instance type stored in |map| against |type| (byte compare).
void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       Immediate(static_cast<int8_t>(type)));
}
3596
3597
// Jump to |fail| unless |map|'s elements kind is one of the fast kinds
// (FAST_SMI, FAST_HOLEY_SMI, FAST, FAST_HOLEY). Relies on the fast kinds
// occupying the lowest elements-kind values, asserted below.
void MacroAssembler::CheckFastElements(Register map,
                                       Label* fail,
                                       Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastHoleyElementValue));
  j(above, fail, distance);
}


// Jump to |fail| unless |map|'s elements kind is FAST_ELEMENTS or
// FAST_HOLEY_ELEMENTS (object elements; smi-only kinds also fail).
void MacroAssembler::CheckFastObjectElements(Register map,
                                             Label* fail,
                                             Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  // Fail if kind <= FAST_HOLEY_SMI_ELEMENTS (smi-only)...
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastHoleySmiElementValue));
  j(below_equal, fail, distance);
  // ...or if kind > FAST_HOLEY_ELEMENTS (not a fast kind at all).
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastHoleyElementValue));
  j(above, fail, distance);
}


// Jump to |fail| unless |map|'s elements kind is FAST_SMI_ELEMENTS or
// FAST_HOLEY_SMI_ELEMENTS.
void MacroAssembler::CheckFastSmiElements(Register map,
                                          Label* fail,
                                          Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastHoleySmiElementValue));
  j(above, fail, distance);
}
3636
3637
// Store the number in |maybe_number| (a smi or a HeapNumber) into slot
// |index| of the FixedDoubleArray |elements|, as an unboxed double.
// Jumps to |fail| if |maybe_number| is neither a smi nor a HeapNumber.
// Clobbers |xmm_scratch| and kScratchRegister.
void MacroAssembler::StoreNumberToDoubleElements(
    Register maybe_number,
    Register elements,
    Register index,
    XMMRegister xmm_scratch,
    Label* fail,
    int elements_offset) {
  Label smi_value, done;

  JumpIfSmi(maybe_number, &smi_value, Label::kNear);

  // Non-smi: must be a HeapNumber, otherwise bail out to |fail|.
  CheckMap(maybe_number,
           isolate()->factory()->heap_number_map(),
           fail,
           DONT_DO_SMI_CHECK);

  // Double value, turn potential sNaN into qNaN.
  Move(xmm_scratch, 1.0);
  mulsd(xmm_scratch, FieldOperand(maybe_number, HeapNumber::kValueOffset));
  jmp(&done, Label::kNear);

  bind(&smi_value);
  // Value is a smi. convert to a double and store.
  // Preserve original value.
  SmiToInteger32(kScratchRegister, maybe_number);
  Cvtlsi2sd(xmm_scratch, kScratchRegister);
  bind(&done);
  Movsd(FieldOperand(elements, index, times_8,
                     FixedDoubleArray::kHeaderSize - elements_offset),
        xmm_scratch);
}
3669
3670
// Compare |obj|'s map against |map|, setting flags for j(equal, ...).
void MacroAssembler::CompareMap(Register obj, Handle<Map> map) {
  Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
}


// Jump to |fail| unless |obj|'s map is exactly |map|. With DO_SMI_CHECK,
// a smi |obj| also goes to |fail| (smis have no map to read).
void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }

  CompareMap(obj, map);
  j(not_equal, fail);
}
3687
3688
// Clamp the 32-bit value in |reg| to [0, 255] in place: values already in
// range are untouched; negatives become 0; values > 255 become 255.
void MacroAssembler::ClampUint8(Register reg) {
  Label done;
  // Any bit above the low byte set means out of range.
  testl(reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  setcc(negative, reg);  // 1 if negative, 0 if positive.
  decb(reg);  // 0 if negative, 255 if positive.
  bind(&done);
}
3697
3698
// Clamp the double in |input_reg| to an integer in [0, 255], stored in
// |result_reg|. NaN clamps to 0. Clobbers |temp_xmm_reg| (zeroed for the
// NaN/sign comparison).
void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
                                        XMMRegister temp_xmm_reg,
                                        Register result_reg) {
  Label done;
  Label conv_failure;
  Xorpd(temp_xmm_reg, temp_xmm_reg);
  Cvtsd2si(result_reg, input_reg);
  // In range [0, 255]? Then the rounded value is the answer.
  testl(result_reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  // cvtsd2si yields 0x80000000 for NaN or out-of-range input; subtracting 1
  // from exactly that value overflows, which detects the failed conversion.
  cmpl(result_reg, Immediate(1));
  j(overflow, &conv_failure, Label::kNear);
  // Conversion succeeded but out of [0, 255]: sign decides the clamp.
  movl(result_reg, Immediate(0));
  setcc(sign, result_reg);        // 1 if the converted value was negative.
  subl(result_reg, Immediate(1));  // 0 if negative, -1 if above range.
  andl(result_reg, Immediate(255));  // -> 0 or 255.
  jmp(&done, Label::kNear);
  bind(&conv_failure);
  // Failed conversion: negative infinity/very negative -> 0, NaN -> 0,
  // positive overflow -> 255 (ucomisd with 0.0 orders the cases).
  Set(result_reg, 0);
  Ucomisd(input_reg, temp_xmm_reg);
  j(below, &done, Label::kNear);
  Set(result_reg, 255);
  bind(&done);
}
3722
3723
// Convert the uint32 in |src| (upper 32 bits must already be zero) to a
// double in |dst|. The 64-bit signed conversion is exact for any uint32.
void MacroAssembler::LoadUint32(XMMRegister dst,
                                Register src) {
  if (FLAG_debug_code) {
    cmpq(src, Immediate(0xffffffff));
    Assert(below_equal, kInputGPRIsExpectedToHaveUpper32Cleared);
  }
  Cvtqsi2sd(dst, src);
}
3732
3733
// Slow-path ECMA-262 ToInt32 truncation: calls the DoubleToIStub on the
// double stored at [input_reg + offset], leaving the result in result_reg.
void MacroAssembler::SlowTruncateToI(Register result_reg,
                                     Register input_reg,
                                     int offset) {
  DoubleToIStub stub(isolate(), input_reg, result_reg, offset, true);
  call(stub.GetCode(), RelocInfo::CODE_TARGET);
}
3740
3741
// Truncate the HeapNumber in |input_reg| to an int32 in |result_reg|
// (ECMA-262 ToInt32 semantics). Fast path uses cvttsd2si; values that do not
// fit go through SlowTruncateToI. Clobbers xmm0.
void MacroAssembler::TruncateHeapNumberToI(Register result_reg,
                                           Register input_reg) {
  Label done;
  Movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
  Cvttsd2siq(result_reg, xmm0);
  // cvttsd2siq produces 0x8000000000000000 on failure; cmp with 1 then
  // overflows, which routes us to the slow path.
  cmpq(result_reg, Immediate(1));
  j(no_overflow, &done, Label::kNear);

  // Slow case.
  if (input_reg.is(result_reg)) {
    // result_reg would clobber the HeapNumber pointer the stub needs, so
    // spill the double to the stack and truncate from there.
    subp(rsp, Immediate(kDoubleSize));
    Movsd(MemOperand(rsp, 0), xmm0);
    SlowTruncateToI(result_reg, rsp, 0);
    addp(rsp, Immediate(kDoubleSize));
  } else {
    SlowTruncateToI(result_reg, input_reg);
  }

  bind(&done);
  // Keep our invariant that the upper 32 bits are zero.
  movl(result_reg, result_reg);
}
3764
3765
// Truncate the double in |input_reg| to an int32 in |result_reg|
// (ECMA-262 ToInt32 semantics). Values that do not fit are spilled to the
// stack and handled by SlowTruncateToI.
void MacroAssembler::TruncateDoubleToI(Register result_reg,
                                       XMMRegister input_reg) {
  Label done;
  Cvttsd2siq(result_reg, input_reg);
  // cvttsd2siq produces 0x8000000000000000 on failure; cmp with 1 then
  // overflows, which routes us to the slow path.
  cmpq(result_reg, Immediate(1));
  j(no_overflow, &done, Label::kNear);

  subp(rsp, Immediate(kDoubleSize));
  Movsd(MemOperand(rsp, 0), input_reg);
  SlowTruncateToI(result_reg, rsp, 0);
  addp(rsp, Immediate(kDoubleSize));

  bind(&done);
  // Keep our invariant that the upper 32 bits are zero.
  movl(result_reg, result_reg);
}
3782
3783
// Convert the double in |input_reg| to an int32 in |result_reg|, bailing out
// if the conversion is not exact: jumps to |lost_precision| when the value
// has a fractional part or is out of int32 range, to |is_nan| for NaN, and
// (under FAIL_ON_MINUS_ZERO) to |minus_zero| for -0.0. Clobbers xmm0.
// NOTE(review): the |scratch| parameter is not used in this body — presumably
// kept for signature parity with other ports; confirm before removing.
void MacroAssembler::DoubleToI(Register result_reg, XMMRegister input_reg,
                               XMMRegister scratch,
                               MinusZeroMode minus_zero_mode,
                               Label* lost_precision, Label* is_nan,
                               Label* minus_zero, Label::Distance dst) {
  // Round-trip: truncate to int32, convert back, and compare with the input.
  Cvttsd2si(result_reg, input_reg);
  Cvtlsi2sd(xmm0, result_reg);
  Ucomisd(xmm0, input_reg);
  j(not_equal, lost_precision, dst);
  j(parity_even, is_nan, dst);  // NaN.
  if (minus_zero_mode == FAIL_ON_MINUS_ZERO) {
    Label done;
    // The integer converted back is equal to the original. We
    // only have to test if we got -0 as an input.
    testl(result_reg, result_reg);
    j(not_zero, &done, Label::kNear);
    Movmskpd(result_reg, input_reg);
    // Bit 0 contains the sign of the double in input_reg.
    // If input was positive, we are ok and return 0, otherwise
    // jump to minus_zero.
    andl(result_reg, Immediate(1));
    j(not_zero, minus_zero, dst);
    bind(&done);
  }
}
3809
3810
// Load the DescriptorArray of |map| into |descriptors|.
void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  movp(descriptors, FieldOperand(map, Map::kDescriptorsOffset));
}


// Extract the number-of-own-descriptors field from |map|'s bit field 3
// into |dst| (as an int32).
void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
  movl(dst, FieldOperand(map, Map::kBitField3Offset));
  DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
}


// Extract |map|'s enum-cache length from bit field 3 into |dst|, as a smi.
void MacroAssembler::EnumLength(Register dst, Register map) {
  STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
  movl(dst, FieldOperand(map, Map::kBitField3Offset));
  andl(dst, Immediate(Map::EnumLengthBits::kMask));
  Integer32ToSmi(dst, dst);
}
3829
3830
// Load the getter or setter of accessor |accessor_index| on |holder|'s map
// into |dst|: map -> descriptor array -> AccessorPair -> chosen component.
// |dst| is used as the cursor throughout, so its original value is lost.
void MacroAssembler::LoadAccessor(Register dst, Register holder,
                                  int accessor_index,
                                  AccessorComponent accessor) {
  movp(dst, FieldOperand(holder, HeapObject::kMapOffset));
  LoadInstanceDescriptors(dst, dst);
  movp(dst, FieldOperand(dst, DescriptorArray::GetValueOffset(accessor_index)));
  int offset = accessor == ACCESSOR_GETTER ? AccessorPair::kGetterOffset
                                           : AccessorPair::kSetterOffset;
  movp(dst, FieldOperand(dst, offset));
}
3841
3842
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003843void MacroAssembler::DispatchWeakMap(Register obj, Register scratch1,
3844 Register scratch2, Handle<WeakCell> cell,
3845 Handle<Code> success,
3846 SmiCheckType smi_check_type) {
Ben Murdoch257744e2011-11-30 15:57:28 +00003847 Label fail;
3848 if (smi_check_type == DO_SMI_CHECK) {
3849 JumpIfSmi(obj, &fail);
3850 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003851 movq(scratch1, FieldOperand(obj, HeapObject::kMapOffset));
3852 CmpWeakValue(scratch1, cell, scratch2);
Ben Murdoch257744e2011-11-30 15:57:28 +00003853 j(equal, success, RelocInfo::CODE_TARGET);
Ben Murdoch257744e2011-11-30 15:57:28 +00003854 bind(&fail);
3855}
3856
3857
// Debug-code-only check that |object| is a number (smi or HeapNumber);
// aborts with kOperandIsNotANumber otherwise. No-op in release builds.
void MacroAssembler::AssertNumber(Register object) {
  if (emit_debug_code()) {
    Label ok;
    Condition is_smi = CheckSmi(object);
    j(is_smi, &ok, Label::kNear);
    Cmp(FieldOperand(object, HeapObject::kMapOffset),
        isolate()->factory()->heap_number_map());
    Check(equal, kOperandIsNotANumber);
    bind(&ok);
  }
}
3869
3870
// Debug-code-only check that |object| is NOT a smi. No-op in release builds.
void MacroAssembler::AssertNotSmi(Register object) {
  if (emit_debug_code()) {
    Condition is_smi = CheckSmi(object);
    Check(NegateCondition(is_smi), kOperandIsASmi);
  }
}


// Debug-code-only check that |object| is a smi. No-op in release builds.
void MacroAssembler::AssertSmi(Register object) {
  if (emit_debug_code()) {
    Condition is_smi = CheckSmi(object);
    Check(is_smi, kOperandIsNotASmi);
  }
}


// Memory-operand variant of AssertSmi above.
void MacroAssembler::AssertSmi(const Operand& object) {
  if (emit_debug_code()) {
    Condition is_smi = CheckSmi(object);
    Check(is_smi, kOperandIsNotASmi);
  }
}
3893
3894
// Debug-code-only check that the upper 32 bits of |int32_register| are zero,
// i.e. the register holds a properly zero-extended 32-bit value. Clobbers
// kScratchRegister; no-op in release builds.
void MacroAssembler::AssertZeroExtended(Register int32_register) {
  if (emit_debug_code()) {
    DCHECK(!int32_register.is(kScratchRegister));
    // 2^32: any zero-extended 32-bit value is strictly below this.
    movq(kScratchRegister, V8_INT64_C(0x0000000100000000));
    cmpq(kScratchRegister, int32_register);
    Check(above_equal, k32BitValueInRegisterIsNotZeroExtended);
  }
}
3903
3904
// Debug-code-only check that |object| is a string (not a smi, instance type
// below FIRST_NONSTRING_TYPE). |object| is preserved via push/pop around the
// map load. No-op in release builds.
void MacroAssembler::AssertString(Register object) {
  if (emit_debug_code()) {
    testb(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAString);
    Push(object);
    movp(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, FIRST_NONSTRING_TYPE);
    Pop(object);
    Check(below, kOperandIsNotAString);
  }
}


// Debug-code-only check that |object| is a name (string or symbol: instance
// type <= LAST_NAME_TYPE). |object| is preserved via push/pop. No-op in
// release builds.
void MacroAssembler::AssertName(Register object) {
  if (emit_debug_code()) {
    testb(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAName);
    Push(object);
    movp(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, LAST_NAME_TYPE);
    Pop(object);
    Check(below_equal, kOperandIsNotAName);
  }
}
3929
3930
// Debug-code-only check that |object| is a JSFunction. |object| is preserved
// via push/pop around the type check. No-op in release builds.
void MacroAssembler::AssertFunction(Register object) {
  if (emit_debug_code()) {
    testb(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAFunction);
    Push(object);
    CmpObjectType(object, JS_FUNCTION_TYPE, object);
    Pop(object);
    Check(equal, kOperandIsNotAFunction);
  }
}


// Debug-code-only check that |object| is a JSBoundFunction. |object| is
// preserved via push/pop. No-op in release builds.
void MacroAssembler::AssertBoundFunction(Register object) {
  if (emit_debug_code()) {
    testb(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotABoundFunction);
    Push(object);
    CmpObjectType(object, JS_BOUND_FUNCTION_TYPE, object);
    Pop(object);
    Check(equal, kOperandIsNotABoundFunction);
  }
}


// Debug-code-only check that |object| is a JSReceiver (instance type at or
// above FIRST_JS_RECEIVER_TYPE). |object| is preserved via push/pop. No-op
// in release builds.
void MacroAssembler::AssertReceiver(Register object) {
  if (emit_debug_code()) {
    testb(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAReceiver);
    Push(object);
    STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
    CmpObjectType(object, FIRST_JS_RECEIVER_TYPE, object);
    Pop(object);
    Check(above_equal, kOperandIsNotAReceiver);
  }
}
3966
3967
// Debug-code-only check that |object| is either the undefined value or an
// AllocationSite (checked via its map at offset 0). No-op in release builds.
void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) {
  if (emit_debug_code()) {
    Label done_checking;
    AssertNotSmi(object);
    Cmp(object, isolate()->factory()->undefined_value());
    j(equal, &done_checking);
    Cmp(FieldOperand(object, 0), isolate()->factory()->allocation_site_map());
    Assert(equal, kExpectedUndefinedOrCell);
    bind(&done_checking);
  }
}


// Debug-code-only check that |src| holds exactly the root-list value at
// |root_value_index|; aborts with |reason| otherwise. Clobbers
// kScratchRegister; no-op in release builds.
void MacroAssembler::AssertRootValue(Register src,
                                     Heap::RootListIndex root_value_index,
                                     BailoutReason reason) {
  if (emit_debug_code()) {
    DCHECK(!src.is(kScratchRegister));
    LoadRoot(kScratchRegister, root_value_index);
    cmpp(src, kScratchRegister);
    Check(equal, reason);
  }
}
3991
3992
3993
// Test whether |heap_object| is a string. Side effects: loads the object's
// map into |map| and its instance type into |instance_type|. Returns the
// condition that holds when the object is a string (use with j(...)).
Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  movp(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  testb(instance_type, Immediate(kIsNotStringMask));
  return zero;
}


// Test whether |heap_object| is a name (string or symbol). Side effects as
// in IsObjectStringType. Returns the condition that holds when it is a name.
Condition MacroAssembler::IsObjectNameType(Register heap_object,
                                           Register map,
                                           Register instance_type) {
  movp(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  cmpb(instance_type, Immediate(static_cast<uint8_t>(LAST_NAME_TYPE)));
  return below_equal;
}
4013
4014
// Load the constructor of |map| into |result|, following the back-pointer
// chain: the constructor-or-back-pointer slot may hold another Map, in which
// case we keep walking until we reach a non-map (the actual constructor or a
// smi). Clobbers |temp|.
void MacroAssembler::GetMapConstructor(Register result, Register map,
                                       Register temp) {
  Label done, loop;
  movp(result, FieldOperand(map, Map::kConstructorOrBackPointerOffset));
  bind(&loop);
  JumpIfSmi(result, &done, Label::kNear);
  CmpObjectType(result, MAP_TYPE, temp);
  j(not_equal, &done, Label::kNear);
  movp(result, FieldOperand(result, Map::kConstructorOrBackPointerOffset));
  jmp(&loop);
  bind(&done);
}
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004027
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004028
// Load |function|'s prototype into |result|, jumping to |miss| when it is
// not available (the hole). If the function has an initial map, the
// prototype is read from that map. Clobbers kScratchRegister.
void MacroAssembler::TryGetFunctionPrototype(Register function, Register result,
                                             Label* miss) {
  // Get the prototype or initial map from the function.
  movp(result,
       FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  CompareRoot(result, Heap::kTheHoleValueRootIndex);
  j(equal, miss);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, kScratchRegister);
  j(not_equal, &done, Label::kNear);

  // Get the prototype from the initial map.
  movp(result, FieldOperand(result, Map::kPrototypeOffset));

  // All done.
  bind(&done);
}
4052
4053
// Emit code that sets the stats counter to |value|. Emits nothing when
// native-code counters are disabled or the counter is not enabled.
void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand counter_operand = ExternalOperand(ExternalReference(counter));
    movl(counter_operand, Immediate(value));
  }
}


// Emit code that adds |value| (> 0) to the stats counter; uses incl for the
// common value == 1 case. Emits nothing when counters are disabled.
void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand counter_operand = ExternalOperand(ExternalReference(counter));
    if (value == 1) {
      incl(counter_operand);
    } else {
      addl(counter_operand, Immediate(value));
    }
  }
}


// Emit code that subtracts |value| (> 0) from the stats counter; uses decl
// for the common value == 1 case. Emits nothing when counters are disabled.
void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand counter_operand = ExternalOperand(ExternalReference(counter));
    if (value == 1) {
      decl(counter_operand);
    } else {
      subl(counter_operand, Immediate(value));
    }
  }
}
4086
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004087
// Emits a call into the runtime that handles a debugger statement:
// rax = argument count (zero), rbx = runtime entry, dispatched through a
// CEntryStub with DEBUGGER_STATEMENT reloc info so the debugger can
// recognize the site.
void MacroAssembler::DebugBreak() {
  Set(rax, 0);  // No arguments.
  LoadAddress(rbx,
              ExternalReference(Runtime::kHandleDebuggerStatement, isolate()));
  CEntryStub ces(isolate(), 1);
  DCHECK(AllowThisStubCall(&ces));
  Call(ces.GetCode(), RelocInfo::DEBUGGER_STATEMENT);
}
Ben Murdoch257744e2011-11-30 15:57:28 +00004096
4097
// Invokes the JSFunction in |function|, reading the expected argument count
// from the function's SharedFunctionInfo. Clobbers rbx (holds the formal
// parameter count, then the ParameterCount register).
void MacroAssembler::InvokeFunction(Register function,
                                    Register new_target,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  movp(rbx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
  LoadSharedFunctionInfoSpecialField(
      rbx, rbx, SharedFunctionInfo::kFormalParameterCountOffset);

  ParameterCount expected(rbx);
  InvokeFunction(function, new_target, expected, actual, flag, call_wrapper);
}
4110
4111
// Invokes a compile-time-known |function| by materializing its handle in rdi
// and delegating to the register-based overload. No new.target is passed.
void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  Move(rdi, function);
  InvokeFunction(rdi, no_reg, expected, actual, flag, call_wrapper);
}
4120
4121
// Invokes the JSFunction in |function| (must be rdi per the calling
// convention): loads the function's context into rsi, then dispatches to
// InvokeFunctionCode.
void MacroAssembler::InvokeFunction(Register function,
                                    Register new_target,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  DCHECK(function.is(rdi));
  movp(rsi, FieldOperand(function, JSFunction::kContextOffset));
  InvokeFunctionCode(rdi, new_target, expected, actual, flag, call_wrapper);
}
4132
4133
// Calls or tail-jumps (per |flag|) to the code of the JSFunction in
// |function| (must be rdi; |new_target|, when valid, must be rdx). Handles
// debugger step-in flooding and argument-count adaptation via
// InvokePrologue. If the prologue proves an argument mismatch, control has
// already been transferred to the adaptor and nothing more is emitted.
void MacroAssembler::InvokeFunctionCode(Register function, Register new_target,
                                        const ParameterCount& expected,
                                        const ParameterCount& actual,
                                        InvokeFlag flag,
                                        const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());
  DCHECK(function.is(rdi));
  DCHECK_IMPLIES(new_target.is_valid(), new_target.is(rdx));

  if (call_wrapper.NeedsDebugStepCheck()) {
    FloodFunctionIfStepping(function, new_target, expected, actual);
  }

  // Clear the new.target register if not given.
  if (!new_target.is_valid()) {
    LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
  }

  Label done;
  bool definitely_mismatches = false;
  InvokePrologue(expected,
                 actual,
                 &done,
                 &definitely_mismatches,
                 flag,
                 Label::kNear,
                 call_wrapper);
  if (!definitely_mismatches) {
    // We call indirectly through the code field in the function to
    // allow recompilation to take effect without changing any of the
    // call sites.
    Operand code = FieldOperand(function, JSFunction::kCodeEntryOffset);
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(code));
      call(code);
      call_wrapper.AfterCall();
    } else {
      DCHECK(flag == JUMP_FUNCTION);
      jmp(code);
    }
    bind(&done);
  }
}
4178
4179
// Emits the argument-count check that precedes a function invocation.
// Ensures rax holds the actual argument count and, when expected != actual,
// transfers control to the ArgumentsAdaptorTrampoline (rbx = expected count
// per the adaptor's convention). On a provable compile-time mismatch,
// *definitely_mismatches is set and the adaptor call/jump replaces the
// invocation entirely; otherwise execution falls through (or jumps to
// |done| after the adaptor returns) for the caller to emit the real call.
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Label* done,
                                    bool* definitely_mismatches,
                                    InvokeFlag flag,
                                    Label::Distance near_jump,
                                    const CallWrapper& call_wrapper) {
  bool definitely_matches = false;
  *definitely_mismatches = false;
  Label invoke;
  if (expected.is_immediate()) {
    DCHECK(actual.is_immediate());
    Set(rax, actual.immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      if (expected.immediate() ==
          SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
        // Don't worry about adapting arguments for built-ins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        *definitely_mismatches = true;
        Set(rbx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      Set(rax, actual.immediate());
      cmpp(expected.reg(), Immediate(actual.immediate()));
      j(equal, &invoke, Label::kNear);
      DCHECK(expected.reg().is(rbx));
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmpp(expected.reg(), actual.reg());
      j(equal, &invoke, Label::kNear);
      DCHECK(actual.reg().is(rax));
      DCHECK(expected.reg().is(rbx));
    } else {
      // Expected and actual share a register: counts trivially match;
      // just make sure the count also lives in rax.
      Move(rax, actual.reg());
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor = isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor));
      Call(adaptor, RelocInfo::CODE_TARGET);
      call_wrapper.AfterCall();
      if (!*definitely_mismatches) {
        // The adaptor performed the call itself; skip the caller's
        // invocation sequence.
        jmp(done, near_jump);
      }
    } else {
      Jump(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
4244
4245
// If the debugger's step-in flag is set, calls the runtime to prepare
// stepping into |fun|, preserving every live invocation register (expected
// and actual counts are smi-tagged while pushed so the GC can scan them).
// Otherwise emits only the flag check.
void MacroAssembler::FloodFunctionIfStepping(Register fun, Register new_target,
                                             const ParameterCount& expected,
                                             const ParameterCount& actual) {
  Label skip_flooding;
  ExternalReference step_in_enabled =
      ExternalReference::debug_step_in_enabled_address(isolate());
  Operand step_in_enabled_operand = ExternalOperand(step_in_enabled);
  cmpb(step_in_enabled_operand, Immediate(0));
  j(equal, &skip_flooding);
  {
    // Runtime calls need a frame; only build one if we don't have it yet.
    FrameScope frame(this,
                     has_frame() ? StackFrame::NONE : StackFrame::INTERNAL);
    if (expected.is_reg()) {
      Integer32ToSmi(expected.reg(), expected.reg());
      Push(expected.reg());
    }
    if (actual.is_reg()) {
      Integer32ToSmi(actual.reg(), actual.reg());
      Push(actual.reg());
    }
    if (new_target.is_valid()) {
      Push(new_target);
    }
    // |fun| is pushed twice: the lower copy is preserved across the call
    // (popped below), the upper copy is the runtime call's argument.
    Push(fun);
    Push(fun);
    CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
    Pop(fun);
    if (new_target.is_valid()) {
      Pop(new_target);
    }
    if (actual.is_reg()) {
      Pop(actual.reg());
      SmiToInteger64(actual.reg(), actual.reg());
    }
    if (expected.is_reg()) {
      Pop(expected.reg());
      SmiToInteger64(expected.reg(), expected.reg());
    }
  }
  bind(&skip_flooding);
}
4287
4288
// Emits the prologue for stub frames: standard rbp link, callee context,
// and a STUB frame-type marker instead of a JS function.
void MacroAssembler::StubPrologue() {
  pushq(rbp);  // Caller's frame pointer.
  movp(rbp, rsp);
  Push(rsi);  // Callee's context.
  Push(Smi::FromInt(StackFrame::STUB));
}
4295
4296
4297void MacroAssembler::Prologue(bool code_pre_aging) {
4298 PredictableCodeSizeScope predictible_code_size_scope(this,
4299 kNoCodeAgeSequenceLength);
4300 if (code_pre_aging) {
4301 // Pre-age the code.
4302 Call(isolate()->builtins()->MarkCodeAsExecutedOnce(),
4303 RelocInfo::CODE_AGE_SEQUENCE);
4304 Nop(kNoCodeAgeSequenceLength - Assembler::kShortCallInstructionLength);
4305 } else {
4306 pushq(rbp); // Caller's frame pointer.
4307 movp(rbp, rsp);
4308 Push(rsi); // Callee's context.
4309 Push(rdi); // Callee's JS function.
4310 }
4311}
4312
4313
// Loads the current frame's type feedback vector into |vector| by chasing
// frame function -> SharedFunctionInfo -> feedback vector.
void MacroAssembler::EmitLoadTypeFeedbackVector(Register vector) {
  movp(vector, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  movp(vector, FieldOperand(vector, JSFunction::kSharedFunctionInfoOffset));
  movp(vector, FieldOperand(vector, SharedFunctionInfo::kFeedbackVectorOffset));
}
4319
4320
// Overload taking a constant-pool flag exists only to satisfy the shared
// MacroAssembler interface; x64 never uses an out-of-line constant pool.
void MacroAssembler::EnterFrame(StackFrame::Type type,
                                bool load_constant_pool_pointer_reg) {
  // Out-of-line constant pool not implemented on x64.
  UNREACHABLE();
}
4326
4327
// Builds an internal frame of the given |type|: rbp link, context, frame
// marker, and the code object currently being generated (clobbers
// kScratchRegister). In debug mode, verifies the code object slot was
// patched to something other than the undefined placeholder.
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  pushq(rbp);
  movp(rbp, rsp);
  Push(rsi);  // Context.
  Push(Smi::FromInt(type));
  Move(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
  Push(kScratchRegister);
  if (emit_debug_code()) {
    Move(kScratchRegister,
         isolate()->factory()->undefined_value(),
         RelocInfo::EMBEDDED_OBJECT);
    cmpp(Operand(rsp, 0), kScratchRegister);
    Check(not_equal, kCodeObjectNotProperlyPatched);
  }
}
4343
4344
// Tears down a frame built by EnterFrame. In debug mode, first verifies the
// frame's type marker matches |type| (clobbers kScratchRegister).
void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (emit_debug_code()) {
    Move(kScratchRegister, Smi::FromInt(type));
    cmpp(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
    Check(equal, kStackFrameTypesMustMatch);
  }
  movp(rsp, rbp);
  popq(rbp);
}
4354
4355
// Lays out the fixed part of an exit frame (frame used when calling out to
// C): rbp link, a slot for the entry sp (patched later by the epilogue),
// and the code object. Publishes rbp/rsi/rbx into the isolate's
// c_entry_fp / context / c_function slots. When |save_rax| is set, rax is
// preserved in callee-saved r14 (consumed later by EnterExitFrame).
void MacroAssembler::EnterExitFramePrologue(bool save_rax) {
  // Set up the frame structure on the stack.
  // All constants are relative to the frame pointer of the exit frame.
  DCHECK(ExitFrameConstants::kCallerSPDisplacement ==
         kFPOnStackSize + kPCOnStackSize);
  DCHECK(ExitFrameConstants::kCallerPCOffset == kFPOnStackSize);
  DCHECK(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  pushq(rbp);
  movp(rbp, rsp);

  // Reserve room for entry stack pointer and push the code object.
  DCHECK(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  Push(Immediate(0));  // Saved entry sp, patched before call.
  Move(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
  Push(kScratchRegister);  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  if (save_rax) {
    movp(r14, rax);  // Backup rax in callee-save register.
  }

  Store(ExternalReference(Isolate::kCEntryFPAddress, isolate()), rbp);
  Store(ExternalReference(Isolate::kContextAddress, isolate()), rsi);
  Store(ExternalReference(Isolate::kCFunctionAddress, isolate()), rbx);
}
Steve Blocka7e24c12009-10-30 11:49:00 +00004381
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08004382
// Finishes exit-frame setup: reserves C argument stack space (plus Win64
// shadow space), optionally spills all allocatable XMM registers below the
// frame, aligns rsp to the OS frame alignment, and records the final rsp in
// the frame's entry-sp slot.
void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space,
                                            bool save_doubles) {
#ifdef _WIN64
  // Windows x64 ABI requires 4 register-sized "shadow" slots for the callee.
  const int kShadowSpace = 4;
  arg_stack_space += kShadowSpace;
#endif
  // Optionally save all XMM registers.
  if (save_doubles) {
    int space = XMMRegister::kMaxNumRegisters * kDoubleSize +
                arg_stack_space * kRegisterSize;
    subp(rsp, Immediate(space));
    // Doubles are stored below the entry-sp and code-object slots.
    int offset = -2 * kPointerSize;
    const RegisterConfiguration* config =
        RegisterConfiguration::ArchDefault(RegisterConfiguration::CRANKSHAFT);
    for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
      DoubleRegister reg =
          DoubleRegister::from_code(config->GetAllocatableDoubleCode(i));
      Movsd(Operand(rbp, offset - ((i + 1) * kDoubleSize)), reg);
    }
  } else if (arg_stack_space > 0) {
    subp(rsp, Immediate(arg_stack_space * kRegisterSize));
  }

  // Get the required frame alignment for the OS.
  const int kFrameAlignment = base::OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    DCHECK(base::bits::IsPowerOfTwo32(kFrameAlignment));
    DCHECK(is_int8(kFrameAlignment));
    andp(rsp, Immediate(-kFrameAlignment));
  }

  // Patch the saved entry sp.
  movp(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
}
4417
4418
// Builds a full exit frame for a runtime call: prologue (preserving the
// argument count from rax in r14), argv pointer in callee-saved r15, then
// the epilogue for argument space / double spills.
void MacroAssembler::EnterExitFrame(int arg_stack_space, bool save_doubles) {
  EnterExitFramePrologue(true);

  // Set up argv in callee-saved register r15. It is reused in LeaveExitFrame,
  // so it must be retained across the C-call.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  leap(r15, Operand(rbp, r14, times_pointer_size, offset));

  EnterExitFrameEpilogue(arg_stack_space, save_doubles);
}
4429
4430
// Builds an exit frame for an API callback: no rax/argv bookkeeping and no
// double-register spilling, just the frame structure plus argument space.
void MacroAssembler::EnterApiExitFrame(int arg_stack_space) {
  EnterExitFramePrologue(false);
  EnterExitFrameEpilogue(arg_stack_space, false);
}
4435
4436
// Tears down a frame built by EnterExitFrame: optionally restores the
// spilled XMM registers, then either drops the JS arguments (using the argv
// pointer kept in r15) or simply unwinds the frame, and finally restores
// the isolate's context/top-frame bookkeeping.
void MacroAssembler::LeaveExitFrame(bool save_doubles, bool pop_arguments) {
  // Registers:
  // r15 : argv
  if (save_doubles) {
    // Mirror of the spill loop in EnterExitFrameEpilogue.
    int offset = -2 * kPointerSize;
    const RegisterConfiguration* config =
        RegisterConfiguration::ArchDefault(RegisterConfiguration::CRANKSHAFT);
    for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
      DoubleRegister reg =
          DoubleRegister::from_code(config->GetAllocatableDoubleCode(i));
      Movsd(reg, Operand(rbp, offset - ((i + 1) * kDoubleSize)));
    }
  }

  if (pop_arguments) {
    // Get the return address from the stack and restore the frame pointer.
    movp(rcx, Operand(rbp, kFPOnStackSize));
    movp(rbp, Operand(rbp, 0 * kPointerSize));

    // Drop everything up to and including the arguments and the receiver
    // from the caller stack.
    leap(rsp, Operand(r15, 1 * kPointerSize));

    PushReturnAddressFrom(rcx);
  } else {
    // Otherwise just leave the exit frame.
    leave();
  }

  LeaveExitFrameEpilogue(true);
}
4468
4469
// Tears down a frame built by EnterApiExitFrame; |restore_context| controls
// whether rsi is reloaded from the isolate's saved context.
void MacroAssembler::LeaveApiExitFrame(bool restore_context) {
  movp(rsp, rbp);
  popq(rbp);

  LeaveExitFrameEpilogue(restore_context);
}
4476
4477
// Shared exit-frame teardown tail: optionally restores rsi from the
// isolate's saved context (clearing the saved slot in debug builds) and
// zeroes the isolate's c_entry_fp to mark that we are back in JS.
void MacroAssembler::LeaveExitFrameEpilogue(bool restore_context) {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  Operand context_operand = ExternalOperand(context_address);
  if (restore_context) {
    movp(rsi, context_operand);
  }
#ifdef DEBUG
  movp(context_operand, Immediate(0));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
                                       isolate());
  Operand c_entry_fp_operand = ExternalOperand(c_entry_fp_address);
  movp(c_entry_fp_operand, Immediate(0));
}
4495
4496
// Security check for cross-context access through a global proxy
// (|holder_reg|): passes when the holder's native context is the current
// one, or when the two native contexts carry the same security token;
// otherwise jumps to |miss|. Clobbers |scratch| and kScratchRegister.
void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  DCHECK(!holder_reg.is(scratch));
  DCHECK(!scratch.is(kScratchRegister));
  // Load current lexical context from the stack frame.
  movp(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (emit_debug_code()) {
    cmpp(scratch, Immediate(0));
    Check(not_equal, kWeShouldNotHaveAnEmptyLexicalContext);
  }
  // Load the native context of the current context.
  movp(scratch, ContextOperand(scratch, Context::NATIVE_CONTEXT_INDEX));

  // Check the context is a native context.
  if (emit_debug_code()) {
    Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
        isolate()->factory()->native_context_map());
    Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
  }

  // Check if both contexts are the same.
  cmpp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  j(equal, &same_contexts);

  // Compare security tokens.
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.

  // Check the context is a native context.
  if (emit_debug_code()) {
    // Preserve original value of holder_reg.
    Push(holder_reg);
    movp(holder_reg,
         FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
    CompareRoot(holder_reg, Heap::kNullValueRootIndex);
    Check(not_equal, kJSGlobalProxyContextShouldNotBeNull);

    // Read the first word and compare to native_context_map().
    movp(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    CompareRoot(holder_reg, Heap::kNativeContextMapRootIndex);
    Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
    Pop(holder_reg);
  }

  // Compare the security tokens of the current and holder native contexts.
  movp(kScratchRegister,
       FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  int token_offset =
      Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
  movp(scratch, FieldOperand(scratch, token_offset));
  cmpp(scratch, FieldOperand(kScratchRegister, token_offset));
  j(not_equal, miss);

  bind(&same_contexts);
}
4557
4558
// Compute the hash code from the untagged key. This must be kept in sync with
// ComputeIntegerHash in utils.h and KeyedLoadGenericStub in
// code-stubs-hydrogen.cc.
// Emits the seeded integer-hash computation over the untagged key in |r0|,
// leaving the hash in |r0|. Clobbers |scratch|.
void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
  // First of all we assign the hash seed to scratch.
  LoadRoot(scratch, Heap::kHashSeedRootIndex);
  SmiToInteger32(scratch, scratch);

  // Xor original key with a seed.
  xorl(r0, scratch);

  // Compute the hash code from the untagged key. This must be kept in sync
  // with ComputeIntegerHash in utils.h.
  //
  // hash = ~hash + (hash << 15);
  movl(scratch, r0);
  notl(r0);
  shll(scratch, Immediate(15));
  addl(r0, scratch);
  // hash = hash ^ (hash >> 12);
  movl(scratch, r0);
  shrl(scratch, Immediate(12));
  xorl(r0, scratch);
  // hash = hash + (hash << 2);  (lea computes r0 + r0*4 = r0*5 = r0 + (r0<<2))
  leal(r0, Operand(r0, r0, times_4, 0));
  // hash = hash ^ (hash >> 4);
  movl(scratch, r0);
  shrl(scratch, Immediate(4));
  xorl(r0, scratch);
  // hash = hash * 2057;
  imull(r0, r0, Immediate(2057));
  // hash = hash ^ (hash >> 16);
  movl(scratch, r0);
  shrl(scratch, Immediate(16));
  xorl(r0, scratch);
  // Keep only the low 30 bits of the hash.
  andl(r0, Immediate(0x3fffffff));
}
4596
4597
4598
// Looks up the smi |key| in the seeded number dictionary |elements| with an
// unrolled sequence of quadratic probes. On success the value is left in
// |result|; on a missing key or a non-DATA property, jumps to |miss|.
void MacroAssembler::LoadFromNumberDictionary(Label* miss,
                                              Register elements,
                                              Register key,
                                              Register r0,
                                              Register r1,
                                              Register r2,
                                              Register result) {
  // Register use:
  //
  // elements - holds the slow-case elements of the receiver on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // key      - holds the smi key on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // Scratch registers:
  //
  // r0 - holds the untagged key on entry and holds the hash once computed.
  //
  // r1 - used to hold the capacity mask of the dictionary.
  //
  // r2 - used for the index into the dictionary.
  //
  // result - holds the result on exit if the load succeeded.
  //          Allowed to be the same as 'elements' or 'key'.
  //          Unchanged on bailout so 'key' or 'result' can be used
  //          in further computation.

  Label done;

  GetNumberHash(r0, r1);

  // Compute capacity mask (capacity is a power of two, so capacity - 1).
  SmiToInteger32(r1, FieldOperand(elements,
                                  SeededNumberDictionary::kCapacityOffset));
  decl(r1);

  // Generate an unrolled loop that performs a few probes before giving up.
  for (int i = 0; i < kNumberDictionaryProbes; i++) {
    // Use r2 for index calculations and keep the hash intact in r0.
    movp(r2, r0);
    // Compute the masked index: (hash + i + i * i) & mask.
    if (i > 0) {
      addl(r2, Immediate(SeededNumberDictionary::GetProbeOffset(i)));
    }
    andp(r2, r1);

    // Scale the index by multiplying by the entry size.
    DCHECK(SeededNumberDictionary::kEntrySize == 3);
    leap(r2, Operand(r2, r2, times_2, 0));  // r2 = r2 * 3

    // Check if the key matches.
    cmpp(key, FieldOperand(elements,
                           r2,
                           times_pointer_size,
                           SeededNumberDictionary::kElementsStartOffset));
    if (i != (kNumberDictionaryProbes - 1)) {
      j(equal, &done);
    } else {
      // Last probe: give up unless this entry is the key.
      j(not_equal, miss);
    }
  }

  bind(&done);
  // Check that the value is a field property (DATA encodes as type 0, so a
  // zero TypeField means DATA).
  const int kDetailsOffset =
      SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  DCHECK_EQ(DATA, 0);
  Test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
       Smi::FromInt(PropertyDetails::TypeField::kMask));
  j(not_zero, miss);

  // Get the value at the masked, scaled index.
  const int kValueOffset =
      SeededNumberDictionary::kElementsStartOffset + kPointerSize;
  movp(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
}
4677
// Loads the current allocation top (new-space or, per |flags|, another
// space) into |result|. When RESULT_CONTAINS_TOP is set, |result| must
// already hold it and |scratch| must be invalid; otherwise |scratch|, when
// valid, is left holding the address of the top slot for a later
// UpdateAllocationTopHelper.
void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    DCHECK(!scratch.is_valid());
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    Operand top_operand = ExternalOperand(allocation_top);
    cmpp(result, top_operand);
    Check(equal, kUnexpectedAllocationTop);
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available,
  // and keep address in scratch until call to UpdateAllocationTopHelper.
  if (scratch.is_valid()) {
    LoadAddress(scratch, allocation_top);
    movp(result, Operand(scratch, 0));
  } else {
    Load(result, allocation_top);
  }
}
4706
4707
// Ensures the allocation pointer in |result| is double-aligned. On targets
// where pointers are already double-sized this is only a debug check;
// otherwise a misaligned |result| is bumped past a one-pointer filler.
// Jumps to |gc_required| if the filler word would cross the pretenured
// space's limit.
void MacroAssembler::MakeSureDoubleAlignedHelper(Register result,
                                                 Register scratch,
                                                 Label* gc_required,
                                                 AllocationFlags flags) {
  if (kPointerSize == kDoubleSize) {
    // Pointer-size alignment already implies double alignment; just verify.
    if (FLAG_debug_code) {
      testl(result, Immediate(kDoubleAlignmentMask));
      Check(zero, kAllocationIsNotDoubleAligned);
    }
  } else {
    // Align the next allocation. Storing the filler map without checking top
    // is safe in new-space because the limit of the heap is aligned there.
    DCHECK(kPointerSize * 2 == kDoubleSize);
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    // Make sure scratch is not clobbered by this function as it might be
    // used in UpdateAllocationTopHelper later.
    DCHECK(!scratch.is(kScratchRegister));
    Label aligned;
    testl(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    if ((flags & PRETENURE) != 0) {
      // Old space is not guaranteed to have an aligned limit, so the filler
      // store must be limit-checked there.
      ExternalReference allocation_limit =
          AllocationUtils::GetAllocationLimitReference(isolate(), flags);
      cmpp(result, ExternalOperand(allocation_limit));
      j(above_equal, gc_required);
    }
    LoadRoot(kScratchRegister, Heap::kOnePointerFillerMapRootIndex);
    movp(Operand(result, 0), kScratchRegister);
    addp(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }
}
4740
4741
// Writes |result_end| back as the new allocation top. If |scratch| is valid
// it must already hold the address of the top pointer (as arranged by
// LoadAllocationTopHelper); otherwise the external reference is re-resolved.
void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch,
                                               AllocationFlags flags) {
  if (emit_debug_code()) {
    // The new top must remain object-aligned.
    testp(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, kUnalignedAllocationInNewSpace);
  }

  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Update new top.
  if (scratch.is_valid()) {
    // Scratch already contains address of allocation top.
    movp(Operand(scratch, 0), result_end);
  } else {
    Store(allocation_top, result_end);
  }
}
4761
4762
// Allocates a fixed-size object of |object_size| bytes by bumping the
// allocation top, jumping to |gc_required| on exhaustion. On success
// |result| holds the (optionally tagged) object. |result_end| and |scratch|
// may be no_reg; when |result_end| is invalid, |result| itself is used as
// the bump register and is rewound afterwards.
void MacroAssembler::Allocate(int object_size,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
  DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
  if (!FLAG_inline_new) {
    // Inline allocation disabled: always take the runtime path.
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      movl(result, Immediate(0x7091));
      if (result_end.is_valid()) {
        movl(result_end, Immediate(0x7191));
      }
      if (scratch.is_valid()) {
        movl(scratch, Immediate(0x7291));
      }
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    MakeSureDoubleAlignedHelper(result, scratch, gc_required, flags);
  }

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  Register top_reg = result_end.is_valid() ? result_end : result;

  if (!top_reg.is(result)) {
    movp(top_reg, result);
  }
  addp(top_reg, Immediate(object_size));
  // The add may wrap around the address space; that also means exhaustion.
  j(carry, gc_required);
  Operand limit_operand = ExternalOperand(allocation_limit);
  cmpp(top_reg, limit_operand);
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(top_reg, scratch, flags);

  bool tag_result = (flags & TAG_OBJECT) != 0;
  if (top_reg.is(result)) {
    // result currently holds the new top; rewind it to the object start,
    // folding in the heap-object tag when requested.
    if (tag_result) {
      subp(result, Immediate(object_size - kHeapObjectTag));
    } else {
      subp(result, Immediate(object_size));
    }
  } else if (tag_result) {
    // Tag the result if requested.
    DCHECK(kHeapObjectTag == 1);
    incp(result);
  }
}
4825
4826
// Allocates a variable-size object of |header_size| plus |element_count|
// elements scaled by |element_size|. Computes the byte size into
// |result_end| and delegates to the register-sized Allocate overload.
void MacroAssembler::Allocate(int header_size,
                              ScaleFactor element_size,
                              Register element_count,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & SIZE_IN_WORDS) == 0);
  // result_end = element_count * element_size + header_size.
  leap(result_end, Operand(element_count, element_size, header_size));
  Allocate(result_end, result, result_end, scratch, gc_required, flags);
}
4839
4840
// Allocates an object whose byte size is held in |object_size| (which is
// preserved), jumping to |gc_required| on exhaustion. On success |result|
// holds the (optionally tagged) object and |result_end| the new top.
void MacroAssembler::Allocate(Register object_size,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & SIZE_IN_WORDS) == 0);
  if (!FLAG_inline_new) {
    // Inline allocation disabled: always take the runtime path.
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      movl(result, Immediate(0x7091));
      movl(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        movl(scratch, Immediate(0x7291));
      }
      // object_size is left unchanged by this function.
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    MakeSureDoubleAlignedHelper(result, scratch, gc_required, flags);
  }

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);
  if (!object_size.is(result_end)) {
    movp(result_end, object_size);
  }
  addp(result_end, result);
  // The add may wrap around the address space; that also means exhaustion.
  j(carry, gc_required);
  Operand limit_operand = ExternalOperand(allocation_limit);
  cmpp(result_end, limit_operand);
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch, flags);

  // Tag the result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    addp(result, Immediate(kHeapObjectTag));
  }
}
4890
4891
// Allocates a HeapNumber (mutable or immutable per |mode|) into |result|,
// jumping to |gc_required| on failure. Only the map is initialized; the
// value field is left for the caller to fill in.
void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch,
                                        Label* gc_required,
                                        MutableMode mode) {
  // Allocate heap number in new space.
  Allocate(HeapNumber::kSize, result, scratch, no_reg, gc_required, TAG_OBJECT);

  Heap::RootListIndex map_index = mode == MUTABLE
      ? Heap::kMutableHeapNumberMapRootIndex
      : Heap::kHeapNumberMapRootIndex;

  // Set the map.
  LoadRoot(kScratchRegister, map_index);
  movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}
4907
4908
// Allocates a sequential two-byte string with room for |length| characters
// into |result| and initializes its map, length (as a Smi) and hash field.
// Jumps to |gc_required| on allocation failure. Clobbers all three scratch
// registers; |length| is preserved.
void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  const int kHeaderAlignment = SeqTwoByteString::kHeaderSize &
                               kObjectAlignmentMask;
  DCHECK(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  leap(scratch1, Operand(length, length, times_1, kObjectAlignmentMask +
                kHeaderAlignment));
  // Round the character payload down to object alignment, then cancel the
  // header misalignment folded in above.
  andp(scratch1, Immediate(~kObjectAlignmentMask));
  if (kHeaderAlignment > 0) {
    subp(scratch1, Immediate(kHeaderAlignment));
  }

  // Allocate two byte string in new space.
  Allocate(SeqTwoByteString::kHeaderSize,
           times_1,
           scratch1,
           result,
           scratch2,
           scratch3,
           gc_required,
           TAG_OBJECT);

  // Set the map, length and hash field.
  LoadRoot(kScratchRegister, Heap::kStringMapRootIndex);
  movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
  Integer32ToSmi(scratch1, length);
  movp(FieldOperand(result, String::kLengthOffset), scratch1);
  movp(FieldOperand(result, String::kHashFieldOffset),
       Immediate(String::kEmptyHashField));
}
4946
4947
// Allocates a sequential one-byte string with room for |length| characters
// into |result| and initializes its map, length (as a Smi) and hash field.
// Jumps to |gc_required| on allocation failure. Clobbers all three scratch
// registers; |length| is preserved.
void MacroAssembler::AllocateOneByteString(Register result, Register length,
                                           Register scratch1, Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  const int kHeaderAlignment = SeqOneByteString::kHeaderSize &
                               kObjectAlignmentMask;
  movl(scratch1, length);
  DCHECK(kCharSize == 1);
  // Round the character payload up to object alignment, then cancel the
  // header misalignment folded in below.
  addp(scratch1, Immediate(kObjectAlignmentMask + kHeaderAlignment));
  andp(scratch1, Immediate(~kObjectAlignmentMask));
  if (kHeaderAlignment > 0) {
    subp(scratch1, Immediate(kHeaderAlignment));
  }

  // Allocate one-byte string in new space.
  Allocate(SeqOneByteString::kHeaderSize,
           times_1,
           scratch1,
           result,
           scratch2,
           scratch3,
           gc_required,
           TAG_OBJECT);

  // Set the map, length and hash field.
  LoadRoot(kScratchRegister, Heap::kOneByteStringMapRootIndex);
  movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
  Integer32ToSmi(scratch1, length);
  movp(FieldOperand(result, String::kLengthOffset), scratch1);
  movp(FieldOperand(result, String::kHashFieldOffset),
       Immediate(String::kEmptyHashField));
}
4982
4983
// Allocates an (uninitialized except for its map) two-byte cons string into
// |result|, jumping to |gc_required| on failure.
void MacroAssembler::AllocateTwoByteConsString(Register result,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  // Allocate cons string object in new space.
  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex);
  movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}
4996
4997
// Allocates an (uninitialized except for its map) one-byte cons string into
// |result|, jumping to |gc_required| on failure.
void MacroAssembler::AllocateOneByteConsString(Register result,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  // Allocate cons string object in new space.
  Allocate(ConsString::kSize,
           result,
           scratch1,
           scratch2,
           gc_required,
           TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  LoadRoot(kScratchRegister, Heap::kConsOneByteStringMapRootIndex);
  movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}
5013
5014
// Allocates an (uninitialized except for its map) two-byte sliced string
// into |result|, jumping to |gc_required| on failure.
void MacroAssembler::AllocateTwoByteSlicedString(Register result,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  // Allocate sliced string object in new space.
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  LoadRoot(kScratchRegister, Heap::kSlicedStringMapRootIndex);
  movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}
5027
5028
// Allocates an (uninitialized except for its map) one-byte sliced string
// into |result|, jumping to |gc_required| on failure.
void MacroAssembler::AllocateOneByteSlicedString(Register result,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  // Allocate sliced string object in new space.
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  LoadRoot(kScratchRegister, Heap::kSlicedOneByteStringMapRootIndex);
  movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}
5041
5042
// Allocates a JSValue wrapper holding |value| into |result|, using the
// initial map of |constructor|. Jumps to |gc_required| on failure.
// Clobbers |constructor| (via LoadGlobalFunctionInitialMap) and |scratch|.
void MacroAssembler::AllocateJSValue(Register result, Register constructor,
                                     Register value, Register scratch,
                                     Label* gc_required) {
  DCHECK(!result.is(constructor));
  DCHECK(!result.is(scratch));
  DCHECK(!result.is(value));

  // Allocate JSValue in new space.
  Allocate(JSValue::kSize, result, scratch, no_reg, gc_required, TAG_OBJECT);

  // Initialize the JSValue.
  LoadGlobalFunctionInitialMap(constructor, scratch);
  movp(FieldOperand(result, HeapObject::kMapOffset), scratch);
  LoadRoot(scratch, Heap::kEmptyFixedArrayRootIndex);
  movp(FieldOperand(result, JSObject::kPropertiesOffset), scratch);
  movp(FieldOperand(result, JSObject::kElementsOffset), scratch);
  movp(FieldOperand(result, JSValue::kValueOffset), value);
  // All four fields (map, properties, elements, value) are now initialized.
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
}
5062
5063
// Copy memory, byte-by-byte, from source to destination. Not optimized for
// long or aligned copies. The contents of scratch and length are destroyed.
// Destination is incremented by length, source, length and scratch are
// clobbered.
// A simpler loop is faster on small copies, but slower on large ones.
// The cld() instruction must have been emitted, to set the direction flag(),
// before calling this function.
// Requires source == rsi, destination == rdi, length == rcx (the fixed
// operands of rep movs). |min_length| is a static lower bound on |length|
// used to skip the short-copy dispatch when it cannot apply.
void MacroAssembler::CopyBytes(Register destination,
                               Register source,
                               Register length,
                               int min_length,
                               Register scratch) {
  DCHECK(min_length >= 0);
  if (emit_debug_code()) {
    cmpl(length, Immediate(min_length));
    Assert(greater_equal, kInvalidMinLength);
  }
  Label short_loop, len8, len16, len24, done, short_string;

  const int kLongStringLimit = 4 * kPointerSize;
  if (min_length <= kLongStringLimit) {
    // Fewer than one word: fall back to the byte-at-a-time loop.
    cmpl(length, Immediate(kPointerSize));
    j(below, &short_string, Label::kNear);
  }

  DCHECK(source.is(rsi));
  DCHECK(destination.is(rdi));
  DCHECK(length.is(rcx));

  if (min_length <= kLongStringLimit) {
    // Dispatch copies of 1..4 words to unrolled word-copy sequences.
    cmpl(length, Immediate(2 * kPointerSize));
    j(below_equal, &len8, Label::kNear);
    cmpl(length, Immediate(3 * kPointerSize));
    j(below_equal, &len16, Label::kNear);
    cmpl(length, Immediate(4 * kPointerSize));
    j(below_equal, &len24, Label::kNear);
  }

  // Because source is 8-byte aligned in our uses of this function,
  // we keep source aligned for the rep movs operation by copying the odd bytes
  // at the end of the ranges.
  movp(scratch, length);
  shrl(length, Immediate(kPointerSizeLog2));
  repmovsp();
  // Move remaining bytes of length.
  // The tail word overlaps the rep-copied region, which is harmless since
  // source and destination ranges do not overlap.
  andl(scratch, Immediate(kPointerSize - 1));
  movp(length, Operand(source, scratch, times_1, -kPointerSize));
  movp(Operand(destination, scratch, times_1, -kPointerSize), length);
  addp(destination, scratch);

  if (min_length <= kLongStringLimit) {
    jmp(&done, Label::kNear);
    // Unrolled copies fall through: len24 copies word 2, len16 word 1,
    // len8 word 0; all finish with an overlapping tail-word copy.
    bind(&len24);
    movp(scratch, Operand(source, 2 * kPointerSize));
    movp(Operand(destination, 2 * kPointerSize), scratch);
    bind(&len16);
    movp(scratch, Operand(source, kPointerSize));
    movp(Operand(destination, kPointerSize), scratch);
    bind(&len8);
    movp(scratch, Operand(source, 0));
    movp(Operand(destination, 0), scratch);
    // Move remaining bytes of length.
    movp(scratch, Operand(source, length, times_1, -kPointerSize));
    movp(Operand(destination, length, times_1, -kPointerSize), scratch);
    addp(destination, length);
    jmp(&done, Label::kNear);

    bind(&short_string);
    if (min_length == 0) {
      // Guard the decrement loop against a zero-length copy.
      testl(length, length);
      j(zero, &done, Label::kNear);
    }

    bind(&short_loop);
    movb(scratch, Operand(source, 0));
    movb(Operand(destination, 0), scratch);
    incp(source);
    incp(destination);
    decl(length);
    j(not_zero, &short_loop, Label::kNear);
  }

  bind(&done);
}
5148
5149
// Stores |filler| into every pointer-sized slot in
// [current_address, end_address). |current_address| is advanced to
// |end_address|; handles an empty range (loop condition tested first).
void MacroAssembler::InitializeFieldsWithFiller(Register current_address,
                                                Register end_address,
                                                Register filler) {
  Label loop, entry;
  jmp(&entry, Label::kNear);
  bind(&loop);
  movp(Operand(current_address, 0), filler);
  addp(current_address, Immediate(kPointerSize));
  bind(&entry);
  cmpp(current_address, end_address);
  j(below, &loop, Label::kNear);
}
5162
5163
// Loads into |dst| the context |context_chain_length| levels up the chain
// from the current context (in rsi); length 0 copies rsi itself.
void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    movp(dst, Operand(rsi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    for (int i = 1; i < context_chain_length; i++) {
      movp(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    }
  } else {
    // Slot is in the current function context. Move it into the
    // destination register in case we store into it (the write barrier
    // cannot be allowed to destroy the context in rsi).
    movp(dst, rsi);
  }

  // We should not have found a with context by walking the context
  // chain (i.e., the static scope chain and runtime context chain do
  // not agree). A variable occurring in such a scope should have
  // slot type LOOKUP and not CONTEXT.
  if (emit_debug_code()) {
    CompareRoot(FieldOperand(dst, HeapObject::kMapOffset),
                Heap::kWithContextMapRootIndex);
    Check(not_equal, kVariableResolvedToWithContext);
  }
}
5188
Ben Murdoch3ef787d2012-04-12 10:51:47 +01005189
// If |map_in_out| equals the native context's cached array map for
// |expected_kind|, replaces it with the cached map for |transitioned_kind|;
// otherwise jumps to |no_map_match| leaving |map_in_out| untouched.
// Clobbers |scratch|.
void MacroAssembler::LoadTransitionedArrayMapConditional(
    ElementsKind expected_kind,
    ElementsKind transitioned_kind,
    Register map_in_out,
    Register scratch,
    Label* no_map_match) {
  DCHECK(IsFastElementsKind(expected_kind));
  DCHECK(IsFastElementsKind(transitioned_kind));

  // Check that the function's map is the same as the expected cached map.
  movp(scratch, NativeContextOperand());
  cmpp(map_in_out,
       ContextOperand(scratch, Context::ArrayMapIndex(expected_kind)));
  j(not_equal, no_map_match);

  // Use the transitioned cached map.
  movp(map_in_out,
       ContextOperand(scratch, Context::ArrayMapIndex(transitioned_kind)));
}
5209
5210
// Number of C-call arguments passed in registers by the target ABI:
// four on Windows x64, six in the System V AMD64 ABI (Linux/Mac).
#ifdef _WIN64
static const int kRegisterPassedArguments = 4;
#else
static const int kRegisterPassedArguments = 6;
#endif
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005216
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005217
// Loads slot |index| of the native context into |dst|.
void MacroAssembler::LoadNativeContextSlot(int index, Register dst) {
  movp(dst, NativeContextOperand());
  movp(dst, ContextOperand(dst, index));
}
5222
5223
// Loads the initial map of global |function| into |map|, aborting in debug
// builds if the loaded value is not actually a map.
void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map. The global functions all have initial maps.
  movp(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
    jmp(&ok);
    bind(&fail);
    Abort(kGlobalFunctionsMustHaveInitialMap);
    bind(&ok);
  }
}
5237
5238
Leon Clarke4515c472010-02-03 11:58:03 +00005239int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005240 // On Windows 64 stack slots are reserved by the caller for all arguments
5241 // including the ones passed in registers, and space is always allocated for
5242 // the four register arguments even if the function takes fewer than four
5243 // arguments.
5244 // On AMD64 ABI (Linux/Mac) the first six arguments are passed in registers
5245 // and the caller does not reserve stack slots for them.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005246 DCHECK(num_arguments >= 0);
Leon Clarke4515c472010-02-03 11:58:03 +00005247#ifdef _WIN64
Steve Block44f0eee2011-05-26 01:26:41 +01005248 const int kMinimumStackSlots = kRegisterPassedArguments;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005249 if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots;
5250 return num_arguments;
Leon Clarke4515c472010-02-03 11:58:03 +00005251#else
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005252 if (num_arguments < kRegisterPassedArguments) return 0;
5253 return num_arguments - kRegisterPassedArguments;
Leon Clarke4515c472010-02-03 11:58:03 +00005254#endif
Leon Clarke4515c472010-02-03 11:58:03 +00005255}
5256
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005257
// Debug check that |string| is a sequential string of the encoding given by
// |encoding_mask| and that the untagged |index| is within its bounds.
// |value| is used as a scratch register (saved/restored around the type
// check); |index| is preserved (tagged and untagged again).
void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
                                               Register index,
                                               Register value,
                                               uint32_t encoding_mask) {
  // A Smi is not a heap object, so it cannot be a string.
  Label is_object;
  JumpIfNotSmi(string, &is_object);
  Abort(kNonObject);
  bind(&is_object);

  // Borrow |value| to hold the instance type; restore it afterwards.
  Push(value);
  movp(value, FieldOperand(string, HeapObject::kMapOffset));
  movzxbp(value, FieldOperand(value, Map::kInstanceTypeOffset));

  andb(value, Immediate(kStringRepresentationMask | kStringEncodingMask));
  cmpp(value, Immediate(encoding_mask));
  Pop(value);
  Check(equal, kUnexpectedStringType);

  // The index is assumed to be untagged coming in, tag it to compare with the
  // string length without using a temp register, it is restored at the end of
  // this function.
  Integer32ToSmi(index, index);
  SmiCompare(index, FieldOperand(string, String::kLengthOffset));
  Check(less, kIndexIsTooLarge);

  SmiCompare(index, Smi::FromInt(0));
  Check(greater_equal, kIndexIsNegative);

  // Restore the index
  SmiToInteger32(index, index);
}
5289
5290
// Aligns rsp per the ABI and reserves stack space for a C call with
// |num_arguments| arguments, saving the old rsp in the slot just above the
// argument area so CallCFunction can restore it afterwards.
void MacroAssembler::PrepareCallCFunction(int num_arguments) {
  int frame_alignment = base::OS::ActivationFrameAlignment();
  DCHECK(frame_alignment != 0);
  DCHECK(num_arguments >= 0);

  // Make stack end at alignment and allocate space for arguments and old rsp.
  movp(kScratchRegister, rsp);
  DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  // One extra slot for the saved rsp, then round down to the alignment.
  subp(rsp, Immediate((argument_slots_on_stack + 1) * kRegisterSize));
  andp(rsp, Immediate(-frame_alignment));
  movp(Operand(rsp, argument_slots_on_stack * kRegisterSize), kScratchRegister);
}
5305
5306
// Calls the C function at external reference |function|; loads its address
// into rax and delegates to the register overload. Requires a preceding
// PrepareCallCFunction(num_arguments).
void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  LoadAddress(rax, function);
  CallCFunction(rax, num_arguments);
}
5312
5313
// Calls the C function whose address is in |function|, then restores the
// rsp that PrepareCallCFunction saved above the argument area.
void MacroAssembler::CallCFunction(Register function, int num_arguments) {
  DCHECK(has_frame());
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(function);
  DCHECK(base::OS::ActivationFrameAlignment() != 0);
  DCHECK(num_arguments >= 0);
  // Restore the caller's rsp from the slot PrepareCallCFunction filled.
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  movp(rsp, Operand(rsp, argument_slots_on_stack * kRegisterSize));
}
5328
Steve Blockd0582a62009-12-15 09:54:21 +00005329
#ifdef DEBUG
// Returns true if any two of the valid registers among the arguments alias
// each other. Invalid (no_reg) arguments are ignored. Distinct valid
// registers contribute distinct bits to the mask, so a mismatch between the
// valid-argument count and the population of the mask means a duplicate.
bool AreAliased(Register reg1,
                Register reg2,
                Register reg3,
                Register reg4,
                Register reg5,
                Register reg6,
                Register reg7,
                Register reg8) {
  const Register regs[] = {reg1, reg2, reg3, reg4, reg5, reg6, reg7, reg8};
  int valid_count = 0;
  RegList mask = 0;
  for (const Register& reg : regs) {
    if (reg.is_valid()) {
      valid_count++;
      mask |= reg.bit();
    }
  }
  return valid_count != NumRegs(mask);
}
#endif
Ben Murdoch3ef787d2012-04-12 10:51:47 +01005357
5358
// Sets up a scoped patcher whose embedded assembler writes directly over the
// |size| bytes of existing code at |address|.
CodePatcher::CodePatcher(Isolate* isolate, byte* address, int size)
    : address_(address),
      size_(size),
      masm_(isolate, address, size + Assembler::kGap, CodeObjectRequired::kNo) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
5368
5369
// Flushes the instruction cache for the patched region and verifies that
// exactly |size_| bytes were emitted.
CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  Assembler::FlushICache(masm_.isolate(), address_, size_);

  // Check that the code was patched as expected.
  DCHECK(masm_.pc_ == address_ + size_);
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
5378
Ben Murdoch3ef787d2012-04-12 10:51:47 +01005379
// Tests the MemoryChunk flags of the page containing |object| against |mask|
// and jumps to |condition_met| if the test satisfies |cc| (zero / not_zero).
// |scratch| receives the page start address and may alias |object|
// (in which case |object| is clobbered).
void MacroAssembler::CheckPageFlag(
    Register object,
    Register scratch,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  DCHECK(cc == zero || cc == not_zero);
  if (scratch.is(object)) {
    // Mask off the low bits in place to get the page start.
    andp(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    movp(scratch, Immediate(~Page::kPageAlignmentMask));
    andp(scratch, object);
  }
  // Use a byte test when the mask fits in 8 bits; the flags word is little-
  // endian so the low byte overlaps the low flag bits.
  if (mask < (1 << kBitsPerByte)) {
    testb(Operand(scratch, MemoryChunk::kFlagsOffset),
          Immediate(static_cast<uint8_t>(mask)));
  } else {
    testl(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}
5402
5403
// Jumps to |on_black| if |object| is marked black ("11") in the mark bitmap.
// Clobbers rcx and both scratch registers; |object| itself is preserved.
void MacroAssembler::JumpIfBlack(Register object,
                                 Register bitmap_scratch,
                                 Register mask_scratch,
                                 Label* on_black,
                                 Label::Distance on_black_distance) {
  DCHECK(!AreAliased(object, bitmap_scratch, mask_scratch, rcx));

  GetMarkBits(object, bitmap_scratch, mask_scratch);

  DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
  // The mask_scratch register contains a 1 at the position of the first bit
  // and a 1 at the position of the second bit. All other positions are zero.
  // Black requires BOTH bits set, so compare the masked cell against the
  // full two-bit mask rather than just testing for non-zero.
  movp(rcx, mask_scratch);
  andp(rcx, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  cmpp(mask_scratch, rcx);
  j(equal, on_black, on_black_distance);
}
5421
5422
// Computes, for the object at |addr_reg|, the address of its mark-bitmap cell
// (into |bitmap_reg|) and a two-bit mask ("11" shifted to the object's bit
// position) into |mask_reg|. Clobbers rcx (used as the variable shift count
// for shlp_cl). |addr_reg| is preserved.
void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  DCHECK(!AreAliased(addr_reg, bitmap_reg, mask_reg, rcx));
  movp(bitmap_reg, addr_reg);
  // Sign extended 32 bit immediate.
  // Page start = address with the low page-offset bits cleared.
  andp(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
  movp(rcx, addr_reg);
  // Byte offset of the bitmap cell within the page's bitmap: divide the
  // page offset by (pointers per cell * pointer size), keeping cell
  // alignment.
  int shift =
      Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
  shrl(rcx, Immediate(shift));
  andp(rcx,
       Immediate((Page::kPageAlignmentMask >> shift) &
                 ~(Bitmap::kBytesPerCell - 1)));

  addp(bitmap_reg, rcx);
  // Bit index within the cell = (offset in pointers) mod bits-per-cell.
  movp(rcx, addr_reg);
  shrl(rcx, Immediate(kPointerSizeLog2));
  andp(rcx, Immediate((1 << Bitmap::kBitsPerCellLog2) - 1));
  // Start from "11" and shift it to the object's bit position.
  movl(mask_reg, Immediate(3));
  shlp_cl(mask_reg);
}
5445
5446
// Jumps to |value_is_white| if |value| is marked white ("00") in the mark
// bitmap. Clobbers rcx (via GetMarkBits) and both scratch registers.
void MacroAssembler::JumpIfWhite(Register value, Register bitmap_scratch,
                                 Register mask_scratch, Label* value_is_white,
                                 Label::Distance distance) {
  DCHECK(!AreAliased(value, bitmap_scratch, mask_scratch, rcx));
  GetMarkBits(value, bitmap_scratch, mask_scratch);

  // If the value is black or grey we don't need to do anything.
  DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0);
  DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
  DCHECK(strcmp(Marking::kGreyBitPattern, "10") == 0);
  DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

  // Since both black and grey have a 1 in the first position and white does
  // not have a 1 there we only need to check one bit.
  testp(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);
  j(zero, value_is_white, distance);
}
5464
5465
// Walks the prototype chain of the receiver (expected in rax, which is
// copied to rcx) verifying that every object is suitable for a fast
// for-in enumeration; jumps to |call_runtime| otherwise.
// Clobbers rcx, rbx, rdx, r8 and kScratchRegister.
void MacroAssembler::CheckEnumCache(Label* call_runtime) {
  Label next, start;
  Register empty_fixed_array_value = r8;
  LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
  movp(rcx, rax);

  // Check if the enum length field is properly initialized, indicating that
  // there is an enum cache.
  movp(rbx, FieldOperand(rcx, HeapObject::kMapOffset));

  EnumLength(rdx, rbx);
  Cmp(rdx, Smi::FromInt(kInvalidEnumCacheSentinel));
  j(equal, call_runtime);

  // The receiver itself may have a non-zero enum length, so skip the
  // empty-cache check on the first iteration.
  jmp(&start);

  bind(&next);

  movp(rbx, FieldOperand(rcx, HeapObject::kMapOffset));

  // For all objects but the receiver, check that the cache is empty.
  EnumLength(rdx, rbx);
  Cmp(rdx, Smi::FromInt(0));
  j(not_equal, call_runtime);

  bind(&start);

  // Check that there are no elements. Register rcx contains the current JS
  // object we've reached through the prototype chain.
  Label no_elements;
  cmpp(empty_fixed_array_value,
       FieldOperand(rcx, JSObject::kElementsOffset));
  j(equal, &no_elements);

  // Second chance, the object may be using the empty slow element dictionary.
  LoadRoot(kScratchRegister, Heap::kEmptySlowElementDictionaryRootIndex);
  cmpp(kScratchRegister, FieldOperand(rcx, JSObject::kElementsOffset));
  j(not_equal, call_runtime);

  bind(&no_elements);
  // Advance to the prototype; a null prototype terminates the walk
  // successfully.
  movp(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
  CompareRoot(rcx, Heap::kNullValueRootIndex);
  j(not_equal, &next);
}
5510
Ben Murdoch097c5b22016-05-18 11:27:45 +01005511
// Tests whether an AllocationMemento immediately follows the JSArray in
// |receiver_reg|. Jumps to |no_memento_found| when the candidate memento
// location lies outside the new space's [start, top) range; otherwise falls
// through with the condition flags set by the final map comparison (caller
// branches on 'equal' for "memento found"). Clobbers |scratch_reg| and
// kScratchRegister.
void MacroAssembler::TestJSArrayForAllocationMemento(
    Register receiver_reg,
    Register scratch_reg,
    Label* no_memento_found) {
  ExternalReference new_space_start =
      ExternalReference::new_space_start(isolate());
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Address just past where a trailing memento would end.
  leap(scratch_reg, Operand(receiver_reg,
      JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag));
  Move(kScratchRegister, new_space_start);
  cmpp(scratch_reg, kScratchRegister);
  j(less, no_memento_found);
  cmpp(scratch_reg, ExternalOperand(new_space_allocation_top));
  j(greater, no_memento_found);
  // In range: check whether the word after the array is a memento map.
  CompareRoot(MemOperand(scratch_reg, -AllocationMemento::kSize),
              Heap::kAllocationMementoMapRootIndex);
}
5531
5532
// Walks the prototype chain of |object| and jumps to |found| if any
// prototype either is below JS_OBJECT_TYPE (e.g. a proxy or value wrapper)
// or has dictionary-mode elements. Falls through when the chain ends at
// null. Clobbers both scratch registers; |object| is preserved.
void MacroAssembler::JumpIfDictionaryInPrototypeChain(
    Register object,
    Register scratch0,
    Register scratch1,
    Label* found) {
  DCHECK(!(scratch0.is(kScratchRegister) && scratch1.is(kScratchRegister)));
  DCHECK(!scratch1.is(scratch0));
  Register current = scratch0;
  Label loop_again, end;

  // Start at the object's first prototype; the object itself is not checked.
  movp(current, object);
  movp(current, FieldOperand(current, HeapObject::kMapOffset));
  movp(current, FieldOperand(current, Map::kPrototypeOffset));
  CompareRoot(current, Heap::kNullValueRootIndex);
  j(equal, &end);

  // Loop based on the map going up the prototype chain.
  bind(&loop_again);
  movp(current, FieldOperand(current, HeapObject::kMapOffset));
  // Instance types below JS_OBJECT_TYPE (proxies, value wrappers) are
  // treated the same as a dictionary hit.
  STATIC_ASSERT(JS_PROXY_TYPE < JS_OBJECT_TYPE);
  STATIC_ASSERT(JS_VALUE_TYPE < JS_OBJECT_TYPE);
  CmpInstanceType(current, JS_OBJECT_TYPE);
  j(below, found);
  movp(scratch1, FieldOperand(current, Map::kBitField2Offset));
  DecodeField<Map::ElementsKindBits>(scratch1);
  cmpp(scratch1, Immediate(DICTIONARY_ELEMENTS));
  j(equal, found);
  movp(current, FieldOperand(current, Map::kPrototypeOffset));
  CompareRoot(current, Heap::kNullValueRootIndex);
  j(not_equal, &loop_again);

  bind(&end);
}
5566
5567
// Emits a truncating signed division of |dividend| by the compile-time
// constant |divisor| using magic-number multiplication instead of idiv
// (cf. Hacker's Delight, ch. 10). The quotient is accumulated in rdx;
// rax is clobbered. |dividend| must not be rax or rdx and is preserved.
void MacroAssembler::TruncatingDiv(Register dividend, int32_t divisor) {
  DCHECK(!dividend.is(rax));
  DCHECK(!dividend.is(rdx));
  base::MagicNumbersForDivision<uint32_t> mag =
      base::SignedDivisionByConstant(static_cast<uint32_t>(divisor));
  // High 32 bits of dividend * multiplier land in rdx.
  movl(rax, Immediate(mag.multiplier));
  imull(dividend);
  // Correct for the multiplier having been interpreted as signed.
  bool neg = (mag.multiplier & (static_cast<uint32_t>(1) << 31)) != 0;
  if (divisor > 0 && neg) addl(rdx, dividend);
  if (divisor < 0 && !neg && mag.multiplier > 0) subl(rdx, dividend);
  if (mag.shift > 0) sarl(rdx, Immediate(mag.shift));
  // Add 1 when the dividend is negative to truncate toward zero.
  movl(rax, dividend);
  shrl(rax, Immediate(31));
  addl(rdx, rax);
}
5583
Ben Murdoch3ef787d2012-04-12 10:51:47 +01005584
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005585} // namespace internal
5586} // namespace v8
Leon Clarkef7060e22010-06-03 12:02:55 +01005587
5588#endif // V8_TARGET_ARCH_X64