blob: 5b897de545878f952b579e8b205b0fefb2062de0 [file] [log] [blame]
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001// Copyright 2012 the V8 project authors. All rights reserved.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
Steve Blocka7e24c12009-10-30 11:49:00 +00004
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005#include "src/v8.h"
Steve Blocka7e24c12009-10-30 11:49:00 +00006
Ben Murdochb8a8cc12014-11-26 15:28:44 +00007#if V8_TARGET_ARCH_X64
Leon Clarkef7060e22010-06-03 12:02:55 +01008
Ben Murdochb8a8cc12014-11-26 15:28:44 +00009#include "src/base/bits.h"
10#include "src/base/division-by-constant.h"
11#include "src/bootstrapper.h"
12#include "src/codegen.h"
13#include "src/cpu-profiler.h"
14#include "src/debug.h"
15#include "src/heap/heap.h"
16#include "src/isolate-inl.h"
17#include "src/serialize.h"
18#include "src/x64/assembler-x64.h"
19#include "src/x64/macro-assembler-x64.h"
Steve Blocka7e24c12009-10-30 11:49:00 +000020
21namespace v8 {
22namespace internal {
23
// Constructs a macro assembler over the given code buffer. The root register
// is assumed usable by default (root_array_available_ = true); callers that
// cannot rely on it must clear the flag themselves. When an isolate is
// present, caches its undefined value as the default code object handle.
MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      has_frame_(false),
      root_array_available_(true) {
  // isolate() can be NULL e.g. for snapshot-less/standalone assembler use;
  // in that case no code object handle is created.
  if (isolate() != NULL) {
    code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
                                  isolate());
  }
}


// Sentinel returned by RootRegisterDelta when the reference cannot (or must
// not) be addressed relative to the root register.
static const int64_t kInvalidRootRegisterDelta = -1;


// Computes the displacement of |other| from the value held in kRootRegister,
// so external references can be addressed as Operand(kRootRegister, delta).
// Returns kInvalidRootRegisterDelta when predictable code size is requested
// and the reference lies outside the isolate object itself, since such
// deltas would vary between runs and change instruction encodings.
int64_t MacroAssembler::RootRegisterDelta(ExternalReference other) {
  if (predictable_code_size() &&
      (other.address() < reinterpret_cast<Address>(isolate()) ||
       other.address() >= reinterpret_cast<Address>(isolate() + 1))) {
    return kInvalidRootRegisterDelta;
  }
  // kRootRegister holds roots_array_start + kRootRegisterBias; reconstruct
  // that value here to compute the displacement.
  Address roots_register_value = kRootRegisterBias +
      reinterpret_cast<Address>(isolate()->heap()->roots_array_start());

  int64_t delta = kInvalidRootRegisterDelta;  // Bogus initialization.
  if (kPointerSize == kInt64Size) {
    delta = other.address() - roots_register_value;
  } else {
    // For x32, zero extend the address to 64-bit and calculate the delta.
    uint64_t o = static_cast<uint32_t>(
        reinterpret_cast<intptr_t>(other.address()));
    uint64_t r = static_cast<uint32_t>(
        reinterpret_cast<intptr_t>(roots_register_value));
    delta = o - r;
  }
  return delta;
}


// Returns an Operand addressing |target|. Prefers a root-register-relative
// operand (no extra instruction emitted) when the delta fits in 32 bits;
// otherwise materializes the address into |scratch| and addresses through it.
// The serializer is excluded because root-relative deltas are not valid
// across snapshot creation.
Operand MacroAssembler::ExternalOperand(ExternalReference target,
                                        Register scratch) {
  if (root_array_available_ && !serializer_enabled()) {
    int64_t delta = RootRegisterDelta(target);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      return Operand(kRootRegister, static_cast<int32_t>(delta));
    }
  }
  // Fallback: load the full 64-bit address into the scratch register.
  Move(scratch, target);
  return Operand(scratch, 0);
}


// Loads the pointer-sized value stored at external reference |source| into
// |destination|, choosing the cheapest addressing form available:
// root-register-relative, the short rax-only load_rax encoding, or an
// indirect load through kScratchRegister.
void MacroAssembler::Load(Register destination, ExternalReference source) {
  if (root_array_available_ && !serializer_enabled()) {
    int64_t delta = RootRegisterDelta(source);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      movp(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
      return;
    }
  }
  // Safe code.
  if (destination.is(rax)) {
    // Special short encoding for loading rax from a 64-bit address.
    load_rax(source);
  } else {
    Move(kScratchRegister, source);
    movp(destination, Operand(kScratchRegister, 0));
  }
}


// Stores |source| to the pointer-sized slot at external reference
// |destination|. Mirrors Load(): root-register-relative when possible, the
// short store_rax encoding when storing rax, else indirect through
// kScratchRegister.
void MacroAssembler::Store(ExternalReference destination, Register source) {
  if (root_array_available_ && !serializer_enabled()) {
    int64_t delta = RootRegisterDelta(destination);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      movp(Operand(kRootRegister, static_cast<int32_t>(delta)), source);
      return;
    }
  }
  // Safe code.
  if (source.is(rax)) {
    // Special short encoding for storing rax to a 64-bit address.
    store_rax(destination);
  } else {
    Move(kScratchRegister, destination);
    movp(Operand(kScratchRegister, 0), source);
  }
}


// Materializes the address of external reference |source| into
// |destination|: a single leap off the root register when the delta fits in
// 32 bits, otherwise a full 64-bit immediate move.
// NOTE: LoadAddressSize() below depends on exactly which instructions this
// emits; keep the two in sync.
void MacroAssembler::LoadAddress(Register destination,
                                 ExternalReference source) {
  if (root_array_available_ && !serializer_enabled()) {
    int64_t delta = RootRegisterDelta(source);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      leap(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
      return;
    }
  }
  // Safe code.
  Move(destination, source);
}


// Returns the number of bytes LoadAddress(reg, source) would emit, without
// emitting anything. Used by callers that must know code size up front
// (e.g. patchable call sites).
int MacroAssembler::LoadAddressSize(ExternalReference source) {
  if (root_array_available_ && !serializer_enabled()) {
    // This calculation depends on the internals of LoadAddress.
    // Its correctness is ensured by the asserts in the Call
    // instruction below.
    int64_t delta = RootRegisterDelta(source);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      // Operand is leap(scratch, Operand(kRootRegister, delta));
      // Opcodes : REX.W 8D ModRM Disp8/Disp32  - 4 or 7.
      int size = 4;
      if (!is_int8(static_cast<int32_t>(delta))) {
        size += 3;  // Need full four-byte displacement in lea.
      }
      return size;
    }
  }
  // Size of movp(destination, src);
  return Assembler::kMoveAddressIntoScratchRegisterInstructionLength;
}


// Pushes the address of |source| onto the stack. Uses a 32-bit immediate
// push when the address fits (pushed value is sign-extended, so this relies
// on the address being a valid int32); otherwise goes through
// kScratchRegister. The serializer path always uses the register form so
// the address can be relocated.
void MacroAssembler::PushAddress(ExternalReference source) {
  int64_t address = reinterpret_cast<int64_t>(source.address());
  if (is_int32(address) && !serializer_enabled()) {
    if (emit_debug_code()) {
      // Zap the scratch register to catch code that wrongly assumes it
      // holds the address after this call.
      Move(kScratchRegister, kZapValue, Assembler::RelocInfoNone());
    }
    Push(Immediate(static_cast<int32_t>(address)));
    return;
  }
  LoadAddress(kScratchRegister, source);
  Push(kScratchRegister);
}


// Loads the root-list entry |index| into |destination| via the root
// register. kRootRegister holds roots_array_start + kRootRegisterBias, hence
// the bias subtraction in the displacement.
void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  DCHECK(root_array_available_);
  movp(destination, Operand(kRootRegister,
                            (index << kPointerSizeLog2) - kRootRegisterBias));
}


// Loads root-list entry (fixed_offset + variable_offset) into |destination|,
// where |variable_offset| is an untagged index held in a register and
// |fixed_offset| is a compile-time index.
void MacroAssembler::LoadRootIndexed(Register destination,
                                     Register variable_offset,
                                     int fixed_offset) {
  DCHECK(root_array_available_);
  movp(destination,
       Operand(kRootRegister,
               variable_offset, times_pointer_size,
               (fixed_offset << kPointerSizeLog2) - kRootRegisterBias));
}


// Stores |source| into the root-list slot |index| (root-register-relative,
// same bias arithmetic as LoadRoot).
void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) {
  DCHECK(root_array_available_);
  movp(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias),
       source);
}


// Pushes the root-list entry |index| onto the stack directly from memory,
// without clobbering any register.
void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  DCHECK(root_array_available_);
  Push(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias));
}


// Compares register |with| against the root-list entry |index|, setting the
// condition flags. No register is clobbered.
void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  DCHECK(root_array_available_);
  cmpp(with, Operand(kRootRegister,
                     (index << kPointerSizeLog2) - kRootRegisterBias));
}


// Compares the memory operand |with| against root-list entry |index|.
// x64 cannot compare memory to memory, so the root value is staged in
// kScratchRegister — hence the operand must not itself use that register.
void MacroAssembler::CompareRoot(const Operand& with,
                                 Heap::RootListIndex index) {
  DCHECK(root_array_available_);
  DCHECK(!with.AddressUsesRegister(kScratchRegister));
  LoadRoot(kScratchRegister, index);
  cmpp(with, kScratchRegister);
}


// Appends |addr| (a slot address in |object|) to the store buffer, and calls
// the StoreBufferOverflowStub when the buffer fills up. |scratch| is
// clobbered. With kReturnAtEnd the emitted code returns from the current
// function in both the overflow and non-overflow paths; with
// kFallThroughAtEnd execution continues after this code.
void MacroAssembler::RememberedSetHelper(Register object,  // For debug tests.
                                         Register addr,
                                         Register scratch,
                                         SaveFPRegsMode save_fp,
                                         RememberedSetFinalAction and_then) {
  // In debug code, verify that |object| really is in new space — the
  // remembered set only records old->new pointers.
  if (emit_debug_code()) {
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
    int3();
    bind(&ok);
  }
  // Load store buffer top.
  LoadRoot(scratch, Heap::kStoreBufferTopRootIndex);
  // Store pointer to buffer.
  movp(Operand(scratch, 0), addr);
  // Increment buffer top.
  addp(scratch, Immediate(kPointerSize));
  // Write back new top of buffer.
  StoreRoot(scratch, Heap::kStoreBufferTopRootIndex);
  // Call stub on end of buffer.
  Label done;
  // Check for end of buffer: the overflow bit is set in the top pointer
  // when the buffer limit is reached.
  testp(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit));
  if (and_then == kReturnAtEnd) {
    Label buffer_overflowed;
    j(not_equal, &buffer_overflowed, Label::kNear);
    ret(0);
    bind(&buffer_overflowed);
  } else {
    DCHECK(and_then == kFallThroughAtEnd);
    j(equal, &done, Label::kNear);
  }
  StoreBufferOverflowStub store_buffer_overflow(isolate(), save_fp);
  CallStub(&store_buffer_overflow);
  if (and_then == kReturnAtEnd) {
    ret(0);
  } else {
    DCHECK(and_then == kFallThroughAtEnd);
    bind(&done);
  }
}


// Jumps to |branch| when (object in new space) matches condition |cc|
// (equal = in new space, not_equal = not in new space). The test masks the
// object's address and compares it with the new-space start. |scratch| is
// clobbered; kScratchRegister is clobbered on most paths.
void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                Label* branch,
                                Label::Distance distance) {
  if (serializer_enabled()) {
    // Can't do arithmetic on external references if it might get serialized.
    // The mask isn't really an address. We load it as an external reference in
    // case the size of the new space is different between the snapshot maker
    // and the running system.
    if (scratch.is(object)) {
      Move(kScratchRegister, ExternalReference::new_space_mask(isolate()));
      andp(scratch, kScratchRegister);
    } else {
      Move(scratch, ExternalReference::new_space_mask(isolate()));
      andp(scratch, object);
    }
    Move(kScratchRegister, ExternalReference::new_space_start(isolate()));
    cmpp(scratch, kScratchRegister);
    j(cc, branch, distance);
  } else {
    // Non-serializing fast path: fold start and mask into immediates.
    DCHECK(kPointerSize == kInt64Size
        ? is_int32(static_cast<int64_t>(isolate()->heap()->NewSpaceMask()))
        : kPointerSize == kInt32Size);
    intptr_t new_space_start =
        reinterpret_cast<intptr_t>(isolate()->heap()->NewSpaceStart());
    // Load -start so that (object + (-start)) & mask == 0 iff the object
    // lies in new space.
    Move(kScratchRegister, reinterpret_cast<Address>(-new_space_start),
         Assembler::RelocInfoNone());
    if (scratch.is(object)) {
      addp(scratch, kScratchRegister);
    } else {
      leap(scratch, Operand(object, kScratchRegister, times_1, 0));
    }
    andp(scratch,
         Immediate(static_cast<int32_t>(isolate()->heap()->NewSpaceMask())));
    j(cc, branch, distance);
  }
}


// Write barrier for a store of |value| into the field at |offset| within
// |object|. Computes the slot address into |dst| and delegates to
// RecordWrite. The smi check here is the only one performed — RecordWrite is
// called with OMIT_SMI_CHECK. In debug code, |value| and |dst| are zapped
// afterwards to flush out callers that rely on their contents.
void MacroAssembler::RecordWriteField(
    Register object,
    int offset,
    Register value,
    Register dst,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done);
  }

  // Although the object register is tagged, the offset is relative to the start
  // of the object, so the offset must be a multiple of kPointerSize.
  DCHECK(IsAligned(offset, kPointerSize));

  leap(dst, FieldOperand(object, offset));
  if (emit_debug_code()) {
    // Verify the computed slot address is pointer-aligned.
    Label ok;
    testb(dst, Immediate((1 << kPointerSizeLog2) - 1));
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  RecordWrite(object, dst, value, save_fp, remembered_set_action,
              OMIT_SMI_CHECK, pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    Move(value, kZapValue, Assembler::RelocInfoNone());
    Move(dst, kZapValue, Assembler::RelocInfoNone());
  }
}


// Write barrier for a store of |value| into element |index| (an untagged
// integer, not a smi) of the FixedArray |object|. Reuses |index| as the slot
// address register and delegates to RecordWrite with OMIT_SMI_CHECK (the smi
// check, if requested, is done here). Debug code zaps |value| and |index|
// afterwards.
void MacroAssembler::RecordWriteArray(
    Register object,
    Register value,
    Register index,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done);
  }

  // Array access: calculate the destination address. Index is not a smi.
  Register dst = index;
  leap(dst, Operand(object, index, times_pointer_size,
                    FixedArray::kHeaderSize - kHeapObjectTag));

  RecordWrite(object, dst, value, save_fp, remembered_set_action,
              OMIT_SMI_CHECK, pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    Move(value, kZapValue, Assembler::RelocInfoNone());
    Move(index, kZapValue, Assembler::RelocInfoNone());
  }
}


// Write barrier specialized for storing |map| into |object|'s map slot.
// Only needed for incremental marking (maps are never in new space, so no
// remembered-set entry is required). |dst| is clobbered with the slot
// address; debug code zaps |dst| and |map| afterwards.
void MacroAssembler::RecordWriteForMap(Register object,
                                       Register map,
                                       Register dst,
                                       SaveFPRegsMode fp_mode) {
  DCHECK(!object.is(kScratchRegister));
  DCHECK(!object.is(map));
  DCHECK(!object.is(dst));
  DCHECK(!map.is(dst));
  AssertNotSmi(object);

  // Debug check: |map| must actually be a Map (its map is the meta map).
  // CompareMap clobbers kScratchRegister, so preserve |map| around it if
  // it happens to live there.
  if (emit_debug_code()) {
    Label ok;
    if (map.is(kScratchRegister)) pushq(map);
    CompareMap(map, isolate()->factory()->meta_map());
    if (map.is(kScratchRegister)) popq(map);
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  if (!FLAG_incremental_marking) {
    return;
  }

  // Debug check: the map slot of |object| already holds |map|, i.e. the
  // store this barrier covers has been performed.
  if (emit_debug_code()) {
    Label ok;
    if (map.is(kScratchRegister)) pushq(map);
    cmpp(map, FieldOperand(object, HeapObject::kMapOffset));
    if (map.is(kScratchRegister)) popq(map);
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // Compute the address.
  leap(dst, FieldOperand(object, HeapObject::kMapOffset));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;

  // A single check of the map's pages interesting flag suffices, since it is
  // only set during incremental collection, and then it's also guaranteed that
  // the from object's page's interesting flag is also set. This optimization
  // relies on the fact that maps can never be in new space.
  CheckPageFlag(map,
                map,  // Used as scratch.
                MemoryChunk::kPointersToHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

  RecordWriteStub stub(isolate(), object, map, dst, OMIT_REMEMBERED_SET,
                       fp_mode);
  CallStub(&stub);

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    Move(dst, kZapValue, Assembler::RelocInfoNone());
    Move(map, kZapValue, Assembler::RelocInfoNone());
  }
}


// General write barrier: records the store of |value| into the slot at
// |address| inside |object|. Skips all work when only the remembered set
// would be updated and incremental marking is off. Uses page-flag checks to
// filter out uninteresting stores before calling RecordWriteStub. |address|
// and |value| are clobbered (zapped) in debug code.
void MacroAssembler::RecordWrite(
    Register object,
    Register address,
    Register value,
    SaveFPRegsMode fp_mode,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  DCHECK(!object.is(value));
  DCHECK(!object.is(address));
  DCHECK(!value.is(address));
  AssertNotSmi(object);

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  // Debug check: the slot at |address| already contains |value|, i.e. the
  // store being recorded has actually happened.
  if (emit_debug_code()) {
    Label ok;
    cmpp(value, Operand(address, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    // Skip barrier if writing a smi.
    JumpIfSmi(value, &done);
  }

  if (pointers_to_here_check_for_value != kPointersToHereAreAlwaysInteresting) {
    CheckPageFlag(value,
                  value,  // Used as scratch.
                  MemoryChunk::kPointersToHereAreInterestingMask,
                  zero,
                  &done,
                  Label::kNear);
  }

  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

  RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
                       fp_mode);
  CallStub(&stub);

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    Move(address, kZapValue, Assembler::RelocInfoNone());
    Move(value, kZapValue, Assembler::RelocInfoNone());
  }
}


Ben Murdochb8a8cc12014-11-26 15:28:44 +0000513void MacroAssembler::Assert(Condition cc, BailoutReason reason) {
514 if (emit_debug_code()) Check(cc, reason);
Steve Blocka7e24c12009-10-30 11:49:00 +0000515}
516
517
// Debug-only check that |elements| is a fast-elements backing store: its map
// must be one of FixedArray, FixedDoubleArray, or the copy-on-write
// FixedArray map. Aborts otherwise. Emits nothing in release builds.
// Clobbers kScratchRegister via CompareRoot is not an issue here: the
// Register overload of CompareRoot is used indirectly through the Operand
// overload, which stages the root in kScratchRegister.
void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    Label ok;
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedArrayMapRootIndex);
    j(equal, &ok, Label::kNear);
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedDoubleArrayMapRootIndex);
    j(equal, &ok, Label::kNear);
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedCOWArrayMapRootIndex);
    j(equal, &ok, Label::kNear);
    Abort(kJSObjectWithFastElementsMapHasSlowElements);
    bind(&ok);
  }
}


// Emits code that aborts with |reason| unless condition |cc| holds.
// Unlike Assert, this is emitted unconditionally (also in release builds).
void MacroAssembler::Check(Condition cc, BailoutReason reason) {
  Label L;
  j(cc, &L, Label::kNear);
  Abort(reason);
  // Control will not return here.
  bind(&L);
}


// Emits a runtime check that rsp is aligned to the platform's activation
// frame alignment (required before C calls per the System V AMD64 ABI).
// Traps with int3 on misalignment. Emits nothing when the required
// alignment is no stricter than pointer size.
void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = base::OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    Label alignment_as_expected;
    testp(rsp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected, Label::kNear);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}


// Jumps to |then_label| when a multiplication produced negative zero:
// i.e. |result| is 0 and the (sign-carrying) operand |op| is negative.
// Falls through otherwise. Clobbers only the flags.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  testl(result, result);
  j(not_zero, &ok, Label::kNear);
  testl(op, op);
  j(sign, then_label);
  bind(&ok);
}


// Emits code that aborts execution with the given bailout |reason|: pushes
// the reason as a smi and calls Runtime::kAbort. With --trap-on-abort (debug
// builds only) a bare int3 is emitted instead. Control never returns past
// the emitted code.
void MacroAssembler::Abort(BailoutReason reason) {
#ifdef DEBUG
  const char* msg = GetBailoutReason(reason);
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }

  if (FLAG_trap_on_abort) {
    int3();
    return;
  }
#endif

  Move(kScratchRegister, Smi::FromInt(static_cast<int>(reason)),
       Assembler::RelocInfoNone());
  Push(kScratchRegister);

  if (!has_frame_) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    CallRuntime(Runtime::kAbort, 1);
  } else {
    CallRuntime(Runtime::kAbort, 1);
  }
  // Control will not return here.
  int3();
}


// Emits a call to the given code stub, tagged with |ast_id| for type
// feedback. Stub calls are only allowed when a frame exists or the stub
// never sets one up (see AllowThisStubCall).
void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) {
  DCHECK(AllowThisStubCall(stub));  // Calls are not allowed in some stubs
  Call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
}


// Emits a tail call (jump) to the given code stub; the stub returns
// directly to this function's caller.
void MacroAssembler::TailCallStub(CodeStub* stub) {
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
}


Steve Blocka7e24c12009-10-30 11:49:00 +0000614void MacroAssembler::StubReturn(int argc) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000615 DCHECK(argc >= 1 && generating_stub());
Steve Blocka7e24c12009-10-30 11:49:00 +0000616 ret((argc - 1) * kPointerSize);
617}
618
619
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100620bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000621 return has_frame_ || !stub->SometimesSetsUpAFrame();
Steve Blocka7e24c12009-10-30 11:49:00 +0000622}
623
624
// Extracts the cached array index from a string hash-field value in |hash|
// and leaves it in |index| as a smi. |hash| is preserved unless it aliases
// |index|.
void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it does not conflict.
  DCHECK(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  if (!hash.is(index)) {
    movl(index, hash);
  }
  DecodeFieldToSmi<String::ArrayIndexValueBits>(index);
}


// Emits a call to runtime function |f| with |num_arguments| stack
// arguments, going through the CEntryStub. Clobbers rax (argument count)
// and rbx (runtime entry address).
void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments,
                                 SaveFPRegsMode save_doubles) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  CHECK(f->nargs < 0 || f->nargs == num_arguments);

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  LoadAddress(rbx, ExternalReference(f, isolate()));
  CEntryStub ces(isolate(), f->result_size, save_doubles);
  CallStub(&ces);
}


// Emits a call to an arbitrary external (C++) entry point |ext| with
// |num_arguments| stack arguments, via a single-result CEntryStub.
// Clobbers rax and rbx like CallRuntime.
void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments) {
  Set(rax, num_arguments);
  LoadAddress(rbx, ext);

  CEntryStub stub(isolate(), 1);
  CallStub(&stub);
}


// Emits a tail call to external entry point |ext|, forwarding the
// |num_arguments| arguments already on the stack. Clobbers rax with the
// argument count before jumping.
void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // ----------- S t a t e -------------
  //  -- rsp[0]                 : return address
  //  -- rsp[8]                 : argument num_arguments - 1
  //  ...
  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
  // -----------------------------------

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  JumpToExternalReference(ext, result_size);
}


// Emits a tail call to runtime function |fid|; thin wrapper that resolves
// the function id to an external reference in this isolate.
void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid, isolate()),
                            num_arguments,
                            result_size);
}


Ben Murdochbb769b22010-08-11 14:56:33 +0100695static int Offset(ExternalReference ref0, ExternalReference ref1) {
696 int64_t offset = (ref0.address() - ref1.address());
697 // Check that fits into int.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000698 DCHECK(static_cast<int>(offset) == offset);
Ben Murdochbb769b22010-08-11 14:56:33 +0100699 return static_cast<int>(offset);
700}
701
702
// Sets up an API exit frame with |arg_stack_space| slots reserved for
// the outgoing arguments of the upcoming API call.  Must be paired with
// the LeaveApiExitFrame performed by CallApiFunctionAndReturn.
void MacroAssembler::PrepareCallApiFunction(int arg_stack_space) {
  EnterApiExitFrame(arg_stack_space);
}
706
707
// Calls an API function (held in |function_address|) and returns to the
// JS caller, popping |stack_space| pointers of arguments.  Handles:
//  - opening/closing a HandleScope around the call,
//  - routing the call through the profiler thunk |thunk_ref| when
//    profiling is enabled (|thunk_last_arg| receives the real target),
//  - loading the result from |return_value_operand|,
//  - promoting a scheduled exception if the callee set one,
//  - optionally restoring the context from |context_restore_operand|
//    (pass NULL to skip the restore).
// NOTE(review): the sequence below emits machine code in order; the
// statement order is load-bearing and must not be rearranged.
void MacroAssembler::CallApiFunctionAndReturn(
    Register function_address,
    ExternalReference thunk_ref,
    Register thunk_last_arg,
    int stack_space,
    Operand return_value_operand,
    Operand* context_restore_operand) {
  Label prologue;
  Label promote_scheduled_exception;
  Label exception_handled;
  Label delete_allocated_handles;
  Label leave_exit_frame;
  Label write_back;

  Factory* factory = isolate()->factory();
  // All handle-scope fields are addressed relative to the "next" field;
  // kLimitOffset/kLevelOffset are byte deltas from it.
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address(isolate());
  const int kNextOffset = 0;
  const int kLimitOffset = Offset(
      ExternalReference::handle_scope_limit_address(isolate()),
      next_address);
  const int kLevelOffset = Offset(
      ExternalReference::handle_scope_level_address(isolate()),
      next_address);
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate());

  DCHECK(rdx.is(function_address) || r8.is(function_address));
  // Allocate HandleScope in callee-save registers so the values survive
  // the C call below.
  Register prev_next_address_reg = r14;
  Register prev_limit_reg = rbx;
  Register base_reg = r15;
  Move(base_reg, next_address);
  movp(prev_next_address_reg, Operand(base_reg, kNextOffset));
  movp(prev_limit_reg, Operand(base_reg, kLimitOffset));
  addl(Operand(base_reg, kLevelOffset), Immediate(1));

  // Optionally log entry into external code for the timer-event profiler.
  if (FLAG_log_timer_events) {
    FrameScope frame(this, StackFrame::MANUAL);
    PushSafepointRegisters();
    PrepareCallCFunction(1);
    LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate()));
    CallCFunction(ExternalReference::log_enter_external_function(isolate()), 1);
    PopSafepointRegisters();
  }

  // If the CPU profiler is active, call through the invocation thunk so
  // the profiler can record the external callback; otherwise call the
  // API function directly.
  Label profiler_disabled;
  Label end_profiler_check;
  Move(rax, ExternalReference::is_profiling_address(isolate()));
  cmpb(Operand(rax, 0), Immediate(0));
  j(zero, &profiler_disabled);

  // Third parameter is the address of the actual getter function.
  Move(thunk_last_arg, function_address);
  Move(rax, thunk_ref);
  jmp(&end_profiler_check);

  bind(&profiler_disabled);
  // Call the api function directly.
  Move(rax, function_address);

  bind(&end_profiler_check);

  // Call the api function!
  call(rax);

  // Optionally log the return from external code.
  if (FLAG_log_timer_events) {
    FrameScope frame(this, StackFrame::MANUAL);
    PushSafepointRegisters();
    PrepareCallCFunction(1);
    LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate()));
    CallCFunction(ExternalReference::log_leave_external_function(isolate()), 1);
    PopSafepointRegisters();
  }

  // Load the value from ReturnValue.
  movp(rax, return_value_operand);
  bind(&prologue);

  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  subl(Operand(base_reg, kLevelOffset), Immediate(1));
  movp(Operand(base_reg, kNextOffset), prev_next_address_reg);
  // If the callee grew the handle-scope limit, extensions were allocated
  // and must be deleted before leaving.
  cmpp(prev_limit_reg, Operand(base_reg, kLimitOffset));
  j(not_equal, &delete_allocated_handles);
  bind(&leave_exit_frame);

  // Check if the function scheduled an exception (slot holds the hole
  // when no exception is pending).
  Move(rsi, scheduled_exception_address);
  Cmp(Operand(rsi, 0), factory->the_hole_value());
  j(not_equal, &promote_scheduled_exception);
  bind(&exception_handled);

#if ENABLE_EXTRA_CHECKS
  // Check if the function returned a valid JavaScript value: a smi, a
  // non-internal heap object (string or spec object), a heap number, or
  // one of the oddballs undefined/true/false/null.
  Label ok;
  Register return_value = rax;
  Register map = rcx;

  JumpIfSmi(return_value, &ok, Label::kNear);
  movp(map, FieldOperand(return_value, HeapObject::kMapOffset));

  CmpInstanceType(map, FIRST_NONSTRING_TYPE);
  j(below, &ok, Label::kNear);

  CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
  j(above_equal, &ok, Label::kNear);

  CompareRoot(map, Heap::kHeapNumberMapRootIndex);
  j(equal, &ok, Label::kNear);

  CompareRoot(return_value, Heap::kUndefinedValueRootIndex);
  j(equal, &ok, Label::kNear);

  CompareRoot(return_value, Heap::kTrueValueRootIndex);
  j(equal, &ok, Label::kNear);

  CompareRoot(return_value, Heap::kFalseValueRootIndex);
  j(equal, &ok, Label::kNear);

  CompareRoot(return_value, Heap::kNullValueRootIndex);
  j(equal, &ok, Label::kNear);

  Abort(kAPICallReturnedInvalidObject);

  bind(&ok);
#endif

  // Restore the context (if requested), tear down the exit frame and
  // return, popping the arguments.
  bool restore_context = context_restore_operand != NULL;
  if (restore_context) {
    movp(rsi, *context_restore_operand);
  }
  LeaveApiExitFrame(!restore_context);
  ret(stack_space * kPointerSize);

  // Out-of-line: promote the scheduled exception to a thrown one, then
  // rejoin the main path.
  bind(&promote_scheduled_exception);
  {
    FrameScope frame(this, StackFrame::INTERNAL);
    CallRuntime(Runtime::kPromoteScheduledException, 0);
  }
  jmp(&exception_handled);

  // HandleScope limit has changed. Delete allocated extensions.
  bind(&delete_allocated_handles);
  movp(Operand(base_reg, kLimitOffset), prev_limit_reg);
  movp(prev_limit_reg, rax);  // Preserve the return value across the C call.
  LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate()));
  LoadAddress(rax,
              ExternalReference::delete_handle_scope_extensions(isolate()));
  call(rax);
  movp(rax, prev_limit_reg);
  jmp(&leave_exit_frame);
}
862
863
// Jumps (tail-call, no return) to the C++ function |ext| via the CEntry
// stub.  The target address travels in rbx, as CEntryStub expects.
void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
                                             int result_size) {
  // Set the entry point and jump to the C entry runtime stub.
  LoadAddress(rbx, ext);
  CEntryStub ces(isolate(), result_size);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}
871
872
// Invokes the JavaScript builtin |id| with |flag| deciding between a
// call and a tail-call jump.  Clobbers rdx (builtin entry) and rdi (set
// by GetBuiltinEntry).
void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                   InvokeFlag flag,
                                   const CallWrapper& call_wrapper) {
  // You can't call a builtin without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  // Rely on the assertion to check that the number of provided
  // arguments match the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinEntry(rdx, id);
  InvokeCode(rdx, expected, expected, flag, call_wrapper);
}
886
Andrei Popescu402d9372010-02-26 13:31:12 +0000887
// Loads the JSFunction for builtin |id| into |target| by chasing
// context -> global object -> builtins object -> function slot.
void MacroAssembler::GetBuiltinFunction(Register target,
                                        Builtins::JavaScript id) {
  // Load the builtins object into target register.
  movp(target, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  movp(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
  movp(target, FieldOperand(target,
                            JSBuiltinsObject::OffsetOfFunctionWithId(id)));
}
Steve Block6ded16b2010-05-10 14:33:55 +0100896
Steve Block791712a2010-08-27 10:21:07 +0100897
// Loads the code entry point of builtin |id| into |target|.  Clobbers
// rdi (it holds the builtin's JSFunction afterwards), hence the DCHECK
// that |target| is not rdi.
void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  DCHECK(!target.is(rdi));
  // Load the JavaScript builtin function from the builtins object.
  GetBuiltinFunction(rdi, id);
  movp(target, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
}
904
905
// Table of the general-purpose registers spilled/restored by
// PushCallerSaved/PopCallerSaved below.  r12-r15 and rsp are
// deliberately absent (see the comments in PushCallerSaved).  The
// temporary REG macro keeps the initializer list compact.
#define REG(Name) { kRegister_ ## Name ## _Code }

static const Register saved_regs[] = {
  REG(rax), REG(rcx), REG(rdx), REG(rbx), REG(rbp), REG(rsi), REG(rdi), REG(r8),
  REG(r9), REG(r10), REG(r11)
};

#undef REG

// Number of entries in saved_regs.
static const int kNumberOfSavedRegs = sizeof(saved_regs) / sizeof(Register);
916
917
// Pushes every register in saved_regs except up to three exclusions,
// and, when fp_mode == kSaveFPRegs, spills all XMM registers to the
// stack as well.  Must be undone by PopCallerSaved with identical
// arguments so the stack layouts match.
void MacroAssembler::PushCallerSaved(SaveFPRegsMode fp_mode,
                                     Register exclusion1,
                                     Register exclusion2,
                                     Register exclusion3) {
  // We don't allow a GC during a store buffer overflow so there is no need to
  // store the registers in any particular way, but we do have to store and
  // restore them.
  for (int i = 0; i < kNumberOfSavedRegs; i++) {
    Register reg = saved_regs[i];
    if (!reg.is(exclusion1) && !reg.is(exclusion2) && !reg.is(exclusion3)) {
      pushq(reg);
    }
  }
  // R12 to r15 are callee save on all platforms.
  if (fp_mode == kSaveFPRegs) {
    // Reserve one slot per XMM register, then spill them all.
    subp(rsp, Immediate(kDoubleSize * XMMRegister::kMaxNumRegisters));
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movsd(Operand(rsp, i * kDoubleSize), reg);
    }
  }
}
940
941
// Restores the registers spilled by PushCallerSaved, in exactly the
// reverse order: first the XMM registers (if fp_mode == kSaveFPRegs),
// then the general-purpose registers from saved_regs, skipping the same
// exclusions.
void MacroAssembler::PopCallerSaved(SaveFPRegsMode fp_mode,
                                    Register exclusion1,
                                    Register exclusion2,
                                    Register exclusion3) {
  if (fp_mode == kSaveFPRegs) {
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movsd(reg, Operand(rsp, i * kDoubleSize));
    }
    addp(rsp, Immediate(kDoubleSize * XMMRegister::kMaxNumRegisters));
  }
  // Pop in reverse push order.
  for (int i = kNumberOfSavedRegs - 1; i >= 0; i--) {
    Register reg = saved_regs[i];
    if (!reg.is(exclusion1) && !reg.is(exclusion2) && !reg.is(exclusion3)) {
      popq(reg);
    }
  }
}
960
961
// Converts the 32-bit integer in |src| to a double in |dst|.  The xorps
// zeroes |dst| first; cvtsi2sd only writes the low lanes, so clearing
// the register avoids a false dependence on its previous contents.
void MacroAssembler::Cvtlsi2sd(XMMRegister dst, Register src) {
  xorps(dst, dst);
  cvtlsi2sd(dst, src);
}
966
967
// Memory-operand variant of Cvtlsi2sd above: converts the 32-bit
// integer at |src| to a double in |dst|, zeroing |dst| first to break
// the partial-register dependence of cvtsi2sd.
void MacroAssembler::Cvtlsi2sd(XMMRegister dst, const Operand& src) {
  xorps(dst, dst);
  cvtlsi2sd(dst, src);
}
972
973
// Loads a value of representation |r| from |src| into |dst|, choosing
// the mov variant that applies the correct width and sign/zero
// extension.  Doubles are not handled here (DCHECK).
void MacroAssembler::Load(Register dst, const Operand& src, Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8()) {
    movsxbq(dst, src);   // Sign-extend byte.
  } else if (r.IsUInteger8()) {
    movzxbl(dst, src);   // Zero-extend byte.
  } else if (r.IsInteger16()) {
    movsxwq(dst, src);   // Sign-extend word.
  } else if (r.IsUInteger16()) {
    movzxwl(dst, src);   // Zero-extend word.
  } else if (r.IsInteger32()) {
    movl(dst, src);
  } else {
    movp(dst, src);      // Tagged / pointer-sized value.
  }
}
990
991
// Stores |src| to |dst| with the width implied by representation |r|.
// For tagged stores, asserts that the value matches the declared
// heap-object/smi representation.  Doubles are not handled here.
void MacroAssembler::Store(const Operand& dst, Register src, Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8() || r.IsUInteger8()) {
    movb(dst, src);
  } else if (r.IsInteger16() || r.IsUInteger16()) {
    movw(dst, src);
  } else if (r.IsInteger32()) {
    movl(dst, src);
  } else {
    if (r.IsHeapObject()) {
      AssertNotSmi(src);
    } else if (r.IsSmi()) {
      AssertSmi(src);
    }
    movp(dst, src);
  }
}
1009
1010
// Loads the 64-bit constant |x| into |dst| using the shortest encoding:
// xor for zero, a 32-bit move (zero- or sign-extended) when the value
// fits, and a full 64-bit immediate otherwise.
void MacroAssembler::Set(Register dst, int64_t x) {
  if (x == 0) {
    xorl(dst, dst);  // 32-bit xor also clears the upper half.
  } else if (is_uint32(x)) {
    movl(dst, Immediate(static_cast<uint32_t>(x)));  // Zero-extends.
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));   // Sign-extends.
  } else {
    movq(dst, x);  // Full 64-bit immediate.
  }
}
1022
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001023
// Stores the pointer-sized constant |x| to memory at |dst|.  On 64-bit
// pointers, values that don't fit a sign-extended 32-bit immediate go
// through kScratchRegister; with 32-bit pointers a direct store always
// suffices.
void MacroAssembler::Set(const Operand& dst, intptr_t x) {
  if (kPointerSize == kInt64Size) {
    if (is_int32(x)) {
      movp(dst, Immediate(static_cast<int32_t>(x)));
    } else {
      Set(kScratchRegister, x);
      movp(dst, kScratchRegister);
    }
  } else {
    movp(dst, Immediate(static_cast<int32_t>(x)));
  }
}
1036
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001037
Steve Blocka7e24c12009-10-30 11:49:00 +00001038// ----------------------------------------------------------------------------
1039// Smi tagging, untagging and tag detection.
1040
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001041bool MacroAssembler::IsUnsafeInt(const int32_t x) {
1042 static const int kMaxBits = 17;
1043 return !is_intn(x, kMaxBits);
1044}
1045
1046
// Moves the smi |src| into |dst| without ever embedding the raw value
// as an immediate in the code stream: "unsafe" values are XOR-masked
// with the per-isolate JIT cookie and unmasked with a second XOR.
// Presumably a JIT-spraying mitigation — the emitted immediates never
// equal attacker-chosen data (TODO confirm against jit_cookie docs).
void MacroAssembler::SafeMove(Register dst, Smi* src) {
  DCHECK(!dst.is(kScratchRegister));
  if (IsUnsafeInt(src->value()) && jit_cookie() != 0) {
    if (SmiValuesAre32Bits()) {
      // JIT cookie can be converted to Smi.
      Move(dst, Smi::FromInt(src->value() ^ jit_cookie()));
      Move(kScratchRegister, Smi::FromInt(jit_cookie()));
      xorp(dst, kScratchRegister);
    } else {
      DCHECK(SmiValuesAre31Bits());
      // With 31-bit smis the tagged value fits in 32 bits, so the mask
      // can be applied to the raw bit pattern directly.
      int32_t value = static_cast<int32_t>(reinterpret_cast<intptr_t>(src));
      movp(dst, Immediate(value ^ jit_cookie()));
      xorp(dst, Immediate(jit_cookie()));
    }
  } else {
    Move(dst, src);
  }
}
1065
1066
// Pushes the smi |src| onto the stack; like SafeMove, "unsafe" values
// are XOR-masked with the JIT cookie so the raw constant never appears
// in the instruction stream, then unmasked in place on the stack.
void MacroAssembler::SafePush(Smi* src) {
  if (IsUnsafeInt(src->value()) && jit_cookie() != 0) {
    if (SmiValuesAre32Bits()) {
      // JIT cookie can be converted to Smi.
      Push(Smi::FromInt(src->value() ^ jit_cookie()));
      Move(kScratchRegister, Smi::FromInt(jit_cookie()));
      xorp(Operand(rsp, 0), kScratchRegister);
    } else {
      DCHECK(SmiValuesAre31Bits());
      int32_t value = static_cast<int32_t>(reinterpret_cast<intptr_t>(src));
      Push(Immediate(value ^ jit_cookie()));
      xorp(Operand(rsp, 0), Immediate(jit_cookie()));
    }
  } else {
    Push(src);
  }
}
1084
1085
// Returns a register holding the smi constant |source|: the dedicated
// kSmiConstantRegister for the common value 1, otherwise
// kScratchRegister loaded via LoadSmiConstant (xor for 0).
Register MacroAssembler::GetSmiConstant(Smi* source) {
  int value = source->value();
  if (value == 0) {
    xorl(kScratchRegister, kScratchRegister);
    return kScratchRegister;
  }
  if (value == 1) {
    return kSmiConstantRegister;  // Permanently holds smi 1.
  }
  LoadSmiConstant(kScratchRegister, source);
  return kScratchRegister;
}
1098
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001099
// Loads the smi constant |source| into |dst|.  Small magnitudes are
// synthesized from kSmiConstantRegister (which holds smi 1 — see the
// "case 1" below) with lea scale tricks instead of a 10-byte movabs;
// anything else falls back to a full 64-bit immediate move.
void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
  if (emit_debug_code()) {
    // Verify kSmiConstantRegister still holds its expected value.
    Move(dst, Smi::FromInt(kSmiConstantRegisterValue),
         Assembler::RelocInfoNone());
    cmpp(dst, kSmiConstantRegister);
    Assert(equal, kUninitializedKSmiConstantRegister);
  }
  int value = source->value();
  if (value == 0) {
    xorl(dst, dst);
    return;
  }
  bool negative = value < 0;
  unsigned int uvalue = negative ? -value : value;

  // Build |uvalue| * smi(1) using lea's base + index*scale forms.
  switch (uvalue) {
    case 9:
      leap(dst,
           Operand(kSmiConstantRegister, kSmiConstantRegister, times_8, 0));
      break;
    case 8:
      xorl(dst, dst);
      leap(dst, Operand(dst, kSmiConstantRegister, times_8, 0));
      break;
    case 4:
      xorl(dst, dst);
      leap(dst, Operand(dst, kSmiConstantRegister, times_4, 0));
      break;
    case 5:
      leap(dst,
           Operand(kSmiConstantRegister, kSmiConstantRegister, times_4, 0));
      break;
    case 3:
      leap(dst,
           Operand(kSmiConstantRegister, kSmiConstantRegister, times_2, 0));
      break;
    case 2:
      leap(dst,
           Operand(kSmiConstantRegister, kSmiConstantRegister, times_1, 0));
      break;
    case 1:
      movp(dst, kSmiConstantRegister);
      break;
    case 0:
      UNREACHABLE();  // Handled above.
      return;
    default:
      // General case: full 64-bit immediate, no relocation info.
      Move(dst, source, Assembler::RelocInfoNone());
      return;
  }
  if (negative) {
    negp(dst);
  }
}
1154
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001155
// Tags the 32-bit integer in |src| as a smi in |dst| by shifting it
// left by kSmiShift (smi tag is 0, so no tag bits need setting).
void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  if (!dst.is(src)) {
    movl(dst, src);
  }
  shlp(dst, Immediate(kSmiShift));
}
1163
1164
// Stores the 32-bit integer |src| as a smi into the memory field |dst|.
// With 32-bit smis only the upper half of the field needs writing (the
// low half is the zero tag); with 31-bit smis the value is tagged in
// kScratchRegister first.
void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
  if (emit_debug_code()) {
    // The destination must currently hold a smi (bit 0 clear).
    testb(dst, Immediate(0x01));
    Label ok;
    j(zero, &ok, Label::kNear);
    Abort(kInteger32ToSmiFieldWritingToNonSmiLocation);
    bind(&ok);
  }

  if (SmiValuesAre32Bits()) {
    DCHECK(kSmiShift % kBitsPerByte == 0);
    movl(Operand(dst, kSmiShift / kBitsPerByte), src);
  } else {
    DCHECK(SmiValuesAre31Bits());
    Integer32ToSmi(kScratchRegister, src);
    movp(dst, kScratchRegister);
  }
}
1183
1184
// Computes dst = smi(src + constant): adds the constant in 32-bit
// arithmetic (lea for the non-aliasing case), then tags via shift.
void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
                                                Register src,
                                                int constant) {
  if (dst.is(src)) {
    addl(dst, Immediate(constant));
  } else {
    leal(dst, Operand(src, constant));  // Add without clobbering src.
  }
  shlp(dst, Immediate(kSmiShift));
}
1195
1196
// Untags the smi in |src| into a 32-bit integer in |dst|.  32-bit smis
// use a logical shift (result lands in the low half); 31-bit smis use
// an arithmetic shift to preserve the sign.
void MacroAssembler::SmiToInteger32(Register dst, Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  if (!dst.is(src)) {
    movp(dst, src);
  }

  if (SmiValuesAre32Bits()) {
    shrp(dst, Immediate(kSmiShift));
  } else {
    DCHECK(SmiValuesAre31Bits());
    sarl(dst, Immediate(kSmiShift));
  }
}
1210
1211
// Untags a smi loaded from memory into a 32-bit integer.  With 32-bit
// smis the payload is simply the upper half of the field, so it can be
// read directly without shifting.
void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) {
  if (SmiValuesAre32Bits()) {
    movl(dst, Operand(src, kSmiShift / kBitsPerByte));
  } else {
    DCHECK(SmiValuesAre31Bits());
    movl(dst, src);
    sarl(dst, Immediate(kSmiShift));
  }
}
1221
1222
// Untags the smi in |src| into a sign-extended 64-bit integer in |dst|.
void MacroAssembler::SmiToInteger64(Register dst, Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  if (!dst.is(src)) {
    movp(dst, src);
  }
  sarp(dst, Immediate(kSmiShift));
  if (kPointerSize == kInt32Size) {
    // Sign extend to 64-bit.
    movsxlq(dst, dst);
  }
}
1234
1235
// Memory variant of SmiToInteger64: with 32-bit smis the payload half
// of the field can be sign-extend-loaded directly; 31-bit smis load the
// whole field and untag via the register variant.
void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) {
  if (SmiValuesAre32Bits()) {
    movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte));
  } else {
    DCHECK(SmiValuesAre31Bits());
    movp(dst, src);
    SmiToInteger64(dst, dst);
  }
}
1245
1246
// Sets the flags from testing the smi |src| against itself (e.g. zero
// flag iff src is smi 0).  Debug builds assert |src| is actually a smi.
void MacroAssembler::SmiTest(Register src) {
  AssertSmi(src);
  testp(src, src);
}
1251
1252
// Compares two smi registers, setting the flags for a following
// conditional jump.  Debug builds assert both operands are smis.
void MacroAssembler::SmiCompare(Register smi1, Register smi2) {
  AssertSmi(smi1);
  AssertSmi(smi2);
  cmpp(smi1, smi2);
}
1258
1259
// Compares the smi in |dst| against the smi constant |src| (asserting
// |dst| is a smi in debug builds); flags are set for a later jump.
void MacroAssembler::SmiCompare(Register dst, Smi* src) {
  AssertSmi(dst);
  Cmp(dst, src);
}
1264
1265
// Compares |dst| with the smi constant |src|.  Zero compares via
// test dst,dst; other constants are materialized by GetSmiConstant
// (may clobber kScratchRegister, hence the DCHECK).
void MacroAssembler::Cmp(Register dst, Smi* src) {
  DCHECK(!dst.is(kScratchRegister));
  if (src->value() == 0) {
    testp(dst, dst);
  } else {
    Register constant_reg = GetSmiConstant(src);
    cmpp(dst, constant_reg);
  }
}
1275
1276
// Compares the smi register |dst| with the smi in memory |src|.
void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
  AssertSmi(dst);
  AssertSmi(src);
  cmpp(dst, src);
}
1282
1283
// Compares the smi in memory |dst| with the smi register |src|.
void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
  AssertSmi(dst);
  AssertSmi(src);
  cmpp(dst, src);
}
1289
1290
// Compares the smi in memory |dst| with the smi constant |src|.  With
// 32-bit smis only the payload half of the field is compared against
// the raw integer value; with 31-bit smis the whole tagged word fits
// in a 32-bit immediate.
void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
  AssertSmi(dst);
  if (SmiValuesAre32Bits()) {
    cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
  } else {
    DCHECK(SmiValuesAre31Bits());
    cmpl(dst, Immediate(src));
  }
}
1300
1301
// Compares the memory operand |dst| with the smi constant |src|,
// materialized in a register that must not be part of the address.
void MacroAssembler::Cmp(const Operand& dst, Smi* src) {
  // The Operand cannot use the smi register.
  Register smi_reg = GetSmiConstant(src);
  DCHECK(!dst.AddressUsesRegister(smi_reg));
  cmpp(dst, smi_reg);
}
1308
1309
// Compares the smi stored at |dst| with the untagged 32-bit integer in
// |src|.  With 32-bit smis the payload half is compared directly; with
// 31-bit smis the smi is untagged into kScratchRegister first.
void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) {
  if (SmiValuesAre32Bits()) {
    cmpl(Operand(dst, kSmiShift / kBitsPerByte), src);
  } else {
    DCHECK(SmiValuesAre31Bits());
    SmiToInteger32(kScratchRegister, dst);
    cmpl(kScratchRegister, src);
  }
}
1319
1320
// Computes dst = untag(src) * 2^power for a non-negative smi |src|,
// folding the untagging shift and the multiply shift into a single
// shift by (power - kSmiShift) in either direction.
void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
                                                           Register src,
                                                           int power) {
  DCHECK(power >= 0);
  DCHECK(power < 64);
  if (power == 0) {
    SmiToInteger64(dst, src);
    return;
  }
  if (!dst.is(src)) {
    movp(dst, src);
  }
  if (power < kSmiShift) {
    sarp(dst, Immediate(kSmiShift - power));
  } else if (power > kSmiShift) {
    shlp(dst, Immediate(power - kSmiShift));
  }
  // power == kSmiShift: the tag shift and the multiply cancel exactly.
}
1339
1340
// Computes dst = untag(src) / 2^power for a non-negative smi |src| by
// folding the untag and divide into one logical right shift.  Only the
// in-place (dst == src) form is implemented.
void MacroAssembler::PositiveSmiDivPowerOfTwoToInteger32(Register dst,
                                                         Register src,
                                                         int power) {
  DCHECK((0 <= power) && (power < 32));
  if (dst.is(src)) {
    shrp(dst, Immediate(power + kSmiShift));
  } else {
    UNIMPLEMENTED();  // Not used.
  }
}
1351
1352
// Computes dst = src1 | src2 when both inputs are smis; otherwise jumps
// to |on_not_smis|.  When |dst| aliases an input, the OR is computed in
// kScratchRegister so |dst| is only written after the smi check passes;
// in the non-aliasing case |dst| may already hold the OR when the jump
// is taken.
void MacroAssembler::SmiOrIfSmis(Register dst, Register src1, Register src2,
                                 Label* on_not_smis,
                                 Label::Distance near_jump) {
  if (dst.is(src1) || dst.is(src2)) {
    DCHECK(!src1.is(kScratchRegister));
    DCHECK(!src2.is(kScratchRegister));
    movp(kScratchRegister, src1);
    orp(kScratchRegister, src2);
    JumpIfNotSmi(kScratchRegister, on_not_smis, near_jump);
    movp(dst, kScratchRegister);
  } else {
    movp(dst, src1);
    orp(dst, src2);
    JumpIfNotSmi(dst, on_not_smis, near_jump);
  }
}
1369
1370
// Tests the smi tag bit of |src|; returns the condition (zero) that is
// taken when |src| is a smi.
Condition MacroAssembler::CheckSmi(Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}
1376
1377
// Memory variant of CheckSmi: tests the tag bit of the value at |src|;
// the returned condition (zero) is taken when it is a smi.
Condition MacroAssembler::CheckSmi(const Operand& src) {
  STATIC_ASSERT(kSmiTag == 0);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}
1383
1384
// Checks that |src| is a smi AND non-negative.  Rotating left by one
// moves the sign bit into bit 0 next to the tag bit (now bit 1), so a
// single test of the low two bits covers mask 0x8000000000000001.
// Returns the condition (zero) taken when both bits are clear.
Condition MacroAssembler::CheckNonNegativeSmi(Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  // Test that both bits of the mask 0x8000000000000001 are zero.
  movp(kScratchRegister, src);
  rolp(kScratchRegister, Immediate(1));
  testb(kScratchRegister, Immediate(3));
  return zero;
}
1393
1394
// Checks that both registers hold smis; returns the condition (zero)
// taken when they do.  With 32-bit smis, adding the two values cannot
// carry into the low two bits (both low halves are the zero tag), so
// one lea + test suffices; with 31-bit smis the tag bits are OR-ed.
Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  STATIC_ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3);
  if (SmiValuesAre32Bits()) {
    leal(kScratchRegister, Operand(first, second, times_1, 0));
    testb(kScratchRegister, Immediate(0x03));
  } else {
    DCHECK(SmiValuesAre31Bits());
    movl(kScratchRegister, first);
    orl(kScratchRegister, second);
    testb(kScratchRegister, Immediate(kSmiTagMask));
  }
  return zero;
}
1411
1412
// Checks that both registers hold non-negative smis: OR-ing the values
// combines their sign and tag bits, then the rotate-and-test trick from
// CheckNonNegativeSmi checks both at once.  Returns the condition
// (zero) taken on success.
Condition MacroAssembler::CheckBothNonNegativeSmi(Register first,
                                                  Register second) {
  if (first.is(second)) {
    return CheckNonNegativeSmi(first);
  }
  movp(kScratchRegister, first);
  orp(kScratchRegister, second);
  rolp(kScratchRegister, Immediate(1));
  testl(kScratchRegister, Immediate(3));
  return zero;
}
1424
1425
// Sets flags so that |zero| holds iff at least one of |first|/|second| is a
// smi. |scratch| is clobbered (it may alias either input).
Condition MacroAssembler::CheckEitherSmi(Register first,
                                         Register second,
                                         Register scratch) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  // AND the two values: the tag bit of the result is clear iff at least one
  // operand had a clear tag bit (i.e. was a smi, since kSmiTag == 0).
  if (scratch.is(second)) {
    andl(scratch, first);
  } else {
    if (!scratch.is(first)) {
      movl(scratch, first);
    }
    andl(scratch, second);
  }
  testb(scratch, Immediate(kSmiTagMask));
  return zero;
}
1443
1444
// Sets flags so that |overflow| holds iff |src| equals the minimal smi
// value. Compares against kSmiConstantRegister (which the comment below
// implies holds smi 1: subtracting it from Smi::kMinValue overflows).
Condition MacroAssembler::CheckIsMinSmi(Register src) {
  DCHECK(!src.is(kScratchRegister));
  // If we overflow by subtracting one, it's the minimal smi value.
  cmpp(src, kSmiConstantRegister);
  return overflow;
}
1451
Steve Blocka7e24c12009-10-30 11:49:00 +00001452
// Sets flags so that the returned condition holds iff the signed 32-bit
// value in |src| fits in a smi. With 32-bit smi payloads every int32 fits,
// so |always| is returned without emitting code.
Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {
  if (SmiValuesAre32Bits()) {
    // A 32-bit integer value can always be converted to a smi.
    return always;
  } else {
    DCHECK(SmiValuesAre31Bits());
    // Valid 31-bit smi payloads lie in [-2^30, 2^30); adding 0x40000000
    // conceptually, the cmp against 0xc0000000 leaves |positive| set
    // exactly for values in that range.
    cmpl(src, Immediate(0xc0000000));
    return positive;
  }
}
1463
1464
// Sets flags so that the returned condition holds iff the unsigned 32-bit
// value in |src| fits in a smi.
Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
  if (SmiValuesAre32Bits()) {
    // An unsigned 32-bit integer value is valid as long as the high bit
    // is not set.
    testl(src, src);
    return positive;
  } else {
    DCHECK(SmiValuesAre31Bits());
    // For 31-bit smis the top two bits must both be clear.
    testl(src, Immediate(0xc0000000));
    return zero;
  }
}
1477
1478
Steve Block1e0659c2011-05-24 12:43:12 +01001479void MacroAssembler::CheckSmiToIndicator(Register dst, Register src) {
1480 if (dst.is(src)) {
1481 andl(dst, Immediate(kSmiTagMask));
1482 } else {
1483 movl(dst, Immediate(kSmiTagMask));
1484 andl(dst, src);
1485 }
1486}
1487
1488
1489void MacroAssembler::CheckSmiToIndicator(Register dst, const Operand& src) {
1490 if (!(src.AddressUsesRegister(dst))) {
1491 movl(dst, Immediate(kSmiTagMask));
1492 andl(dst, src);
1493 } else {
1494 movl(dst, src);
1495 andl(dst, Immediate(kSmiTagMask));
1496 }
1497}
1498
1499
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001500void MacroAssembler::JumpIfValidSmiValue(Register src,
1501 Label* on_valid,
1502 Label::Distance near_jump) {
1503 Condition is_valid = CheckInteger32ValidSmiValue(src);
1504 j(is_valid, on_valid, near_jump);
1505}
1506
1507
Ben Murdoch257744e2011-11-30 15:57:28 +00001508void MacroAssembler::JumpIfNotValidSmiValue(Register src,
1509 Label* on_invalid,
1510 Label::Distance near_jump) {
1511 Condition is_valid = CheckInteger32ValidSmiValue(src);
1512 j(NegateCondition(is_valid), on_invalid, near_jump);
1513}
1514
1515
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001516void MacroAssembler::JumpIfUIntValidSmiValue(Register src,
1517 Label* on_valid,
1518 Label::Distance near_jump) {
1519 Condition is_valid = CheckUInteger32ValidSmiValue(src);
1520 j(is_valid, on_valid, near_jump);
1521}
1522
1523
Ben Murdoch257744e2011-11-30 15:57:28 +00001524void MacroAssembler::JumpIfUIntNotValidSmiValue(Register src,
1525 Label* on_invalid,
1526 Label::Distance near_jump) {
1527 Condition is_valid = CheckUInteger32ValidSmiValue(src);
1528 j(NegateCondition(is_valid), on_invalid, near_jump);
1529}
1530
1531
1532void MacroAssembler::JumpIfSmi(Register src,
1533 Label* on_smi,
1534 Label::Distance near_jump) {
1535 Condition smi = CheckSmi(src);
1536 j(smi, on_smi, near_jump);
1537}
1538
1539
1540void MacroAssembler::JumpIfNotSmi(Register src,
1541 Label* on_not_smi,
1542 Label::Distance near_jump) {
1543 Condition smi = CheckSmi(src);
1544 j(NegateCondition(smi), on_not_smi, near_jump);
1545}
1546
1547
1548void MacroAssembler::JumpUnlessNonNegativeSmi(
1549 Register src, Label* on_not_smi_or_negative,
1550 Label::Distance near_jump) {
1551 Condition non_negative_smi = CheckNonNegativeSmi(src);
1552 j(NegateCondition(non_negative_smi), on_not_smi_or_negative, near_jump);
1553}
1554
1555
// Branches to |on_equals| when the smi in |src| equals |constant|.
void MacroAssembler::JumpIfSmiEqualsConstant(Register src,
                                             Smi* constant,
                                             Label* on_equals,
                                             Label::Distance near_jump) {
  SmiCompare(src, constant);
  j(equal, on_equals, near_jump);
}
1563
1564
1565void MacroAssembler::JumpIfNotBothSmi(Register src1,
1566 Register src2,
1567 Label* on_not_both_smi,
1568 Label::Distance near_jump) {
1569 Condition both_smi = CheckBothSmi(src1, src2);
1570 j(NegateCondition(both_smi), on_not_both_smi, near_jump);
1571}
1572
1573
1574void MacroAssembler::JumpUnlessBothNonNegativeSmi(Register src1,
1575 Register src2,
1576 Label* on_not_both_smi,
1577 Label::Distance near_jump) {
1578 Condition both_smi = CheckBothNonNegativeSmi(src1, src2);
1579 j(NegateCondition(both_smi), on_not_both_smi, near_jump);
1580}
1581
1582
// Computes dst = src + constant (smi arithmetic) with no overflow check.
// Small constants (1, 2, 4, 8) are synthesized from kSmiConstantRegister
// (which per these scaled addressing modes holds smi 1) to avoid loading
// an immediate. May clobber kScratchRegister via GetSmiConstant in the
// default cases.
void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    // Adding zero is a plain register move (or a no-op when aliased).
    if (!dst.is(src)) {
      movp(dst, src);
    }
    return;
  } else if (dst.is(src)) {
    DCHECK(!dst.is(kScratchRegister));
    switch (constant->value()) {
      case 1:
        addp(dst, kSmiConstantRegister);
        return;
      case 2:
        // dst = src + 2 * smi(1), via scaled lea.
        leap(dst, Operand(src, kSmiConstantRegister, times_2, 0));
        return;
      case 4:
        leap(dst, Operand(src, kSmiConstantRegister, times_4, 0));
        return;
      case 8:
        leap(dst, Operand(src, kSmiConstantRegister, times_8, 0));
        return;
      default:
        Register constant_reg = GetSmiConstant(constant);
        addp(dst, constant_reg);
        return;
    }
  } else {
    switch (constant->value()) {
      case 1:
        leap(dst, Operand(src, kSmiConstantRegister, times_1, 0));
        return;
      case 2:
        leap(dst, Operand(src, kSmiConstantRegister, times_2, 0));
        return;
      case 4:
        leap(dst, Operand(src, kSmiConstantRegister, times_4, 0));
        return;
      case 8:
        leap(dst, Operand(src, kSmiConstantRegister, times_8, 0));
        return;
      default:
        // dst and src are distinct, so the constant can be materialized in
        // dst first and src added on top.
        LoadSmiConstant(dst, constant);
        addp(dst, src);
        return;
    }
  }
}
1630
1631
// Adds |constant| to the smi stored at memory operand |dst|, in place, with
// no overflow check.
void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
  if (constant->value() != 0) {
    if (SmiValuesAre32Bits()) {
      // The 32-bit payload lives in the upper half of the word; add the raw
      // integer value directly to that half-word.
      addl(Operand(dst, kSmiShift / kBitsPerByte),
           Immediate(constant->value()));
    } else {
      DCHECK(SmiValuesAre31Bits());
      // 31-bit smis fit in an immediate; add the tagged value directly.
      addp(dst, Immediate(constant));
    }
  }
}
1643
1644
// Computes dst = src + constant with overflow behavior controlled by
// |mode|:
//  - BAILOUT_ON_NO_OVERFLOW: jump to |bailout_label| when the add does NOT
//    overflow (dst is restored first; requires PRESERVE_SOURCE_REGISTER).
//  - BAILOUT_ON_OVERFLOW: jump to |bailout_label| on overflow, restoring
//    dst first when PRESERVE_SOURCE_REGISTER is set.
// Clobbers kScratchRegister when dst aliases src.
void MacroAssembler::SmiAddConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    SmiOperationExecutionMode mode,
                                    Label* bailout_label,
                                    Label::Distance near_jump) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movp(dst, src);
    }
  } else if (dst.is(src)) {
    DCHECK(!dst.is(kScratchRegister));
    LoadSmiConstant(kScratchRegister, constant);
    addp(dst, kScratchRegister);
    if (mode.Contains(BAILOUT_ON_NO_OVERFLOW)) {
      j(no_overflow, bailout_label, near_jump);
      DCHECK(mode.Contains(PRESERVE_SOURCE_REGISTER));
      // Undo the add so dst (== src) holds its original value on overflow.
      subp(dst, kScratchRegister);
    } else if (mode.Contains(BAILOUT_ON_OVERFLOW)) {
      if (mode.Contains(PRESERVE_SOURCE_REGISTER)) {
        Label done;
        j(no_overflow, &done, Label::kNear);
        subp(dst, kScratchRegister);
        jmp(bailout_label, near_jump);
        bind(&done);
      } else {
        // Bailout if overflow without preserving src.
        j(overflow, bailout_label, near_jump);
      }
    } else {
      CHECK(mode.IsEmpty());
    }
  } else {
    // dst is distinct from src, so src is untouched and no restore is
    // needed before bailing out.
    DCHECK(mode.Contains(PRESERVE_SOURCE_REGISTER));
    DCHECK(mode.Contains(BAILOUT_ON_OVERFLOW));
    LoadSmiConstant(dst, constant);
    addp(dst, src);
    j(overflow, bailout_label, near_jump);
  }
}
1685
1686
// Computes dst = src - constant (smi arithmetic) with no overflow check.
// May clobber kScratchRegister via GetSmiConstant when dst aliases src.
void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movp(dst, src);
    }
  } else if (dst.is(src)) {
    DCHECK(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    subp(dst, constant_reg);
  } else {
    if (constant->value() == Smi::kMinValue) {
      // -Smi::kMinValue is not representable, so negation cannot be used.
      LoadSmiConstant(dst, constant);
      // Adding and subtracting the min-value gives the same result, it only
      // differs on the overflow bit, which we don't check here.
      addp(dst, src);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(dst, Smi::FromInt(-constant->value()));
      addp(dst, src);
    }
  }
}
1709
1710
// Computes dst = src - constant with overflow behavior controlled by
// |mode| (see SmiAddConstant with mode for the mode semantics). Clobbers
// kScratchRegister when dst aliases src or when constant is Smi::kMinValue.
void MacroAssembler::SmiSubConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    SmiOperationExecutionMode mode,
                                    Label* bailout_label,
                                    Label::Distance near_jump) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movp(dst, src);
    }
  } else if (dst.is(src)) {
    DCHECK(!dst.is(kScratchRegister));
    LoadSmiConstant(kScratchRegister, constant);
    subp(dst, kScratchRegister);
    if (mode.Contains(BAILOUT_ON_NO_OVERFLOW)) {
      j(no_overflow, bailout_label, near_jump);
      DCHECK(mode.Contains(PRESERVE_SOURCE_REGISTER));
      // Undo the subtract so dst (== src) is intact on the overflow path.
      addp(dst, kScratchRegister);
    } else if (mode.Contains(BAILOUT_ON_OVERFLOW)) {
      if (mode.Contains(PRESERVE_SOURCE_REGISTER)) {
        Label done;
        j(no_overflow, &done, Label::kNear);
        addp(dst, kScratchRegister);
        jmp(bailout_label, near_jump);
        bind(&done);
      } else {
        // Bailout if overflow without preserving src.
        j(overflow, bailout_label, near_jump);
      }
    } else {
      CHECK(mode.IsEmpty());
    }
  } else {
    DCHECK(mode.Contains(PRESERVE_SOURCE_REGISTER));
    DCHECK(mode.Contains(BAILOUT_ON_OVERFLOW));
    if (constant->value() == Smi::kMinValue) {
      // -Smi::kMinValue is not representable, so subtract directly via the
      // scratch register instead of adding a negation.
      DCHECK(!dst.is(kScratchRegister));
      movp(dst, src);
      LoadSmiConstant(kScratchRegister, constant);
      subp(dst, kScratchRegister);
      j(overflow, bailout_label, near_jump);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(dst, Smi::FromInt(-(constant->value())));
      addp(dst, src);
      j(overflow, bailout_label, near_jump);
    }
  }
}
1760
1761
// Computes dst = -src (smi negation) and falls through when the result is
// NOT a valid smi (i.e. src was 0 or Smi::kMinValue, the two values equal
// to their own negation); otherwise jumps to |on_smi_result|. Clobbers
// kScratchRegister when dst aliases src.
void MacroAssembler::SmiNeg(Register dst,
                            Register src,
                            Label* on_smi_result,
                            Label::Distance near_jump) {
  if (dst.is(src)) {
    DCHECK(!dst.is(kScratchRegister));
    movp(kScratchRegister, src);
    negp(dst);  // Low 32 bits are retained as zero by negation.
    // Test if result is zero or Smi::kMinValue.
    cmpp(dst, kScratchRegister);
    j(not_equal, on_smi_result, near_jump);
    // Fall-through (failure) path: restore the original value.
    movp(src, kScratchRegister);
  } else {
    movp(dst, src);
    negp(dst);
    cmpp(dst, src);
    // If the result is zero or Smi::kMinValue, negation failed to create a smi.
    j(not_equal, on_smi_result, near_jump);
  }
}
1782
1783
// Shared code generator for SmiAdd overloads: emits dst = src1 + src2 with
// a jump to |on_not_smi_result| on overflow. T is Register or Operand.
// When dst aliases src1, the add is undone before bailing out so src1 is
// preserved on the failure path.
template<class T>
static void SmiAddHelper(MacroAssembler* masm,
                         Register dst,
                         Register src1,
                         T src2,
                         Label* on_not_smi_result,
                         Label::Distance near_jump) {
  if (dst.is(src1)) {
    Label done;
    masm->addp(dst, src2);
    masm->j(no_overflow, &done, Label::kNear);
    // Restore src1.
    masm->subp(dst, src2);
    masm->jmp(on_not_smi_result, near_jump);
    masm->bind(&done);
  } else {
    masm->movp(dst, src1);
    masm->addp(dst, src2);
    masm->j(overflow, on_not_smi_result, near_jump);
  }
}
1806
// Computes dst = src1 + src2 (smi addition); jumps to |on_not_smi_result|
// on overflow, with src1 preserved on that path.
void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  DCHECK_NOT_NULL(on_not_smi_result);
  // dst must not alias src2: the helper's restore path subtracts src2 back
  // out of dst, which would be wrong if they were the same register.
  DCHECK(!dst.is(src2));
  SmiAddHelper<Register>(this, dst, src1, src2, on_not_smi_result, near_jump);
}
1816
1817
// Computes dst = src1 + src2 where src2 is a memory operand; jumps to
// |on_not_smi_result| on overflow, with src1 preserved on that path.
void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            const Operand& src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  DCHECK_NOT_NULL(on_not_smi_result);
  // src2's address must not use dst: dst is overwritten before src2 is
  // re-read on the restore path.
  DCHECK(!src2.AddressUsesRegister(dst));
  SmiAddHelper<Operand>(this, dst, src1, src2, on_not_smi_result, near_jump);
}
1827
1828
// Computes dst = src1 + src2 (smi addition) without an overflow bailout.
// In debug code the absence of overflow is verified (Check/Assert abort
// with kSmiAdditionOverflow); clobbers kScratchRegister in that case.
void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible.
  if (!dst.is(src1)) {
    if (emit_debug_code()) {
      movp(kScratchRegister, src1);
      addp(kScratchRegister, src2);
      Check(no_overflow, kSmiAdditionOverflow);
    }
    // lea computes the sum without touching the flags or the sources.
    leap(dst, Operand(src1, src2, times_1, 0));
  } else {
    addp(dst, src2);
    Assert(no_overflow, kSmiAdditionOverflow);
  }
}
1846
1847
// Shared code generator for SmiSub overloads: emits dst = src1 - src2 with
// a jump to |on_not_smi_result| on overflow. T is Register or Operand.
// When dst aliases src1, the subtract is undone before bailing out so src1
// is preserved on the failure path.
template<class T>
static void SmiSubHelper(MacroAssembler* masm,
                         Register dst,
                         Register src1,
                         T src2,
                         Label* on_not_smi_result,
                         Label::Distance near_jump) {
  if (dst.is(src1)) {
    Label done;
    masm->subp(dst, src2);
    masm->j(no_overflow, &done, Label::kNear);
    // Restore src1.
    masm->addp(dst, src2);
    masm->jmp(on_not_smi_result, near_jump);
    masm->bind(&done);
  } else {
    masm->movp(dst, src1);
    masm->subp(dst, src2);
    masm->j(overflow, on_not_smi_result, near_jump);
  }
}
1869
1870
// Computes dst = src1 - src2 (smi subtraction); jumps to
// |on_not_smi_result| on overflow, with src1 preserved on that path.
void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  DCHECK_NOT_NULL(on_not_smi_result);
  // dst must not alias src2: the restore path adds src2 back into dst.
  DCHECK(!dst.is(src2));
  SmiSubHelper<Register>(this, dst, src1, src2, on_not_smi_result, near_jump);
}
1880
1881
// Computes dst = src1 - src2 where src2 is a memory operand; jumps to
// |on_not_smi_result| on overflow, with src1 preserved on that path.
void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            const Operand& src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  DCHECK_NOT_NULL(on_not_smi_result);
  // src2's address must not use dst, which is clobbered before src2 is
  // re-read on the restore path.
  DCHECK(!src2.AddressUsesRegister(dst));
  SmiSubHelper<Operand>(this, dst, src1, src2, on_not_smi_result, near_jump);
}
1891
1892
// Shared code generator for the unchecked SmiSub overloads: emits
// dst = src1 - src2 and, in debug code, asserts that no overflow occurred.
template<class T>
static void SmiSubNoOverflowHelper(MacroAssembler* masm,
                                   Register dst,
                                   Register src1,
                                   T src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible (e.g., subtracting two positive smis).
  if (!dst.is(src1)) {
    masm->movp(dst, src1);
  }
  masm->subp(dst, src2);
  masm->Assert(no_overflow, kSmiSubtractionOverflow);
}
1906
1907
// Computes dst = src1 - src2 (smi subtraction) without an overflow bailout.
void MacroAssembler::SmiSub(Register dst, Register src1, Register src2) {
  // dst must not alias src2: dst is written before src2 is consumed.
  DCHECK(!dst.is(src2));
  SmiSubNoOverflowHelper<Register>(this, dst, src1, src2);
}
1912
1913
// Computes dst = src1 - src2 (memory operand) without an overflow bailout.
void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            const Operand& src2) {
  SmiSubNoOverflowHelper<Operand>(this, dst, src1, src2);
}
1919
1920
// Computes dst = src1 * src2 (smi multiplication); jumps to
// |on_not_smi_result| on overflow or when the mathematically correct result
// would be negative zero (product zero with one negative operand), which a
// smi cannot represent. src1 is restored on the failure path when dst
// aliases it. Clobbers kScratchRegister.
void MacroAssembler::SmiMul(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  DCHECK(!dst.is(src2));
  DCHECK(!dst.is(kScratchRegister));
  DCHECK(!src1.is(kScratchRegister));
  DCHECK(!src2.is(kScratchRegister));

  if (dst.is(src1)) {
    Label failure, zero_correct_result;
    movp(kScratchRegister, src1);  // Create backup for later testing.
    // Untag one operand so the product of (untagged * tagged) is correctly
    // tagged.
    SmiToInteger64(dst, src1);
    imulp(dst, src2);
    j(overflow, &failure, Label::kNear);

    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    Label correct_result;
    testp(dst, dst);
    j(not_zero, &correct_result, Label::kNear);

    // The sign of src1 XOR src2 is the sign of the mathematical product.
    movp(dst, kScratchRegister);
    xorp(dst, src2);
    // Result was positive zero.
    j(positive, &zero_correct_result, Label::kNear);

    bind(&failure);  // Reused failure exit, restores src1.
    movp(src1, kScratchRegister);
    jmp(on_not_smi_result, near_jump);

    bind(&zero_correct_result);
    Set(dst, 0);

    bind(&correct_result);
  } else {
    SmiToInteger64(dst, src1);
    imulp(dst, src2);
    j(overflow, on_not_smi_result, near_jump);
    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    Label correct_result;
    testp(dst, dst);
    j(not_zero, &correct_result, Label::kNear);
    // One of src1 and src2 is zero, the check whether the other is
    // negative.
    movp(kScratchRegister, src1);
    xorp(kScratchRegister, src2);
    j(negative, on_not_smi_result, near_jump);
    bind(&correct_result);
  }
}
1974
1975
1976void MacroAssembler::SmiDiv(Register dst,
1977 Register src1,
1978 Register src2,
1979 Label* on_not_smi_result,
1980 Label::Distance near_jump) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001981 DCHECK(!src1.is(kScratchRegister));
1982 DCHECK(!src2.is(kScratchRegister));
1983 DCHECK(!dst.is(kScratchRegister));
1984 DCHECK(!src2.is(rax));
1985 DCHECK(!src2.is(rdx));
1986 DCHECK(!src1.is(rdx));
Ben Murdoch257744e2011-11-30 15:57:28 +00001987
1988 // Check for 0 divisor (result is +/-Infinity).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001989 testp(src2, src2);
Ben Murdoch257744e2011-11-30 15:57:28 +00001990 j(zero, on_not_smi_result, near_jump);
1991
1992 if (src1.is(rax)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001993 movp(kScratchRegister, src1);
Ben Murdoch257744e2011-11-30 15:57:28 +00001994 }
1995 SmiToInteger32(rax, src1);
1996 // We need to rule out dividing Smi::kMinValue by -1, since that would
1997 // overflow in idiv and raise an exception.
1998 // We combine this with negative zero test (negative zero only happens
1999 // when dividing zero by a negative number).
2000
2001 // We overshoot a little and go to slow case if we divide min-value
2002 // by any negative value, not just -1.
2003 Label safe_div;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002004 testl(rax, Immediate(~Smi::kMinValue));
Ben Murdoch257744e2011-11-30 15:57:28 +00002005 j(not_zero, &safe_div, Label::kNear);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002006 testp(src2, src2);
Ben Murdoch257744e2011-11-30 15:57:28 +00002007 if (src1.is(rax)) {
2008 j(positive, &safe_div, Label::kNear);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002009 movp(src1, kScratchRegister);
Ben Murdoch257744e2011-11-30 15:57:28 +00002010 jmp(on_not_smi_result, near_jump);
2011 } else {
2012 j(negative, on_not_smi_result, near_jump);
2013 }
2014 bind(&safe_div);
2015
2016 SmiToInteger32(src2, src2);
2017 // Sign extend src1 into edx:eax.
2018 cdq();
2019 idivl(src2);
2020 Integer32ToSmi(src2, src2);
2021 // Check that the remainder is zero.
2022 testl(rdx, rdx);
2023 if (src1.is(rax)) {
2024 Label smi_result;
2025 j(zero, &smi_result, Label::kNear);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002026 movp(src1, kScratchRegister);
Ben Murdoch257744e2011-11-30 15:57:28 +00002027 jmp(on_not_smi_result, near_jump);
2028 bind(&smi_result);
2029 } else {
2030 j(not_zero, on_not_smi_result, near_jump);
2031 }
2032 if (!dst.is(src1) && src1.is(rax)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002033 movp(src1, kScratchRegister);
Ben Murdoch257744e2011-11-30 15:57:28 +00002034 }
2035 Integer32ToSmi(dst, rax);
2036}
2037
2038
2039void MacroAssembler::SmiMod(Register dst,
2040 Register src1,
2041 Register src2,
2042 Label* on_not_smi_result,
2043 Label::Distance near_jump) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002044 DCHECK(!dst.is(kScratchRegister));
2045 DCHECK(!src1.is(kScratchRegister));
2046 DCHECK(!src2.is(kScratchRegister));
2047 DCHECK(!src2.is(rax));
2048 DCHECK(!src2.is(rdx));
2049 DCHECK(!src1.is(rdx));
2050 DCHECK(!src1.is(src2));
Ben Murdoch257744e2011-11-30 15:57:28 +00002051
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002052 testp(src2, src2);
Ben Murdoch257744e2011-11-30 15:57:28 +00002053 j(zero, on_not_smi_result, near_jump);
2054
2055 if (src1.is(rax)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002056 movp(kScratchRegister, src1);
Ben Murdoch257744e2011-11-30 15:57:28 +00002057 }
2058 SmiToInteger32(rax, src1);
2059 SmiToInteger32(src2, src2);
2060
2061 // Test for the edge case of dividing Smi::kMinValue by -1 (will overflow).
2062 Label safe_div;
2063 cmpl(rax, Immediate(Smi::kMinValue));
2064 j(not_equal, &safe_div, Label::kNear);
2065 cmpl(src2, Immediate(-1));
2066 j(not_equal, &safe_div, Label::kNear);
2067 // Retag inputs and go slow case.
2068 Integer32ToSmi(src2, src2);
2069 if (src1.is(rax)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002070 movp(src1, kScratchRegister);
Ben Murdoch257744e2011-11-30 15:57:28 +00002071 }
2072 jmp(on_not_smi_result, near_jump);
2073 bind(&safe_div);
2074
2075 // Sign extend eax into edx:eax.
2076 cdq();
2077 idivl(src2);
2078 // Restore smi tags on inputs.
2079 Integer32ToSmi(src2, src2);
2080 if (src1.is(rax)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002081 movp(src1, kScratchRegister);
Ben Murdoch257744e2011-11-30 15:57:28 +00002082 }
2083 // Check for a negative zero result. If the result is zero, and the
2084 // dividend is negative, go slow to return a floating point negative zero.
2085 Label smi_result;
2086 testl(rdx, rdx);
2087 j(not_zero, &smi_result, Label::kNear);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002088 testp(src1, src1);
Ben Murdoch257744e2011-11-30 15:57:28 +00002089 j(negative, on_not_smi_result, near_jump);
2090 bind(&smi_result);
2091 Integer32ToSmi(dst, rdx);
2092}
2093
2094
// Computes dst = ~src as a smi (bitwise NOT of the payload, keeping a valid
// tag). The mask loaded into kScratchRegister pre-sets the bits that must
// come out zero after the final NOT. Clobbers kScratchRegister.
void MacroAssembler::SmiNot(Register dst, Register src) {
  DCHECK(!dst.is(kScratchRegister));
  DCHECK(!src.is(kScratchRegister));
  if (SmiValuesAre32Bits()) {
    // Set tag and padding bits before negating, so that they are zero
    // afterwards.
    movl(kScratchRegister, Immediate(~0));
  } else {
    DCHECK(SmiValuesAre31Bits());
    movl(kScratchRegister, Immediate(1));
  }
  if (dst.is(src)) {
    xorp(dst, kScratchRegister);
  } else {
    // lea adds the mask while copying src into dst in a single instruction.
    leap(dst, Operand(src, kScratchRegister, times_1, 0));
  }
  notp(dst);
}
2113
2114
2115void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002116 DCHECK(!dst.is(src2));
Steve Blocka7e24c12009-10-30 11:49:00 +00002117 if (!dst.is(src1)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002118 movp(dst, src1);
Steve Blocka7e24c12009-10-30 11:49:00 +00002119 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002120 andp(dst, src2);
Steve Blocka7e24c12009-10-30 11:49:00 +00002121}
2122
2123
// Computes dst = src & constant (bitwise AND with a smi constant). May
// clobber kScratchRegister via GetSmiConstant when dst aliases src.
void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    // AND with zero is always zero; skip reading src entirely.
    Set(dst, 0);
  } else if (dst.is(src)) {
    DCHECK(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    andp(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    andp(dst, src);
  }
}
2136
2137
2138void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
2139 if (!dst.is(src1)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002140 DCHECK(!src1.is(src2));
2141 movp(dst, src1);
Steve Blocka7e24c12009-10-30 11:49:00 +00002142 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002143 orp(dst, src2);
Steve Blocka7e24c12009-10-30 11:49:00 +00002144}
2145
2146
// Computes dst = src | constant (bitwise OR with a smi constant). May
// clobber kScratchRegister via GetSmiConstant when dst aliases src.
void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    DCHECK(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    orp(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    orp(dst, src);
  }
}
2157
Steve Block3ce2e202009-11-05 08:53:23 +00002158
Steve Blocka7e24c12009-10-30 11:49:00 +00002159void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
2160 if (!dst.is(src1)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002161 DCHECK(!src1.is(src2));
2162 movp(dst, src1);
Steve Blocka7e24c12009-10-30 11:49:00 +00002163 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002164 xorp(dst, src2);
Steve Blocka7e24c12009-10-30 11:49:00 +00002165}
2166
2167
// Computes dst = src ^ constant (bitwise XOR with a smi constant). May
// clobber kScratchRegister via GetSmiConstant when dst aliases src.
void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    DCHECK(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    xorp(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    xorp(dst, src);
  }
}
2178
2179
// Arithmetic right shift of a smi by a constant, producing a smi:
// shifts the tagged value right by (shift + kSmiShift) to drop the tag and
// shift the payload, then re-tags with a left shift. Only the in-place
// (dst == src) form is implemented; other uses hit UNIMPLEMENTED.
void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
                                                     Register src,
                                                     int shift_value) {
  DCHECK(is_uint5(shift_value));
  if (shift_value > 0) {
    if (dst.is(src)) {
      sarp(dst, Immediate(shift_value + kSmiShift));
      shlp(dst, Immediate(kSmiShift));
    } else {
      UNIMPLEMENTED();  // Not used.
    }
  }
}
2193
2194
// Emits dst = src << shift_value for smi operands.
// - 32-bit smis: shifts the whole tagged 64-bit word; no overflow check is
//   emitted (on_not_smi_result is unused on this path).
// - 31-bit smis: untags, shifts as a 32-bit value, validates the result fits
//   in a smi (jumping to on_not_smi_result otherwise), then retags.
void MacroAssembler::SmiShiftLeftConstant(Register dst,
                                          Register src,
                                          int shift_value,
                                          Label* on_not_smi_result,
                                          Label::Distance near_jump) {
  if (SmiValuesAre32Bits()) {
    if (!dst.is(src)) {
      movp(dst, src);
    }
    if (shift_value > 0) {
      // Shift amount specified by lower 5 bits, not six as the shl opcode.
      shlq(dst, Immediate(shift_value & 0x1f));
    }
  } else {
    DCHECK(SmiValuesAre31Bits());
    if (dst.is(src)) {
      UNIMPLEMENTED();  // Not used.
    } else {
      SmiToInteger32(dst, src);
      shll(dst, Immediate(shift_value));
      JumpIfNotValidSmiValue(dst, on_not_smi_result, near_jump);
      Integer32ToSmi(dst, dst);
    }
  }
}
2220
2221
// Emits dst = src >>> shift_value (logical/unsigned right shift) for smis.
// A zero shift can still fail: a negative smi reinterpreted as unsigned does
// not fit, hence the explicit sign test jumping to on_not_smi_result.
void MacroAssembler::SmiShiftLogicalRightConstant(
    Register dst, Register src, int shift_value,
    Label* on_not_smi_result, Label::Distance near_jump) {
  // Logic right shift interprets its result as an *unsigned* number.
  if (dst.is(src)) {
    UNIMPLEMENTED();  // Not used.
  } else {
    if (shift_value == 0) {
      testp(src, src);
      j(negative, on_not_smi_result, near_jump);
    }
    if (SmiValuesAre32Bits()) {
      // Shift the tagged word right then retag in place.
      movp(dst, src);
      shrp(dst, Immediate(shift_value + kSmiShift));
      shlp(dst, Immediate(kSmiShift));
    } else {
      DCHECK(SmiValuesAre31Bits());
      // Untag, shift, check the unsigned result still fits in a smi, retag.
      SmiToInteger32(dst, src);
      shrp(dst, Immediate(shift_value));
      JumpIfUIntNotValidSmiValue(dst, on_not_smi_result, near_jump);
      Integer32ToSmi(dst, dst);
    }
  }
}
2246
2247
// Emits dst = src1 << src2 for smi operands, with the shift count taken from
// src2. rcx is clobbered (required by the variable-shift instructions).
// - 32-bit smis: shift count is untagged into rcx, masked to 5 bits, and the
//   tagged word is shifted; no overflow check (on_not_smi_result unused).
// - 31-bit smis: untag both, shift 32-bit, validate the result is a smi; on
//   failure restore whichever of src1/src2 lived in rcx (saved in
//   kScratchRegister) before jumping to on_not_smi_result.
void MacroAssembler::SmiShiftLeft(Register dst,
                                  Register src1,
                                  Register src2,
                                  Label* on_not_smi_result,
                                  Label::Distance near_jump) {
  if (SmiValuesAre32Bits()) {
    DCHECK(!dst.is(rcx));
    if (!dst.is(src1)) {
      movp(dst, src1);
    }
    // Untag shift amount.
    SmiToInteger32(rcx, src2);
    // Shift amount specified by lower 5 bits, not six as the shl opcode.
    andp(rcx, Immediate(0x1f));
    shlq_cl(dst);
  } else {
    DCHECK(SmiValuesAre31Bits());
    DCHECK(!dst.is(kScratchRegister));
    DCHECK(!src1.is(kScratchRegister));
    DCHECK(!src2.is(kScratchRegister));
    DCHECK(!dst.is(src2));
    DCHECK(!dst.is(rcx));

    if (src1.is(rcx) || src2.is(rcx)) {
      movq(kScratchRegister, rcx);
    }
    if (dst.is(src1)) {
      UNIMPLEMENTED();  // Not used.
    } else {
      Label valid_result;
      SmiToInteger32(dst, src1);
      SmiToInteger32(rcx, src2);
      shll_cl(dst);
      JumpIfValidSmiValue(dst, &valid_result, Label::kNear);
      // As src1 or src2 could not be dst, we do not need to restore them for
      // clobbering dst.
      if (src1.is(rcx) || src2.is(rcx)) {
        if (src1.is(rcx)) {
          movq(src1, kScratchRegister);
        } else {
          movq(src2, kScratchRegister);
        }
      }
      jmp(on_not_smi_result, near_jump);
      bind(&valid_result);
      Integer32ToSmi(dst, dst);
    }
  }
}
2297
2298
// Emits dst = src1 >>> src2 (logical right shift, count from src2) for smis.
// rcx is clobbered for the variable shift; if src1/src2 live in rcx they are
// saved in kScratchRegister and restored on the overflow path. The unsigned
// result is validated against the smi range before retagging.
void MacroAssembler::SmiShiftLogicalRight(Register dst,
                                          Register src1,
                                          Register src2,
                                          Label* on_not_smi_result,
                                          Label::Distance near_jump) {
  DCHECK(!dst.is(kScratchRegister));
  DCHECK(!src1.is(kScratchRegister));
  DCHECK(!src2.is(kScratchRegister));
  DCHECK(!dst.is(src2));
  DCHECK(!dst.is(rcx));
  if (src1.is(rcx) || src2.is(rcx)) {
    movq(kScratchRegister, rcx);
  }
  if (dst.is(src1)) {
    UNIMPLEMENTED();  // Not used.
  } else {
    Label valid_result;
    SmiToInteger32(dst, src1);
    SmiToInteger32(rcx, src2);
    shrl_cl(dst);
    JumpIfUIntValidSmiValue(dst, &valid_result, Label::kNear);
    // As src1 or src2 could not be dst, we do not need to restore them for
    // clobbering dst.
    if (src1.is(rcx) || src2.is(rcx)) {
      if (src1.is(rcx)) {
        movq(src1, kScratchRegister);
      } else {
        movq(src2, kScratchRegister);
      }
    }
    jmp(on_not_smi_result, near_jump);
    bind(&valid_result);
    Integer32ToSmi(dst, dst);
  }
}
2334
2335
// Emits dst = src1 >> src2 (arithmetic right shift, count from src2) for
// smis. rcx is clobbered with the untagged shift count. No overflow check is
// needed: an arithmetic right shift of a valid smi is always a valid smi.
void MacroAssembler::SmiShiftArithmeticRight(Register dst,
                                             Register src1,
                                             Register src2) {
  DCHECK(!dst.is(kScratchRegister));
  DCHECK(!src1.is(kScratchRegister));
  DCHECK(!src2.is(kScratchRegister));
  DCHECK(!dst.is(rcx));

  SmiToInteger32(rcx, src2);
  if (!dst.is(src1)) {
    movp(dst, src1);
  }
  SmiToInteger32(dst, dst);
  sarl_cl(dst);
  Integer32ToSmi(dst, dst);
}
2352
2353
// Given two values of which at most one is a smi, emits branch-free code
// that selects the non-smi into dst. If BOTH are smis, jumps to on_not_smis
// (in debug builds, both being smis also trips a Check). kScratchRegister is
// clobbered; src1 and src2 are preserved.
void MacroAssembler::SelectNonSmi(Register dst,
                                  Register src1,
                                  Register src2,
                                  Label* on_not_smis,
                                  Label::Distance near_jump) {
  DCHECK(!dst.is(kScratchRegister));
  DCHECK(!src1.is(kScratchRegister));
  DCHECK(!src2.is(kScratchRegister));
  DCHECK(!dst.is(src1));
  DCHECK(!dst.is(src2));
  // Both operands must not be smis.
#ifdef DEBUG
  Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
  Check(not_both_smis, kBothRegistersWereSmisInSelectNonSmi);
#endif
  STATIC_ASSERT(kSmiTag == 0);
  DCHECK_EQ(0, Smi::FromInt(0));
  movl(kScratchRegister, Immediate(kSmiTagMask));
  andp(kScratchRegister, src1);
  testl(kScratchRegister, src2);
  // If non-zero then both are smis.
  j(not_zero, on_not_smis, near_jump);

  // Exactly one operand is a smi.
  DCHECK_EQ(1, static_cast<int>(kSmiTagMask));
  // kScratchRegister still holds src1 & kSmiTag, which is either zero or one.
  subp(kScratchRegister, Immediate(1));
  // If src1 is a smi, then scratch register all 1s, else it is all 0s.
  movp(dst, src1);
  xorp(dst, src2);
  andp(dst, kScratchRegister);
  // If src1 is a smi, dst holds src1 ^ src2, else it is zero.
  xorp(dst, src1);
  // If src1 is a smi, dst is src2, else it is src1, i.e., the non-smi.
}
2389
2390
// Converts the smi in src into an index scaled by 2^shift, returning a
// SmiIndex (register + ScaleFactor) usable in an addressing mode.
// - 32-bit smis: adjusts the tag shift directly (sar or shl), scale times_1.
// - 31-bit smis: sign-extends to 64 bits (the smi may be negative), and for
//   shift > times_1 leaves the tag in place, folding one power of two of the
//   scaling into the returned ScaleFactor instead.
SmiIndex MacroAssembler::SmiToIndex(Register dst,
                                    Register src,
                                    int shift) {
  if (SmiValuesAre32Bits()) {
    DCHECK(is_uint6(shift));
    // There is a possible optimization if shift is in the range 60-63, but that
    // will (and must) never happen.
    if (!dst.is(src)) {
      movp(dst, src);
    }
    if (shift < kSmiShift) {
      sarp(dst, Immediate(kSmiShift - shift));
    } else {
      shlp(dst, Immediate(shift - kSmiShift));
    }
    return SmiIndex(dst, times_1);
  } else {
    DCHECK(SmiValuesAre31Bits());
    DCHECK(shift >= times_1 && shift <= (static_cast<int>(times_8) + 1));
    if (!dst.is(src)) {
      movp(dst, src);
    }
    // We have to sign extend the index register to 64-bit as the SMI might
    // be negative.
    movsxlq(dst, dst);
    if (shift == times_1) {
      sarq(dst, Immediate(kSmiShift));
      return SmiIndex(dst, times_1);
    }
    return SmiIndex(dst, static_cast<ScaleFactor>(shift - 1));
  }
}
2423
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002424
// Like SmiToIndex, but negates the (positive) smi in src first, producing a
// negative scaled index. Mirrors SmiToIndex's two smi-representation paths,
// with negp/negq inserted before the shift adjustment.
SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
                                            Register src,
                                            int shift) {
  if (SmiValuesAre32Bits()) {
    // Register src holds a positive smi.
    DCHECK(is_uint6(shift));
    if (!dst.is(src)) {
      movp(dst, src);
    }
    negp(dst);
    if (shift < kSmiShift) {
      sarp(dst, Immediate(kSmiShift - shift));
    } else {
      shlp(dst, Immediate(shift - kSmiShift));
    }
    return SmiIndex(dst, times_1);
  } else {
    DCHECK(SmiValuesAre31Bits());
    DCHECK(shift >= times_1 && shift <= (static_cast<int>(times_8) + 1));
    if (!dst.is(src)) {
      movp(dst, src);
    }
    negq(dst);
    if (shift == times_1) {
      sarq(dst, Immediate(kSmiShift));
      return SmiIndex(dst, times_1);
    }
    return SmiIndex(dst, static_cast<ScaleFactor>(shift - 1));
  }
}
2455
2456
// Adds the integer value of the smi stored at memory operand src to the
// 32-bit register dst. With 32-bit smis the payload occupies the upper half
// of the word, so the add reads directly at a kSmiShift-byte offset; with
// 31-bit smis the value is untagged via kScratchRegister first (clobbered).
void MacroAssembler::AddSmiField(Register dst, const Operand& src) {
  if (SmiValuesAre32Bits()) {
    DCHECK_EQ(0, kSmiShift % kBitsPerByte);
    addl(dst, Operand(src, kSmiShift / kBitsPerByte));
  } else {
    DCHECK(SmiValuesAre31Bits());
    SmiToInteger32(kScratchRegister, src);
    addl(dst, kScratchRegister);
  }
}
2467
2468
// Pushes the tagged smi value onto the stack. Uses a push-immediate when the
// tagged bit pattern fits in 32 bits; otherwise materializes the constant in
// a register (via GetSmiConstant) and pushes that.
void MacroAssembler::Push(Smi* source) {
  intptr_t smi = reinterpret_cast<intptr_t>(source);
  if (is_int32(smi)) {
    Push(Immediate(static_cast<int32_t>(smi)));
  } else {
    Register constant = GetSmiConstant(source);
    Push(constant);
  }
}
2478
2479
// Splits the raw pointer-sized word in src into two smis and pushes them
// (high part first, then low part), so an arbitrary 64-bit value can live on
// the stack disguised as tagged data. Clobbers both src and scratch.
// Inverse operation: PopRegisterAsTwoSmis.
void MacroAssembler::PushRegisterAsTwoSmis(Register src, Register scratch) {
  DCHECK(!src.is(scratch));
  movp(scratch, src);
  // High bits.
  shrp(src, Immediate(kPointerSize * kBitsPerByte - kSmiShift));
  shlp(src, Immediate(kSmiShift));
  Push(src);
  // Low bits.
  shlp(scratch, Immediate(kSmiShift));
  Push(scratch);
}
2491
2492
// Reassembles into dst a pointer-sized word previously pushed as two smis by
// PushRegisterAsTwoSmis: pops low then high halves, untags each, and ors the
// shifted high half with the low half. Clobbers scratch.
void MacroAssembler::PopRegisterAsTwoSmis(Register dst, Register scratch) {
  DCHECK(!dst.is(scratch));
  Pop(scratch);
  // Low bits.
  shrp(scratch, Immediate(kSmiShift));
  Pop(dst);
  shrp(dst, Immediate(kSmiShift));
  // High bits.
  shlp(dst, Immediate(kPointerSize * kBitsPerByte - kSmiShift));
  orp(dst, scratch);
}
2504
2505
// Emits a testl of the smi at memory operand src against the smi constant
// source, setting flags for a following conditional jump. With 32-bit smis
// only the high (payload) half of the word is tested, using the untagged
// value; with 31-bit smis the whole tagged 32-bit pattern is tested.
void MacroAssembler::Test(const Operand& src, Smi* source) {
  if (SmiValuesAre32Bits()) {
    testl(Operand(src, kIntSize), Immediate(source->value()));
  } else {
    DCHECK(SmiValuesAre31Bits());
    testl(src, Immediate(source));
  }
}
2514
2515
2516// ----------------------------------------------------------------------------
2517
2518
// Looks up |object| (a smi or heap number) in the heap's number-string
// cache. On a hit, |result| holds the cached string and the
// number_to_string_native counter is bumped; on any miss (including NaN or
// an untagged map mismatch) control transfers to |not_found|. result,
// scratch1 and scratch2 are all clobbered; xmm0 is used for the double
// comparison.
void MacroAssembler::LookupNumberStringCache(Register object,
                                             Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* not_found) {
  // Use of registers. Register result is used as a temporary.
  Register number_string_cache = result;
  Register mask = scratch1;
  Register scratch = scratch2;

  // Load the number string cache.
  LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex);

  // Make the hash mask from the length of the number string cache. It
  // contains two elements (number and string) for each cache entry.
  SmiToInteger32(
      mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset));
  shrl(mask, Immediate(1));
  subp(mask, Immediate(1));  // Make mask.

  // Calculate the entry in the number string cache. The hash value in the
  // number string cache for smis is just the smi value, and the hash for
  // doubles is the xor of the upper and lower words. See
  // Heap::GetNumberStringCache.
  Label is_smi;
  Label load_result_from_cache;
  JumpIfSmi(object, &is_smi);
  CheckMap(object,
           isolate()->factory()->heap_number_map(),
           not_found,
           DONT_DO_SMI_CHECK);

  STATIC_ASSERT(8 == kDoubleSize);
  // Hash the double: xor of its upper and lower 32-bit words, masked to the
  // cache size.
  movl(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4));
  xorp(scratch, FieldOperand(object, HeapNumber::kValueOffset));
  andp(scratch, mask);
  // Each entry in string cache consists of two pointer sized fields,
  // but times_twice_pointer_size (multiplication by 16) scale factor
  // is not supported by addrmode on x64 platform.
  // So we have to premultiply entry index before lookup.
  shlp(scratch, Immediate(kPointerSizeLog2 + 1));

  Register index = scratch;
  Register probe = mask;
  movp(probe,
       FieldOperand(number_string_cache,
                    index,
                    times_1,
                    FixedArray::kHeaderSize));
  JumpIfSmi(probe, not_found);
  movsd(xmm0, FieldOperand(object, HeapNumber::kValueOffset));
  ucomisd(xmm0, FieldOperand(probe, HeapNumber::kValueOffset));
  j(parity_even, not_found);  // Bail out if NaN is involved.
  j(not_equal, not_found);  // The cache did not contain this value.
  jmp(&load_result_from_cache);

  bind(&is_smi);
  SmiToInteger32(scratch, object);
  andp(scratch, mask);
  // Each entry in string cache consists of two pointer sized fields,
  // but times_twice_pointer_size (multiplication by 16) scale factor
  // is not supported by addrmode on x64 platform.
  // So we have to premultiply entry index before lookup.
  shlp(scratch, Immediate(kPointerSizeLog2 + 1));

  // Check if the entry is the smi we are looking for.
  cmpp(object,
       FieldOperand(number_string_cache,
                    index,
                    times_1,
                    FixedArray::kHeaderSize));
  j(not_equal, not_found);

  // Get the result from the cache.
  bind(&load_result_from_cache);
  movp(result,
       FieldOperand(number_string_cache,
                    index,
                    times_1,
                    FixedArray::kHeaderSize + kPointerSize));
  IncrementCounter(isolate()->counters()->number_to_string_native(), 1);
}
2601
2602
// Jumps to not_string if object is not a string: smis and any object whose
// instance type is at or above FIRST_NONSTRING_TYPE fail. On fall-through,
// object_map holds object's map.
void MacroAssembler::JumpIfNotString(Register object,
                                     Register object_map,
                                     Label* not_string,
                                     Label::Distance near_jump) {
  Condition is_smi = CheckSmi(object);
  j(is_smi, not_string, near_jump);
  CmpObjectType(object, FIRST_NONSTRING_TYPE, object_map);
  j(above_equal, not_string, near_jump);
}
2612
2613
// Jumps to on_fail unless both objects are flat (sequential) one-byte
// strings. Checks neither is a smi, loads both instance types, and then
// tests both masked types in a single compare by interleaving them three
// bits apart with leap. scratch1 and scratch2 are clobbered.
void MacroAssembler::JumpIfNotBothSequentialOneByteStrings(
    Register first_object, Register second_object, Register scratch1,
    Register scratch2, Label* on_fail, Label::Distance near_jump) {
  // Check that both objects are not smis.
  Condition either_smi = CheckEitherSmi(first_object, second_object);
  j(either_smi, on_fail, near_jump);

  // Load instance type for both strings.
  movp(scratch1, FieldOperand(first_object, HeapObject::kMapOffset));
  movp(scratch2, FieldOperand(second_object, HeapObject::kMapOffset));
  movzxbl(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzxbl(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat one-byte strings.
  DCHECK(kNotStringTag != 0);
  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatOneByteStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;

  andl(scratch1, Immediate(kFlatOneByteStringMask));
  andl(scratch2, Immediate(kFlatOneByteStringMask));
  // Interleave the bits to check both scratch1 and scratch2 in one test.
  DCHECK_EQ(0, kFlatOneByteStringMask & (kFlatOneByteStringMask << 3));
  leap(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmpl(scratch1,
       Immediate(kFlatOneByteStringTag + (kFlatOneByteStringTag << 3)));
  j(not_equal, on_fail, near_jump);
}
2643
2644
// Jumps to failure unless the given (already loaded) instance type denotes a
// flat sequential one-byte string. scratch is clobbered; it may alias
// instance_type, in which case the input is masked in place.
void MacroAssembler::JumpIfInstanceTypeIsNotSequentialOneByte(
    Register instance_type, Register scratch, Label* failure,
    Label::Distance near_jump) {
  if (!scratch.is(instance_type)) {
    movl(scratch, instance_type);
  }

  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;

  andl(scratch, Immediate(kFlatOneByteStringMask));
  cmpl(scratch, Immediate(kStringTag | kSeqStringTag | kOneByteStringTag));
  j(not_equal, failure, near_jump);
}
2659
2660
// Jumps to on_fail unless BOTH (already loaded) instance types denote flat
// sequential one-byte strings. Same interleaved single-compare trick as
// JumpIfNotBothSequentialOneByteStrings, but starting from instance types
// rather than objects. scratch1 and scratch2 are clobbered.
void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialOneByte(
    Register first_object_instance_type, Register second_object_instance_type,
    Register scratch1, Register scratch2, Label* on_fail,
    Label::Distance near_jump) {
  // Load instance type for both strings.
  movp(scratch1, first_object_instance_type);
  movp(scratch2, second_object_instance_type);

  // Check that both are flat one-byte strings.
  DCHECK(kNotStringTag != 0);
  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatOneByteStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;

  andl(scratch1, Immediate(kFlatOneByteStringMask));
  andl(scratch2, Immediate(kFlatOneByteStringMask));
  // Interleave the bits to check both scratch1 and scratch2 in one test.
  DCHECK_EQ(0, kFlatOneByteStringMask & (kFlatOneByteStringMask << 3));
  leap(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmpl(scratch1,
       Immediate(kFlatOneByteStringTag + (kFlatOneByteStringTag << 3)));
  j(not_equal, on_fail, near_jump);
}
2685
2686
// Shared implementation for both JumpIfNotUniqueNameInstanceType overloads.
// T is either Operand or Register and holds an instance-type byte. Falls
// through when the type is an internalized string (both mask bits clear) or
// a Symbol; otherwise jumps to not_unique_name.
template<class T>
static void JumpIfNotUniqueNameHelper(MacroAssembler* masm,
                                      T operand_or_register,
                                      Label* not_unique_name,
                                      Label::Distance distance) {
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  Label succeed;
  masm->testb(operand_or_register,
              Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  masm->j(zero, &succeed, Label::kNear);
  masm->cmpb(operand_or_register, Immediate(static_cast<uint8_t>(SYMBOL_TYPE)));
  masm->j(not_equal, not_unique_name, distance);

  masm->bind(&succeed);
}
2702
2703
// Memory-operand variant: jumps to not_unique_name unless the instance-type
// byte at |operand| is an internalized string or a Symbol.
void MacroAssembler::JumpIfNotUniqueNameInstanceType(Operand operand,
                                                     Label* not_unique_name,
                                                     Label::Distance distance) {
  JumpIfNotUniqueNameHelper<Operand>(this, operand, not_unique_name, distance);
}
2709
2710
// Register variant: jumps to not_unique_name unless the instance-type byte
// in |reg| is an internalized string or a Symbol.
void MacroAssembler::JumpIfNotUniqueNameInstanceType(Register reg,
                                                     Label* not_unique_name,
                                                     Label::Distance distance) {
  JumpIfNotUniqueNameHelper<Register>(this, reg, not_unique_name, distance);
}
2716
Steve Block44f0eee2011-05-26 01:26:41 +01002717
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002718void MacroAssembler::Move(Register dst, Register src) {
2719 if (!dst.is(src)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002720 movp(dst, src);
Steve Block6ded16b2010-05-10 14:33:55 +01002721 }
Steve Block6ded16b2010-05-10 14:33:55 +01002722}
2723
2724
// Loads the object referenced by |source| into dst: smis are materialized as
// immediates via the Smi overload, heap objects via MoveHeapObject (which
// handles new-space objects through a cell indirection).
void MacroAssembler::Move(Register dst, Handle<Object> source) {
  AllowDeferredHandleDereference smi_check;
  if (source->IsSmi()) {
    Move(dst, Smi::cast(*source));
  } else {
    MoveHeapObject(dst, source);
  }
}
2733
2734
// Stores the object referenced by |source| to the memory operand dst. Heap
// objects go through kScratchRegister (clobbered); smis are stored directly.
void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
  AllowDeferredHandleDereference smi_check;
  if (source->IsSmi()) {
    Move(dst, Smi::cast(*source));
  } else {
    MoveHeapObject(kScratchRegister, source);
    movp(dst, kScratchRegister);
  }
}
2744
2745
// Loads the 32-bit constant src into XMM register dst without a memory
// load where possible: zero uses xorps; a single contiguous run of one bits
// (nlz + popcount + ntz == 32) is synthesized from all-ones (pcmpeqd) plus
// logical shifts; anything else goes through kScratchRegister (clobbered).
void MacroAssembler::Move(XMMRegister dst, uint32_t src) {
  if (src == 0) {
    xorps(dst, dst);
  } else {
    unsigned cnt = base::bits::CountPopulation32(src);
    unsigned nlz = base::bits::CountLeadingZeros32(src);
    unsigned ntz = base::bits::CountTrailingZeros32(src);
    if (nlz + cnt + ntz == 32) {
      // src is a contiguous block of ones: start from all ones and trim.
      pcmpeqd(dst, dst);
      if (ntz == 0) {
        psrld(dst, 32 - cnt);
      } else {
        pslld(dst, 32 - cnt);
        if (nlz != 0) psrld(dst, nlz);
      }
    } else {
      movl(kScratchRegister, Immediate(src));
      movq(dst, kScratchRegister);
    }
  }
}
2767
2768
// Loads the 64-bit constant src into XMM register dst, mirroring the 32-bit
// overload: delegates to it when the upper half is zero, synthesizes a
// contiguous block of ones via pcmpeqd + 64-bit shifts, shifts a high-only
// value into place, and otherwise goes through kScratchRegister (clobbered).
void MacroAssembler::Move(XMMRegister dst, uint64_t src) {
  uint32_t lower = static_cast<uint32_t>(src);
  uint32_t upper = static_cast<uint32_t>(src >> 32);
  if (upper == 0) {
    Move(dst, lower);
  } else {
    unsigned cnt = base::bits::CountPopulation64(src);
    unsigned nlz = base::bits::CountLeadingZeros64(src);
    unsigned ntz = base::bits::CountTrailingZeros64(src);
    if (nlz + cnt + ntz == 64) {
      // src is a contiguous block of ones: start from all ones and trim.
      pcmpeqd(dst, dst);
      if (ntz == 0) {
        psrlq(dst, 64 - cnt);
      } else {
        psllq(dst, 64 - cnt);
        if (nlz != 0) psrlq(dst, nlz);
      }
    } else if (lower == 0) {
      Move(dst, upper);
      psllq(dst, 32);
    } else {
      movq(kScratchRegister, src);
      movq(dst, kScratchRegister);
    }
  }
}
2795
2796
// Compares dst against the object referenced by |source|, setting flags.
// Smis use the Smi overload; heap objects are materialized into
// kScratchRegister (clobbered) and compared with cmpp.
void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
  AllowDeferredHandleDereference smi_check;
  if (source->IsSmi()) {
    Cmp(dst, Smi::cast(*source));
  } else {
    MoveHeapObject(kScratchRegister, source);
    cmpp(dst, kScratchRegister);
  }
}
2806
2807
// Memory-operand variant of Cmp(Register, Handle<Object>): compares the
// value at dst against |source|. Clobbers kScratchRegister for heap objects.
void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
  AllowDeferredHandleDereference smi_check;
  if (source->IsSmi()) {
    Cmp(dst, Smi::cast(*source));
  } else {
    MoveHeapObject(kScratchRegister, source);
    cmpp(dst, kScratchRegister);
  }
}
2817
2818
// Pushes the object referenced by |source| onto the stack. Heap objects are
// materialized into kScratchRegister (clobbered) first.
void MacroAssembler::Push(Handle<Object> source) {
  AllowDeferredHandleDereference smi_check;
  if (source->IsSmi()) {
    Push(Smi::cast(*source));
  } else {
    MoveHeapObject(kScratchRegister, source);
    Push(kScratchRegister);
  }
}
2828
2829
// Loads the heap object referenced by |object| into result. New-space
// objects can move under GC, so they are referenced indirectly through a
// Cell (embed the cell, then load its contents); old-space objects are
// embedded directly with an EMBEDDED_OBJECT relocation.
void MacroAssembler::MoveHeapObject(Register result,
                                    Handle<Object> object) {
  AllowDeferredHandleDereference using_raw_address;
  DCHECK(object->IsHeapObject());
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<Cell> cell = isolate()->factory()->NewCell(object);
    Move(result, cell, RelocInfo::CELL);
    movp(result, Operand(result, 0));
  } else {
    Move(result, object, RelocInfo::EMBEDDED_OBJECT);
  }
}
2842
2843
// Loads the value held in a global Cell into dst. When dst is rax the
// shorter load_rax encoding is used; otherwise the cell address is moved
// into dst and dereferenced.
void MacroAssembler::LoadGlobalCell(Register dst, Handle<Cell> cell) {
  if (dst.is(rax)) {
    AllowDeferredHandleDereference embedding_raw_address;
    load_rax(cell.location(), RelocInfo::CELL);
  } else {
    Move(dst, cell, RelocInfo::CELL);
    movp(dst, Operand(dst, 0));
  }
}
2853
2854
// Compares |value| against the object held by the weak cell, setting flags
// for a following conditional jump. Clobbers scratch with the cell address.
void MacroAssembler::CmpWeakValue(Register value, Handle<WeakCell> cell,
                                  Register scratch) {
  Move(scratch, cell, RelocInfo::EMBEDDED_OBJECT);
  cmpp(value, FieldOperand(scratch, WeakCell::kValueOffset));
}
2860
2861
// Loads the object held by the weak cell into |value|; jumps to |miss| if
// the cell's content is a smi (the JumpIfSmi check on the loaded value).
void MacroAssembler::LoadWeakValue(Register value, Handle<WeakCell> cell,
                                   Label* miss) {
  Move(value, cell, RelocInfo::EMBEDDED_OBJECT);
  movp(value, FieldOperand(value, WeakCell::kValueOffset));
  JumpIfSmi(value, miss);
}
2868
2869
Leon Clarkee46be812010-01-19 14:06:41 +00002870void MacroAssembler::Drop(int stack_elements) {
2871 if (stack_elements > 0) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002872 addp(rsp, Immediate(stack_elements * kPointerSize));
Leon Clarkee46be812010-01-19 14:06:41 +00002873 }
2874}
2875
2876
// Drops |stack_elements| slots that sit below the return address on top of
// the stack, keeping the return address in place. The single-slot x64 case
// uses one popq into the slot above; otherwise the return address is moved
// into scratch, the slots dropped, and the return address pushed back.
void MacroAssembler::DropUnderReturnAddress(int stack_elements,
                                            Register scratch) {
  DCHECK(stack_elements > 0);
  if (kPointerSize == kInt64Size && stack_elements == 1) {
    popq(MemOperand(rsp, 0));
    return;
  }

  PopReturnAddressTo(scratch);
  Drop(stack_elements);
  PushReturnAddressFrom(scratch);
}
2889
2890
// Pointer-width push. On x64 this is a plain pushq; on x32 it manually
// decrements rsp by 4 and stores, since the slot is only 4 bytes.
void MacroAssembler::Push(Register src) {
  if (kPointerSize == kInt64Size) {
    pushq(src);
  } else {
    // x32 uses 64-bit push for rbp in the prologue.
    DCHECK(src.code() != rbp.code());
    leal(rsp, Operand(rsp, -4));
    movp(Operand(rsp, 0), src);
  }
}
2901
2902
// Pointer-width push of a memory operand. The x32 path stages the value in
// kScratchRegister (clobbered) because there is no 4-byte memory-to-memory
// push.
void MacroAssembler::Push(const Operand& src) {
  if (kPointerSize == kInt64Size) {
    pushq(src);
  } else {
    movp(kScratchRegister, src);
    leal(rsp, Operand(rsp, -4));
    movp(Operand(rsp, 0), kScratchRegister);
  }
}
2912
2913
// Pushes a full 64-bit quadword regardless of pointer size. On x32 the
// value is staged in kScratchRegister (clobbered) for the 8-byte pushq.
void MacroAssembler::PushQuad(const Operand& src) {
  if (kPointerSize == kInt64Size) {
    pushq(src);
  } else {
    movp(kScratchRegister, src);
    pushq(kScratchRegister);
  }
}
2922
2923
// Pointer-width push of an immediate: pushq on x64, manual 4-byte slot
// store on x32.
void MacroAssembler::Push(Immediate value) {
  if (kPointerSize == kInt64Size) {
    pushq(value);
  } else {
    leal(rsp, Operand(rsp, -4));
    movp(Operand(rsp, 0), value);
  }
}
2932
2933
// Pushes a raw 32-bit immediate as one pointer-width stack slot (pushq_imm32
// sign-extends to 64 bits on x64; x32 stores a 4-byte slot directly).
void MacroAssembler::PushImm32(int32_t imm32) {
  if (kPointerSize == kInt64Size) {
    pushq_imm32(imm32);
  } else {
    leal(rsp, Operand(rsp, -4));
    movp(Operand(rsp, 0), Immediate(imm32));
  }
}
2942
2943
// Pointer-width pop into a register: popq on x64, manual 4-byte load plus
// rsp adjustment on x32.
void MacroAssembler::Pop(Register dst) {
  if (kPointerSize == kInt64Size) {
    popq(dst);
  } else {
    // x32 uses 64-bit pop for rbp in the epilogue.
    DCHECK(dst.code() != rbp.code());
    movp(dst, Operand(rsp, 0));
    leal(rsp, Operand(rsp, 4));
  }
}
2954
2955
// Pointer-width pop into a memory operand. The x32 path needs a scratch
// register; if the destination's addressing uses kScratchRegister, it falls
// back to kSmiConstantRegister and restores that register's reserved smi
// constant afterwards.
void MacroAssembler::Pop(const Operand& dst) {
  if (kPointerSize == kInt64Size) {
    popq(dst);
  } else {
    Register scratch = dst.AddressUsesRegister(kScratchRegister)
        ? kSmiConstantRegister : kScratchRegister;
    movp(scratch, Operand(rsp, 0));
    movp(dst, scratch);
    leal(rsp, Operand(rsp, 4));
    if (scratch.is(kSmiConstantRegister)) {
      // Restore kSmiConstantRegister.
      movp(kSmiConstantRegister,
           reinterpret_cast<void*>(Smi::FromInt(kSmiConstantRegisterValue)),
           Assembler::RelocInfoNone());
    }
  }
}
2973
2974
// Pops a full 64-bit quadword regardless of pointer size. On x32 the value
// is popped into kScratchRegister (clobbered) then stored pointer-width.
void MacroAssembler::PopQuad(const Operand& dst) {
  if (kPointerSize == kInt64Size) {
    popq(dst);
  } else {
    popq(kScratchRegister);
    movp(dst, kScratchRegister);
  }
}
2983
2984
// Loads one of SharedFunctionInfo's "special" int fields (stored past
// kLengthOffset at an odd int-slot offset) into dst as a sign-extended
// integer. On x32 the field is stored as a smi and must be untagged.
void MacroAssembler::LoadSharedFunctionInfoSpecialField(Register dst,
                                                        Register base,
                                                        int offset) {
  DCHECK(offset > SharedFunctionInfo::kLengthOffset &&
         offset <= SharedFunctionInfo::kSize &&
         (((offset - SharedFunctionInfo::kLengthOffset) / kIntSize) % 2 == 1));
  if (kPointerSize == kInt64Size) {
    movsxlq(dst, FieldOperand(base, offset));
  } else {
    movp(dst, FieldOperand(base, offset));
    SmiToInteger32(dst, dst);
  }
}
2998
2999
// Tests a single bit of a SharedFunctionInfo special field, setting flags
// for a following conditional jump. On x32 the field is a smi, so the bit
// index is adjusted by kSmiShift; the test then addresses the exact byte
// containing the bit.
void MacroAssembler::TestBitSharedFunctionInfoSpecialField(Register base,
                                                           int offset,
                                                           int bits) {
  DCHECK(offset > SharedFunctionInfo::kLengthOffset &&
         offset <= SharedFunctionInfo::kSize &&
         (((offset - SharedFunctionInfo::kLengthOffset) / kIntSize) % 2 == 1));
  if (kPointerSize == kInt32Size) {
    // On x32, this field is represented by SMI.
    bits += kSmiShift;
  }
  int byte_offset = bits / kBitsPerByte;
  int bit_in_byte = bits & (kBitsPerByte - 1);
  testb(FieldOperand(base, offset + byte_offset), Immediate(1 << bit_in_byte));
}
3014
3015
// Jumps to the external (C++) address |ext|, clobbering kScratchRegister.
void MacroAssembler::Jump(ExternalReference ext) {
  LoadAddress(kScratchRegister, ext);
  jmp(kScratchRegister);
}
3020
3021
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003022void MacroAssembler::Jump(const Operand& op) {
3023 if (kPointerSize == kInt64Size) {
3024 jmp(op);
3025 } else {
3026 movp(kScratchRegister, op);
3027 jmp(kScratchRegister);
3028 }
3029}
3030
3031
// Jumps to the absolute address |destination|, recorded with relocation
// mode |rmode|. Clobbers kScratchRegister.
void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
  Move(kScratchRegister, destination, rmode);
  jmp(kScratchRegister);
}
3036
3037
// Jumps to a code object via a relocated, rip-relative jump.
void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
  // TODO(X64): Inline this
  jmp(code_object, rmode);
}
3042
3043
// Returns the exact number of bytes Call(ExternalReference) will emit;
// used by the DEBUG checks in Call() to catch size drift.
int MacroAssembler::CallSize(ExternalReference ext) {
  // Opcode for call kScratchRegister is: Rex.B FF D4 (three bytes).
  return LoadAddressSize(ext) +
         Assembler::kCallScratchRegisterInstructionLength;
}
3049
3050
// Calls the external (C++) address |ext|, clobbering kScratchRegister.
// In DEBUG builds, verifies the emitted size matches CallSize(ext).
void MacroAssembler::Call(ExternalReference ext) {
#ifdef DEBUG
  int end_position = pc_offset() + CallSize(ext);
#endif
  LoadAddress(kScratchRegister, ext);
  call(kScratchRegister);
#ifdef DEBUG
  CHECK_EQ(end_position, pc_offset());
#endif
}
3061
3062
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003063void MacroAssembler::Call(const Operand& op) {
3064 if (kPointerSize == kInt64Size) {
3065 call(op);
3066 } else {
3067 movp(kScratchRegister, op);
3068 call(kScratchRegister);
3069 }
3070}
3071
3072
// Calls the absolute address |destination| with relocation mode |rmode|.
// Clobbers kScratchRegister; DEBUG builds verify the emitted size.
void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
#ifdef DEBUG
  int end_position = pc_offset() + CallSize(destination);
#endif
  Move(kScratchRegister, destination, rmode);
  call(kScratchRegister);
#ifdef DEBUG
  CHECK_EQ(pc_offset(), end_position);
#endif
}
3083
3084
// Calls a code object, optionally recording the AST id for type feedback.
// DEBUG builds verify the emitted size matches CallSize(code_object).
void MacroAssembler::Call(Handle<Code> code_object,
                          RelocInfo::Mode rmode,
                          TypeFeedbackId ast_id) {
#ifdef DEBUG
  int end_position = pc_offset() + CallSize(code_object);
#endif
  DCHECK(RelocInfo::IsCodeTarget(rmode) ||
      rmode == RelocInfo::CODE_AGE_SEQUENCE);
  call(code_object, rmode, ast_id);
#ifdef DEBUG
  CHECK_EQ(end_position, pc_offset());
#endif
}
3098
3099
// Pushes all general registers that can hold JS values so a safepoint can
// describe them. The push order here defines the safepoint register layout
// (see kSafepointPushRegisterIndices) and must mirror Popad exactly.
// rsp/rbp and the VM-reserved registers (r10, r12, r13) are skipped, but
// stack space is still reserved so slots are at fixed indices.
void MacroAssembler::Pushad() {
  Push(rax);
  Push(rcx);
  Push(rdx);
  Push(rbx);
  // Not pushing rsp or rbp.
  Push(rsi);
  Push(rdi);
  Push(r8);
  Push(r9);
  // r10 is kScratchRegister.
  Push(r11);
  // r12 is kSmiConstantRegister.
  // r13 is kRootRegister.
  Push(r14);
  Push(r15);
  STATIC_ASSERT(11 == kNumSafepointSavedRegisters);
  // Use lea for symmetry with Popad.
  int sp_delta =
      (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
  leap(rsp, Operand(rsp, -sp_delta));
}
3122
3123
// Restores the registers saved by Pushad, in exact reverse order.
void MacroAssembler::Popad() {
  // Popad must not change the flags, so use lea instead of addq.
  int sp_delta =
      (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
  leap(rsp, Operand(rsp, sp_delta));
  Pop(r15);
  Pop(r14);
  Pop(r11);
  Pop(r9);
  Pop(r8);
  Pop(rdi);
  Pop(rsi);
  Pop(rbx);
  Pop(rdx);
  Pop(rcx);
  Pop(rax);
}
3141
3142
// Discards the Pushad frame without restoring any registers.
// Note: unlike Popad, this clobbers the flags (addp).
void MacroAssembler::Dropad() {
  addp(rsp, Immediate(kNumSafepointRegisters * kPointerSize));
}
3146
3147
// Order general registers are pushed by Pushad:
// rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r14, r15.
// Maps a register code to its index in the Pushad push sequence, or -1 for
// registers Pushad does not save (rsp, rbp, and the VM-reserved r10/r12/r13).
const int
MacroAssembler::kSafepointPushRegisterIndices[Register::kNumRegisters] = {
    0,   // rax
    1,   // rcx
    2,   // rdx
    3,   // rbx
    -1,  // rsp - not saved.
    -1,  // rbp - not saved.
    4,   // rsi
    5,   // rdi
    6,   // r8
    7,   // r9
    -1,  // r10 - kScratchRegister.
    8,   // r11
    -1,  // r12 - kSmiConstantRegister.
    -1,  // r13 - kRootRegister.
    9,   // r14
    10   // r15
};
3169
3170
// Stores an immediate into the safepoint stack slot reserved for |dst|.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst,
                                                  const Immediate& imm) {
  movp(SafepointRegisterSlot(dst), imm);
}
3175
3176
// Stores |src| into the safepoint stack slot reserved for |dst|.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
  movp(SafepointRegisterSlot(dst), src);
}
3180
3181
// Loads |dst| from the safepoint stack slot reserved for |src|.
void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  movp(dst, SafepointRegisterSlot(src));
}
3185
3186
// Returns the stack operand addressing |reg|'s slot inside a Pushad frame.
Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return Operand(rsp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}
3190
3191
// Pushes a new stack handler (try/catch or JS entry frame) and links it into
// the isolate's handler chain. The layout built here (next, code, state,
// context, fp — from the stack top down) must match StackHandlerConstants.
void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
                                    int handler_index) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize +
                                                kFPOnStackSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // We will build up the handler from the bottom by pushing on the stack.
  // First push the frame pointer and context.
  if (kind == StackHandler::JS_ENTRY) {
    // The frame pointer does not point to a JS frame so we save NULL for
    // rbp. We expect the code throwing an exception to check rbp before
    // dereferencing it to restore the context.
    pushq(Immediate(0));  // NULL frame pointer.
    Push(Smi::FromInt(0));  // No context.
  } else {
    pushq(rbp);
    Push(rsi);
  }

  // Push the state and the code object.
  unsigned state =
      StackHandler::IndexField::encode(handler_index) |
      StackHandler::KindField::encode(kind);
  Push(Immediate(state));
  Push(CodeObject());

  // Link the current handler as the next handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  Push(ExternalOperand(handler_address));
  // Set this new handler as the current one.
  movp(ExternalOperand(handler_address), rsp);
}
3229
3230
// Unlinks the topmost stack handler (restoring the previous one as current)
// and removes the handler frame from the stack.
void MacroAssembler::PopTryHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  // The "next" field is at offset 0, so popping it both reads the previous
  // handler and removes that slot; drop the rest of the frame with addp.
  Pop(ExternalOperand(handler_address));
  addp(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}
3237
3238
// Tail of Throw/ThrowUncatchable: computes the handler's entry point from
// the code object's handler table and jumps to it.
// Register contract on entry: rax = exception, rdi = code object,
// rdx = encoded offset-and-state word.
void MacroAssembler::JumpToHandlerEntry() {
  // Compute the handler entry address and jump to it. The handler table is
  // a fixed array of (smi-tagged) code offsets.
  // rax = exception, rdi = code object, rdx = state.
  movp(rbx, FieldOperand(rdi, Code::kHandlerTableOffset));
  // Strip the kind bits, leaving the handler-table index.
  shrp(rdx, Immediate(StackHandler::kKindWidth));
  movp(rdx,
       FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize));
  SmiToInteger64(rdx, rdx);
  leap(rdi, FieldOperand(rdi, rdx, times_1, Code::kHeaderSize));
  jmp(rdi);
}
3251
3252
// Throws |value| to the topmost stack handler: unwinds to it, restores its
// saved context/frame pointer, and jumps to its handler entry.
void MacroAssembler::Throw(Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize +
                                                kFPOnStackSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // The exception is expected in rax.
  if (!value.is(rax)) {
    movp(rax, value);
  }
  // Drop the stack pointer to the top of the top handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  movp(rsp, ExternalOperand(handler_address));
  // Restore the next handler.
  Pop(ExternalOperand(handler_address));

  // Remove the code object and state, compute the handler address in rdi.
  Pop(rdi);  // Code object.
  Pop(rdx);  // Offset and state.

  // Restore the context and frame pointer.
  Pop(rsi);  // Context.
  popq(rbp);  // Frame pointer.

  // If the handler is a JS frame, restore the context to the frame.
  // (kind == ENTRY) == (rbp == 0) == (rsi == 0), so we could test either
  // rbp or rsi.
  Label skip;
  testp(rsi, rsi);
  j(zero, &skip, Label::kNear);
  movp(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
  bind(&skip);

  JumpToHandlerEntry();
}
3292
3293
// Throws |value| past all JS handlers, unwinding the handler chain until the
// topmost JS_ENTRY handler is found, then jumps to that handler's entry.
void MacroAssembler::ThrowUncatchable(Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize +
                                                kFPOnStackSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // The exception is expected in rax.
  if (!value.is(rax)) {
    movp(rax, value);
  }
  // Drop the stack pointer to the top of the top stack handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  Load(rsp, handler_address);

  // Unwind the handlers until the top ENTRY handler is found.
  Label fetch_next, check_kind;
  jmp(&check_kind, Label::kNear);
  bind(&fetch_next);
  // Follow the "next" link of the current handler.
  movp(rsp, Operand(rsp, StackHandlerConstants::kNextOffset));

  bind(&check_kind);
  STATIC_ASSERT(StackHandler::JS_ENTRY == 0);
  // JS_ENTRY encodes as 0 in the kind field, so not_zero means "keep going".
  testl(Operand(rsp, StackHandlerConstants::kStateOffset),
        Immediate(StackHandler::KindField::kMask));
  j(not_zero, &fetch_next);

  // Set the top handler address to next handler past the top ENTRY handler.
  Pop(ExternalOperand(handler_address));

  // Remove the code object and state, compute the handler address in rdi.
  Pop(rdi);  // Code object.
  Pop(rdx);  // Offset and state.

  // Clear the context pointer and frame pointer (0 was saved in the handler).
  Pop(rsi);
  popq(rbp);

  JumpToHandlerEntry();
}
3337
3338
// Plain return; pops nothing besides the return address.
void MacroAssembler::Ret() {
  ret(0);
}
3342
3343
// Returns and drops |bytes_dropped| bytes of arguments from the stack.
// The ret instruction's pop count is a 16-bit immediate, so larger drops
// are done manually via |scratch| (which holds the return address briefly).
void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
  if (is_uint16(bytes_dropped)) {
    ret(bytes_dropped);
  } else {
    PopReturnAddressTo(scratch);
    addp(rsp, Immediate(bytes_dropped));
    PushReturnAddressFrom(scratch);
    ret(0);
  }
}
3354
3355
// Compares the two top x87 stack values and pops both, leaving the result
// in the EFLAGS condition codes (fucomip pops once, fstp(0) pops the other).
void MacroAssembler::FCmp() {
  fucomip();
  fstp(0);
}
3360
3361
// Compares the instance type of |heap_object| against |type|; leaves the
// object's map in |map| and the comparison result in the flags.
void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  movp(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}
3368
3369
// Compares the instance-type byte of |map| against |type| (flags only).
void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       Immediate(static_cast<int8_t>(type)));
}
3374
3375
// Jumps to |fail| unless |map| has one of the fast (smi-or-object) elements
// kinds. Relies on the elements-kind enum values being ordered 0..3.
void MacroAssembler::CheckFastElements(Register map,
                                       Label* fail,
                                       Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastHoleyElementValue));
  j(above, fail, distance);
}
3387
3388
// Jumps to |fail| unless |map| has a fast *object* elements kind
// (FAST_ELEMENTS or FAST_HOLEY_ELEMENTS): smi-only kinds fail below the
// range, non-fast kinds fail above it.
void MacroAssembler::CheckFastObjectElements(Register map,
                                             Label* fail,
                                             Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastHoleySmiElementValue));
  j(below_equal, fail, distance);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastHoleyElementValue));
  j(above, fail, distance);
}
3403
3404
// Jumps to |fail| unless |map| has a fast smi-only elements kind
// (FAST_SMI_ELEMENTS or FAST_HOLEY_SMI_ELEMENTS).
void MacroAssembler::CheckFastSmiElements(Register map,
                                          Label* fail,
                                          Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastHoleySmiElementValue));
  j(above, fail, distance);
}
3414
3415
// Stores |maybe_number| (a smi or heap number) as a raw double into slot
// |index| of a FixedDoubleArray in |elements|, canonicalizing NaNs so that
// the hole-NaN bit pattern can never be stored by accident. Jumps to |fail|
// if |maybe_number| is neither a smi nor a heap number.
void MacroAssembler::StoreNumberToDoubleElements(
    Register maybe_number,
    Register elements,
    Register index,
    XMMRegister xmm_scratch,
    Label* fail,
    int elements_offset) {
  Label smi_value, is_nan, maybe_nan, not_nan, have_double_value, done;

  JumpIfSmi(maybe_number, &smi_value, Label::kNear);

  CheckMap(maybe_number,
           isolate()->factory()->heap_number_map(),
           fail,
           DONT_DO_SMI_CHECK);

  // Double value, canonicalize NaN.
  // Compare the upper 32 bits of the value against the smallest "upper word"
  // that can belong to a NaN or Infinity.
  uint32_t offset = HeapNumber::kValueOffset + sizeof(kHoleNanLower32);
  cmpl(FieldOperand(maybe_number, offset),
       Immediate(kNaNOrInfinityLowerBoundUpper32));
  j(greater_equal, &maybe_nan, Label::kNear);

  bind(&not_nan);
  movsd(xmm_scratch, FieldOperand(maybe_number, HeapNumber::kValueOffset));
  bind(&have_double_value);
  movsd(FieldOperand(elements, index, times_8,
                     FixedDoubleArray::kHeaderSize - elements_offset),
        xmm_scratch);
  jmp(&done);

  bind(&maybe_nan);
  // Could be NaN or Infinity. If fraction is not zero, it's NaN, otherwise
  // it's an Infinity, and the non-NaN code path applies.
  j(greater, &is_nan, Label::kNear);
  cmpl(FieldOperand(maybe_number, HeapNumber::kValueOffset), Immediate(0));
  j(zero, &not_nan);
  bind(&is_nan);
  // Convert all NaNs to the same canonical NaN value when they are stored in
  // the double array.
  Set(kScratchRegister,
      bit_cast<uint64_t>(
          FixedDoubleArray::canonical_not_the_hole_nan_as_double()));
  movq(xmm_scratch, kScratchRegister);
  jmp(&have_double_value, Label::kNear);

  bind(&smi_value);
  // Value is a smi. convert to a double and store.
  // Preserve original value.
  SmiToInteger32(kScratchRegister, maybe_number);
  Cvtlsi2sd(xmm_scratch, kScratchRegister);
  movsd(FieldOperand(elements, index, times_8,
                     FixedDoubleArray::kHeaderSize - elements_offset),
        xmm_scratch);
  bind(&done);
}
3471
3472
// Compares |obj|'s map word against |map| (flags only).
void MacroAssembler::CompareMap(Register obj, Handle<Map> map) {
  Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
}
3476
3477
// Jumps to |fail| unless |obj| has exactly the map |map|; optionally checks
// for smi first (a smi has no map and would otherwise fault on the load).
void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }

  CompareMap(obj, map);
  j(not_equal, fail);
}
3489
3490
// Clamps the int32 in |reg| to the range [0, 255] in place:
// values < 0 become 0, values > 255 become 255.
void MacroAssembler::ClampUint8(Register reg) {
  Label done;
  // Already in range if no bits above the low byte are set.
  testl(reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  setcc(negative, reg);  // 1 if negative, 0 if positive.
  decb(reg);  // 0 if negative, 255 if positive.
  bind(&done);
}
3499
3500
// Converts the double in |input_reg| to an integer clamped to [0, 255],
// leaving it in |result_reg|. NaN clamps to 0; out-of-range values saturate.
// Clobbers |temp_xmm_reg|.
void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
                                        XMMRegister temp_xmm_reg,
                                        Register result_reg) {
  Label done;
  Label conv_failure;
  xorps(temp_xmm_reg, temp_xmm_reg);
  cvtsd2si(result_reg, input_reg);
  // In range already if no bits above the low byte are set.
  testl(result_reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  // cvtsd2si yields 0x80000000 (INT_MIN) when the conversion fails; only
  // that value overflows when compared against 1.
  cmpl(result_reg, Immediate(1));
  j(overflow, &conv_failure, Label::kNear);
  // Branch-free clamp: 0 if the result was negative, 255 if it was > 255.
  movl(result_reg, Immediate(0));
  setcc(sign, result_reg);
  subl(result_reg, Immediate(1));
  andl(result_reg, Immediate(255));
  jmp(&done, Label::kNear);
  bind(&conv_failure);
  // Conversion failed: NaN and negative values clamp to 0, everything else
  // (large positives, +Infinity) clamps to 255.
  Set(result_reg, 0);
  ucomisd(input_reg, temp_xmm_reg);
  j(below, &done, Label::kNear);
  Set(result_reg, 255);
  bind(&done);
}
3524
3525
// Converts the zero-extended uint32 in |src| to a double in |dst|.
// The 64-bit conversion is exact for any uint32 value.
void MacroAssembler::LoadUint32(XMMRegister dst,
                                Register src) {
  if (FLAG_debug_code) {
    // The caller must guarantee the upper 32 bits are already clear.
    cmpq(src, Immediate(0xffffffff));
    Assert(below_equal, kInputGPRIsExpectedToHaveUpper32Cleared);
  }
  cvtqsi2sd(dst, src);
}
3534
3535
// Out-of-line fallback for double-to-int32 truncation: calls the
// DoubleToIStub on the double stored at [input_reg + offset].
void MacroAssembler::SlowTruncateToI(Register result_reg,
                                     Register input_reg,
                                     int offset) {
  DoubleToIStub stub(isolate(), input_reg, result_reg, offset, true);
  call(stub.GetCode(), RelocInfo::CODE_TARGET);
}
3542
3543
// Truncates the value of the HeapNumber in |input_reg| to an int32 in
// |result_reg| (ECMA-style ToInt32 truncation), taking a stub slow path
// when the cvttsd2siq fast path overflows.
void MacroAssembler::TruncateHeapNumberToI(Register result_reg,
                                           Register input_reg) {
  Label done;
  movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
  cvttsd2siq(result_reg, xmm0);
  // cvttsd2siq yields 0x8000000000000000 on overflow/NaN; only that value
  // overflows when compared against 1, so no_overflow means success.
  cmpq(result_reg, Immediate(1));
  j(no_overflow, &done, Label::kNear);

  // Slow case.
  if (input_reg.is(result_reg)) {
    // The stub would clobber its own input; spill the double to the stack.
    subp(rsp, Immediate(kDoubleSize));
    movsd(MemOperand(rsp, 0), xmm0);
    SlowTruncateToI(result_reg, rsp, 0);
    addp(rsp, Immediate(kDoubleSize));
  } else {
    SlowTruncateToI(result_reg, input_reg);
  }

  bind(&done);
  // Keep our invariant that the upper 32 bits are zero.
  movl(result_reg, result_reg);
}
3566
3567
// Truncates the double in |input_reg| to an int32 in |result_reg|, spilling
// to the stack and calling the DoubleToIStub when the fast path overflows.
void MacroAssembler::TruncateDoubleToI(Register result_reg,
                                       XMMRegister input_reg) {
  Label done;
  cvttsd2siq(result_reg, input_reg);
  // cvttsd2siq yields 0x8000000000000000 on overflow/NaN; only that value
  // overflows when compared against 1, so no_overflow means success.
  cmpq(result_reg, Immediate(1));
  j(no_overflow, &done, Label::kNear);

  subp(rsp, Immediate(kDoubleSize));
  movsd(MemOperand(rsp, 0), input_reg);
  SlowTruncateToI(result_reg, rsp, 0);
  addp(rsp, Immediate(kDoubleSize));

  bind(&done);
  // Keep our invariant that the upper 32 bits are zero.
  movl(result_reg, result_reg);
}
3584
3585
// Converts the double in |input_reg| to an int32 in |result_reg|, jumping to
// |lost_precision| if the value is not exactly representable, to |is_nan|
// on NaN, and (when requested) to |minus_zero| on -0.0. Clobbers xmm0.
// NOTE(review): the |scratch| parameter is unused in this body — presumably
// kept for signature parity with other ports; confirm before removing.
void MacroAssembler::DoubleToI(Register result_reg, XMMRegister input_reg,
                               XMMRegister scratch,
                               MinusZeroMode minus_zero_mode,
                               Label* lost_precision, Label* is_nan,
                               Label* minus_zero, Label::Distance dst) {
  cvttsd2si(result_reg, input_reg);
  // Round-trip the result back to double; any difference means the input
  // was not an exact int32.
  Cvtlsi2sd(xmm0, result_reg);
  ucomisd(xmm0, input_reg);
  j(not_equal, lost_precision, dst);
  j(parity_even, is_nan, dst);  // NaN.
  if (minus_zero_mode == FAIL_ON_MINUS_ZERO) {
    Label done;
    // The integer converted back is equal to the original. We
    // only have to test if we got -0 as an input.
    testl(result_reg, result_reg);
    j(not_zero, &done, Label::kNear);
    movmskpd(result_reg, input_reg);
    // Bit 0 contains the sign of the double in input_reg.
    // If input was positive, we are ok and return 0, otherwise
    // jump to minus_zero.
    andl(result_reg, Immediate(1));
    j(not_zero, minus_zero, dst);
    bind(&done);
  }
}
3611
3612
// Loads |map|'s descriptor array into |descriptors|.
void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  movp(descriptors, FieldOperand(map, Map::kDescriptorsOffset));
}
3617
3618
// Extracts the number-of-own-descriptors bit field of |map| into |dst|.
void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
  movl(dst, FieldOperand(map, Map::kBitField3Offset));
  DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
}
3623
3624
// Extracts |map|'s enum-cache length into |dst| as a smi.
void MacroAssembler::EnumLength(Register dst, Register map) {
  // The field starts at bit 0, so masking alone decodes it.
  STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
  movl(dst, FieldOperand(map, Map::kBitField3Offset));
  andl(dst, Immediate(Map::EnumLengthBits::kMask));
  Integer32ToSmi(dst, dst);
}
3631
3632
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003633void MacroAssembler::DispatchWeakMap(Register obj, Register scratch1,
3634 Register scratch2, Handle<WeakCell> cell,
3635 Handle<Code> success,
3636 SmiCheckType smi_check_type) {
Ben Murdoch257744e2011-11-30 15:57:28 +00003637 Label fail;
3638 if (smi_check_type == DO_SMI_CHECK) {
3639 JumpIfSmi(obj, &fail);
3640 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003641 movq(scratch1, FieldOperand(obj, HeapObject::kMapOffset));
3642 CmpWeakValue(scratch1, cell, scratch2);
Ben Murdoch257744e2011-11-30 15:57:28 +00003643 j(equal, success, RelocInfo::CODE_TARGET);
Ben Murdoch257744e2011-11-30 15:57:28 +00003644 bind(&fail);
3645}
3646
3647
// Debug-mode check that |object| is a smi or a heap number; aborts if not.
// No code is emitted in release builds.
void MacroAssembler::AssertNumber(Register object) {
  if (emit_debug_code()) {
    Label ok;
    Condition is_smi = CheckSmi(object);
    j(is_smi, &ok, Label::kNear);
    Cmp(FieldOperand(object, HeapObject::kMapOffset),
        isolate()->factory()->heap_number_map());
    Check(equal, kOperandIsNotANumber);
    bind(&ok);
  }
}
3659
3660
// Debug-mode check that |object| is not a smi; aborts if it is.
void MacroAssembler::AssertNotSmi(Register object) {
  if (emit_debug_code()) {
    Condition is_smi = CheckSmi(object);
    Check(NegateCondition(is_smi), kOperandIsASmi);
  }
}
3667
3668
// Debug-mode check that the register |object| holds a smi; aborts if not.
void MacroAssembler::AssertSmi(Register object) {
  if (emit_debug_code()) {
    Condition is_smi = CheckSmi(object);
    Check(is_smi, kOperandIsNotASmi);
  }
}
3675
3676
// Debug-mode check that the memory operand |object| holds a smi.
void MacroAssembler::AssertSmi(const Operand& object) {
  if (emit_debug_code()) {
    Condition is_smi = CheckSmi(object);
    Check(is_smi, kOperandIsNotASmi);
  }
}
3683
3684
// Debug-mode check that the upper 32 bits of |int32_register| are zero,
// i.e. the register really holds a zero-extended 32-bit value.
void MacroAssembler::AssertZeroExtended(Register int32_register) {
  if (emit_debug_code()) {
    DCHECK(!int32_register.is(kScratchRegister));
    movq(kScratchRegister, V8_INT64_C(0x0000000100000000));
    cmpq(kScratchRegister, int32_register);
    Check(above_equal, k32BitValueInRegisterIsNotZeroExtended);
  }
}
3693
3694
// Debug-mode check that |object| is a string (not a smi, instance type
// below FIRST_NONSTRING_TYPE). Preserves |object| via push/pop.
void MacroAssembler::AssertString(Register object) {
  if (emit_debug_code()) {
    testb(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAString);
    Push(object);
    movp(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, FIRST_NONSTRING_TYPE);
    Pop(object);
    Check(below, kOperandIsNotAString);
  }
}
3706
3707
// Debug-mode check that |object| is a name (string or symbol: instance
// type <= LAST_NAME_TYPE). Preserves |object| via push/pop.
void MacroAssembler::AssertName(Register object) {
  if (emit_debug_code()) {
    testb(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAName);
    Push(object);
    movp(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, LAST_NAME_TYPE);
    Pop(object);
    Check(below_equal, kOperandIsNotAName);
  }
}
3719
3720
// Debug-mode check that |object| is either the undefined value or an
// AllocationSite (identified by its map in the object's map slot).
void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) {
  if (emit_debug_code()) {
    Label done_checking;
    AssertNotSmi(object);
    Cmp(object, isolate()->factory()->undefined_value());
    j(equal, &done_checking);
    Cmp(FieldOperand(object, 0), isolate()->factory()->allocation_site_map());
    Assert(equal, kExpectedUndefinedOrCell);
    bind(&done_checking);
  }
}
3732
3733
// Debug-mode check that |src| holds the root-list value at
// |root_value_index|; aborts with |reason| if not. Clobbers
// kScratchRegister, so |src| must not alias it.
void MacroAssembler::AssertRootValue(Register src,
                                     Heap::RootListIndex root_value_index,
                                     BailoutReason reason) {
  if (emit_debug_code()) {
    DCHECK(!src.is(kScratchRegister));
    LoadRoot(kScratchRegister, root_value_index);
    cmpp(src, kScratchRegister);
    Check(equal, reason);
  }
}
3744
3745
3746
Leon Clarked91b9f72010-01-27 17:25:45 +00003747Condition MacroAssembler::IsObjectStringType(Register heap_object,
3748 Register map,
3749 Register instance_type) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003750 movp(map, FieldOperand(heap_object, HeapObject::kMapOffset));
Leon Clarke4515c472010-02-03 11:58:03 +00003751 movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003752 STATIC_ASSERT(kNotStringTag != 0);
Leon Clarked91b9f72010-01-27 17:25:45 +00003753 testb(instance_type, Immediate(kIsNotStringMask));
3754 return zero;
3755}
3756
3757
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003758Condition MacroAssembler::IsObjectNameType(Register heap_object,
3759 Register map,
3760 Register instance_type) {
3761 movp(map, FieldOperand(heap_object, HeapObject::kMapOffset));
3762 movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
3763 cmpb(instance_type, Immediate(static_cast<uint8_t>(LAST_NAME_TYPE)));
3764 return below_equal;
3765}
3766
3767
Steve Blocka7e24c12009-10-30 11:49:00 +00003768void MacroAssembler::TryGetFunctionPrototype(Register function,
3769 Register result,
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003770 Label* miss,
3771 bool miss_on_bound_function) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003772 Label non_instance;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003773 if (miss_on_bound_function) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003774 // Check that the receiver isn't a smi.
3775 testl(function, Immediate(kSmiTagMask));
3776 j(zero, miss);
3777
3778 // Check that the function really is a function.
3779 CmpObjectType(function, JS_FUNCTION_TYPE, result);
3780 j(not_equal, miss);
3781
3782 movp(kScratchRegister,
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003783 FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
3784 // It's not smi-tagged (stored in the top half of a smi-tagged 8-byte
3785 // field).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003786 TestBitSharedFunctionInfoSpecialField(kScratchRegister,
3787 SharedFunctionInfo::kCompilerHintsOffset,
3788 SharedFunctionInfo::kBoundFunction);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003789 j(not_zero, miss);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003790
3791 // Make sure that the function has an instance prototype.
3792 testb(FieldOperand(result, Map::kBitFieldOffset),
3793 Immediate(1 << Map::kHasNonInstancePrototype));
3794 j(not_zero, &non_instance, Label::kNear);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003795 }
3796
Steve Blocka7e24c12009-10-30 11:49:00 +00003797 // Get the prototype or initial map from the function.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003798 movp(result,
Steve Blocka7e24c12009-10-30 11:49:00 +00003799 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
3800
3801 // If the prototype or initial map is the hole, don't return it and
3802 // simply miss the cache instead. This will allow us to allocate a
3803 // prototype object on-demand in the runtime system.
3804 CompareRoot(result, Heap::kTheHoleValueRootIndex);
3805 j(equal, miss);
3806
3807 // If the function does not have an initial map, we're done.
Ben Murdoch257744e2011-11-30 15:57:28 +00003808 Label done;
Steve Blocka7e24c12009-10-30 11:49:00 +00003809 CmpObjectType(result, MAP_TYPE, kScratchRegister);
Ben Murdoch257744e2011-11-30 15:57:28 +00003810 j(not_equal, &done, Label::kNear);
Steve Blocka7e24c12009-10-30 11:49:00 +00003811
3812 // Get the prototype from the initial map.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003813 movp(result, FieldOperand(result, Map::kPrototypeOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00003814
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003815 if (miss_on_bound_function) {
3816 jmp(&done, Label::kNear);
3817
3818 // Non-instance prototype: Fetch prototype from constructor field
3819 // in initial map.
3820 bind(&non_instance);
3821 movp(result, FieldOperand(result, Map::kConstructorOffset));
3822 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003823
3824 // All done.
3825 bind(&done);
3826}
3827
3828
3829void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
3830 if (FLAG_native_code_counters && counter->Enabled()) {
Steve Block44f0eee2011-05-26 01:26:41 +01003831 Operand counter_operand = ExternalOperand(ExternalReference(counter));
Ben Murdoch8b112d22011-06-08 16:22:53 +01003832 movl(counter_operand, Immediate(value));
Steve Blocka7e24c12009-10-30 11:49:00 +00003833 }
3834}
3835
3836
3837void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003838 DCHECK(value > 0);
Steve Blocka7e24c12009-10-30 11:49:00 +00003839 if (FLAG_native_code_counters && counter->Enabled()) {
Steve Block44f0eee2011-05-26 01:26:41 +01003840 Operand counter_operand = ExternalOperand(ExternalReference(counter));
Steve Blocka7e24c12009-10-30 11:49:00 +00003841 if (value == 1) {
Steve Block44f0eee2011-05-26 01:26:41 +01003842 incl(counter_operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00003843 } else {
Steve Block44f0eee2011-05-26 01:26:41 +01003844 addl(counter_operand, Immediate(value));
Steve Blocka7e24c12009-10-30 11:49:00 +00003845 }
3846 }
3847}
3848
3849
3850void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003851 DCHECK(value > 0);
Steve Blocka7e24c12009-10-30 11:49:00 +00003852 if (FLAG_native_code_counters && counter->Enabled()) {
Steve Block44f0eee2011-05-26 01:26:41 +01003853 Operand counter_operand = ExternalOperand(ExternalReference(counter));
Steve Blocka7e24c12009-10-30 11:49:00 +00003854 if (value == 1) {
Steve Block44f0eee2011-05-26 01:26:41 +01003855 decl(counter_operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00003856 } else {
Steve Block44f0eee2011-05-26 01:26:41 +01003857 subl(counter_operand, Immediate(value));
Steve Blocka7e24c12009-10-30 11:49:00 +00003858 }
3859 }
3860}
3861
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003862
// Emits a call into the runtime's Runtime::kDebugBreak via CEntryStub,
// passing zero arguments (rax = argc, rbx = runtime entry address).
void MacroAssembler::DebugBreak() {
  Set(rax, 0);  // No arguments.
  LoadAddress(rbx, ExternalReference(Runtime::kDebugBreak, isolate()));
  CEntryStub ces(isolate(), 1);
  DCHECK(AllowThisStubCall(&ces));
  Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}
Ben Murdoch257744e2011-11-30 15:57:28 +00003870
3871
// Invokes the code object whose entry is in |code|, first running the
// argument-adaption prologue. When the prologue determines the argument
// counts definitely mismatch it tail-transfers to the adaptor trampoline
// and no call/jump to |code| is emitted here. |flag| selects call vs jump;
// |call_wrapper| brackets the emitted call for the caller (e.g. safepoints).
void MacroAssembler::InvokeCode(Register code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag,
                                const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  Label done;
  bool definitely_mismatches = false;
  InvokePrologue(expected,
                 actual,
                 Handle<Code>::null(),
                 code,
                 &done,
                 &definitely_mismatches,
                 flag,
                 Label::kNear,
                 call_wrapper);
  if (!definitely_mismatches) {
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(code));
      call(code);
      call_wrapper.AfterCall();
    } else {
      DCHECK(flag == JUMP_FUNCTION);
      jmp(code);
    }
    bind(&done);
  }
}
3903
3904
// Invokes the JSFunction in |function| (must be rdi), loading the expected
// argument count from its SharedFunctionInfo into rbx, the context into
// rsi, and the code entry into rdx before delegating to InvokeCode.
void MacroAssembler::InvokeFunction(Register function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  DCHECK(function.is(rdi));
  movp(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
  movp(rsi, FieldOperand(function, JSFunction::kContextOffset));
  LoadSharedFunctionInfoSpecialField(rbx, rdx,
      SharedFunctionInfo::kFormalParameterCountOffset);
  // Advances rdx to the end of the Code object header, to the start of
  // the executable code.
  movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));

  ParameterCount expected(rbx);
  InvokeCode(rdx, expected, actual, flag, call_wrapper);
}
3924
3925
// Invokes the JSFunction in |function| (must be rdi) with a
// caller-supplied expected argument count, loading the context into rsi
// and the code entry into rdx before delegating to InvokeCode.
void MacroAssembler::InvokeFunction(Register function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  DCHECK(function.is(rdi));
  movp(rsi, FieldOperand(function, JSFunction::kContextOffset));
  // Advances rdx to the end of the Code object header, to the start of
  // the executable code.
  movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));

  InvokeCode(rdx, expected, actual, flag, call_wrapper);
}
3942
3943
// Handle-based convenience overload: materializes |function| into rdi and
// delegates to the register-based InvokeFunction above.
void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  Move(rdi, function);
  InvokeFunction(rdi, expected, actual, flag, call_wrapper);
}
3952
3953
// Emits the expected-vs-actual argument-count check that precedes every
// function invocation. When the counts can mismatch, loads the callee code
// entry into rdx and calls (or jumps to) the ArgumentsAdaptorTrampoline.
// Sets *definitely_mismatches when a compile-time immediate mismatch is
// detected, in which case control is transferred to the adaptor and never
// reaches |done|. On the match path, actual ends up in rax and expected in
// rbx, per the adaptor's calling convention.
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    Register code_register,
                                    Label* done,
                                    bool* definitely_mismatches,
                                    InvokeFlag flag,
                                    Label::Distance near_jump,
                                    const CallWrapper& call_wrapper) {
  bool definitely_matches = false;
  *definitely_mismatches = false;
  Label invoke;
  if (expected.is_immediate()) {
    DCHECK(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      Set(rax, actual.immediate());
      if (expected.immediate() ==
              SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
        // Don't worry about adapting arguments for built-ins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        *definitely_mismatches = true;
        Set(rbx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmpp(expected.reg(), Immediate(actual.immediate()));
      j(equal, &invoke, Label::kNear);
      DCHECK(expected.reg().is(rbx));
      Set(rax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmpp(expected.reg(), actual.reg());
      j(equal, &invoke, Label::kNear);
      DCHECK(actual.reg().is(rax));
      DCHECK(expected.reg().is(rbx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor = isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (!code_constant.is_null()) {
      // Callee supplied as a constant: materialize its code entry in rdx.
      Move(rdx, code_constant, RelocInfo::EMBEDDED_OBJECT);
      addp(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_register.is(rdx)) {
      movp(rdx, code_register);
    }

    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor));
      Call(adaptor, RelocInfo::CODE_TARGET);
      call_wrapper.AfterCall();
      if (!*definitely_mismatches) {
        jmp(done, near_jump);
      }
    } else {
      Jump(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
4025
4026
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004027void MacroAssembler::StubPrologue() {
4028 pushq(rbp); // Caller's frame pointer.
4029 movp(rbp, rsp);
4030 Push(rsi); // Callee's context.
4031 Push(Smi::FromInt(StackFrame::STUB));
4032}
4033
4034
// Emits the JS-function frame prologue inside a fixed-size code-age
// sequence. With |code_pre_aging| the sequence is instead a call to the
// MarkCodeAsExecutedOnce builtin, padded with nops so both variants
// occupy exactly kNoCodeAgeSequenceLength bytes and can be patched over
// each other.
void MacroAssembler::Prologue(bool code_pre_aging) {
  PredictableCodeSizeScope predictible_code_size_scope(this,
      kNoCodeAgeSequenceLength);
  if (code_pre_aging) {
    // Pre-age the code.
    Call(isolate()->builtins()->MarkCodeAsExecutedOnce(),
         RelocInfo::CODE_AGE_SEQUENCE);
    Nop(kNoCodeAgeSequenceLength - Assembler::kShortCallInstructionLength);
  } else {
    pushq(rbp);  // Caller's frame pointer.
    movp(rbp, rsp);
    Push(rsi);  // Callee's context.
    Push(rdi);  // Callee's JS function.
  }
}
4050
4051
// Constant-pool-aware overload; x64 has no out-of-line constant pool, so
// this must never be called.
void MacroAssembler::EnterFrame(StackFrame::Type type,
                                bool load_constant_pool_pointer_reg) {
  // Out-of-line constant pool not implemented on x64.
  UNREACHABLE();
}
4057
4058
// Emits a standard internal frame: saved rbp, context, frame-type marker,
// and the code object for this MacroAssembler. In debug builds, verifies
// that the pushed code-object slot was patched away from the placeholder
// undefined value.
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  pushq(rbp);
  movp(rbp, rsp);
  Push(rsi);  // Context.
  Push(Smi::FromInt(type));
  Move(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
  Push(kScratchRegister);
  if (emit_debug_code()) {
    Move(kScratchRegister,
         isolate()->factory()->undefined_value(),
         RelocInfo::EMBEDDED_OBJECT);
    cmpp(Operand(rsp, 0), kScratchRegister);
    Check(not_equal, kCodeObjectNotProperlyPatched);
  }
}
4074
4075
// Tears down a frame built by EnterFrame. In debug builds, first checks
// that the frame's type marker matches |type|.
void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (emit_debug_code()) {
    Move(kScratchRegister, Smi::FromInt(type));
    cmpp(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
    Check(equal, kStackFrameTypesMustMatch);
  }
  movp(rsp, rbp);
  popq(rbp);
}
4085
4086
// First half of exit-frame construction: builds the frame header (saved
// rbp, entry-sp slot, code object), optionally backs up rax into
// callee-save r14, and records rbp/rsi/rbx into the isolate's
// c_entry_fp / context / c_function top-of-stack slots.
void MacroAssembler::EnterExitFramePrologue(bool save_rax) {
  // Set up the frame structure on the stack.
  // All constants are relative to the frame pointer of the exit frame.
  DCHECK(ExitFrameConstants::kCallerSPDisplacement ==
         kFPOnStackSize + kPCOnStackSize);
  DCHECK(ExitFrameConstants::kCallerPCOffset == kFPOnStackSize);
  DCHECK(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  pushq(rbp);
  movp(rbp, rsp);

  // Reserve room for entry stack pointer and push the code object.
  DCHECK(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  Push(Immediate(0));  // Saved entry sp, patched before call.
  Move(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
  Push(kScratchRegister);  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  if (save_rax) {
    movp(r14, rax);  // Backup rax in callee-save register.
  }

  Store(ExternalReference(Isolate::kCEntryFPAddress, isolate()), rbp);
  Store(ExternalReference(Isolate::kContextAddress, isolate()), rsi);
  Store(ExternalReference(Isolate::kCFunctionAddress, isolate()), rbx);
}
Steve Blocka7e24c12009-10-30 11:49:00 +00004112
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08004113
// Second half of exit-frame construction: reserves C-call argument space
// (plus Win64 shadow space), optionally spills all allocatable XMM
// registers below the frame header, aligns rsp to the OS frame alignment,
// and patches the frame's saved-entry-sp slot with the final rsp.
void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space,
                                            bool save_doubles) {
#ifdef _WIN64
  const int kShadowSpace = 4;
  arg_stack_space += kShadowSpace;
#endif
  // Optionally save all XMM registers.
  if (save_doubles) {
    // Reserve space for both the XMM spill area and the argument area in
    // one adjustment.
    int space = XMMRegister::kMaxNumAllocatableRegisters * kDoubleSize +
        arg_stack_space * kRegisterSize;
    subp(rsp, Immediate(space));
    // Spill slots start below the entry-sp and code-object slots.
    int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::NumAllocatableRegisters(); i++) {
      XMMRegister reg = XMMRegister::FromAllocationIndex(i);
      movsd(Operand(rbp, offset - ((i + 1) * kDoubleSize)), reg);
    }
  } else if (arg_stack_space > 0) {
    subp(rsp, Immediate(arg_stack_space * kRegisterSize));
  }

  // Get the required frame alignment for the OS.
  const int kFrameAlignment = base::OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    DCHECK(base::bits::IsPowerOfTwo32(kFrameAlignment));
    DCHECK(is_int8(kFrameAlignment));
    andp(rsp, Immediate(-kFrameAlignment));
  }

  // Patch the saved entry sp.
  movp(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
}
4145
4146
// Builds a full exit frame for a runtime call: prologue (with rax backed
// up into r14 as the argument count), argv computed into callee-saved r15,
// then the epilogue reserving |arg_stack_space| and optionally spilling
// doubles.
void MacroAssembler::EnterExitFrame(int arg_stack_space, bool save_doubles) {
  EnterExitFramePrologue(true);

  // Set up argv in callee-saved register r15. It is reused in LeaveExitFrame,
  // so it must be retained across the C-call.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  leap(r15, Operand(rbp, r14, times_pointer_size, offset));

  EnterExitFrameEpilogue(arg_stack_space, save_doubles);
}
4157
4158
// Builds an exit frame for an API call: like EnterExitFrame but without
// backing up rax and without spilling double registers.
void MacroAssembler::EnterApiExitFrame(int arg_stack_space) {
  EnterExitFramePrologue(false);
  EnterExitFrameEpilogue(arg_stack_space, false);
}
4163
4164
// Tears down a frame built by EnterExitFrame: optionally restores the
// spilled XMM registers, restores rbp and the return address, drops the
// caller's arguments and receiver (argv preserved in r15 by
// EnterExitFrame), and runs the common epilogue with context restore.
void MacroAssembler::LeaveExitFrame(bool save_doubles) {
  // Registers:
  // r15 : argv
  if (save_doubles) {
    // Mirrors the spill layout written by EnterExitFrameEpilogue.
    int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::NumAllocatableRegisters(); i++) {
      XMMRegister reg = XMMRegister::FromAllocationIndex(i);
      movsd(reg, Operand(rbp, offset - ((i + 1) * kDoubleSize)));
    }
  }
  // Get the return address from the stack and restore the frame pointer.
  movp(rcx, Operand(rbp, kFPOnStackSize));
  movp(rbp, Operand(rbp, 0 * kPointerSize));

  // Drop everything up to and including the arguments and the receiver
  // from the caller stack.
  leap(rsp, Operand(r15, 1 * kPointerSize));

  PushReturnAddressFrom(rcx);

  LeaveExitFrameEpilogue(true);
}
4187
4188
// Tears down a frame built by EnterApiExitFrame: restores rsp/rbp and
// runs the common epilogue, optionally restoring the context from top.
void MacroAssembler::LeaveApiExitFrame(bool restore_context) {
  movp(rsp, rbp);
  popq(rbp);

  LeaveExitFrameEpilogue(restore_context);
}
4195
4196
// Shared exit-frame teardown tail: optionally reloads rsi from the
// isolate's saved context slot (clearing the slot in debug builds), then
// zeroes the isolate's c_entry_fp slot to mark that we left C code.
void MacroAssembler::LeaveExitFrameEpilogue(bool restore_context) {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  Operand context_operand = ExternalOperand(context_address);
  if (restore_context) {
    movp(rsi, context_operand);
  }
#ifdef DEBUG
  movp(context_operand, Immediate(0));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
                                       isolate());
  Operand c_entry_fp_operand = ExternalOperand(c_entry_fp_address);
  movp(c_entry_fp_operand, Immediate(0));
}
4214
4215
// Security check for access to a global proxy in |holder_reg|: passes when
// the current lexical context's native context is the same as the holder's,
// or when the two native contexts carry equal security tokens; otherwise
// jumps to |miss|. Clobbers |scratch| and kScratchRegister; |holder_reg|
// is preserved (debug path saves/restores it).
void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  DCHECK(!holder_reg.is(scratch));
  DCHECK(!scratch.is(kScratchRegister));
  // Load current lexical context from the stack frame.
  movp(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (emit_debug_code()) {
    cmpp(scratch, Immediate(0));
    Check(not_equal, kWeShouldNotHaveAnEmptyLexicalContext);
  }
  // Load the native context of the current context.
  int offset =
      Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
  movp(scratch, FieldOperand(scratch, offset));
  movp(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));

  // Check the context is a native context.
  if (emit_debug_code()) {
    Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
        isolate()->factory()->native_context_map());
    Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
  }

  // Check if both contexts are the same.
  cmpp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  j(equal, &same_contexts);

  // Compare security tokens.
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.

  // Check the context is a native context.
  if (emit_debug_code()) {
    // Preserve original value of holder_reg.
    Push(holder_reg);
    movp(holder_reg,
         FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
    CompareRoot(holder_reg, Heap::kNullValueRootIndex);
    Check(not_equal, kJSGlobalProxyContextShouldNotBeNull);

    // Read the first word and compare to native_context_map(),
    movp(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    CompareRoot(holder_reg, Heap::kNativeContextMapRootIndex);
    Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
    Pop(holder_reg);
  }

  movp(kScratchRegister,
       FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  int token_offset =
      Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
  movp(scratch, FieldOperand(scratch, token_offset));
  cmpp(scratch, FieldOperand(kScratchRegister, token_offset));
  j(not_equal, miss);

  bind(&same_contexts);
}
4279
4280
// Compute the hash code from the untagged key. This must be kept in sync with
// ComputeIntegerHash in utils.h and KeyedLoadGenericStub in
// code-stub-hydrogen.cc
//
// Emits code that replaces the untagged integer key in |r0| with its hash
// (a shift-xor integer hash seeded by the heap's hash seed). |scratch| is
// clobbered.
void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
  // First of all we assign the hash seed to scratch.
  LoadRoot(scratch, Heap::kHashSeedRootIndex);
  SmiToInteger32(scratch, scratch);

  // Xor original key with a seed.
  xorl(r0, scratch);

  // Compute the hash code from the untagged key. This must be kept in sync
  // with ComputeIntegerHash in utils.h.
  //
  // hash = ~hash + (hash << 15);
  movl(scratch, r0);
  notl(r0);
  shll(scratch, Immediate(15));
  addl(r0, scratch);
  // hash = hash ^ (hash >> 12);
  movl(scratch, r0);
  shrl(scratch, Immediate(12));
  xorl(r0, scratch);
  // hash = hash + (hash << 2);
  leal(r0, Operand(r0, r0, times_4, 0));
  // hash = hash ^ (hash >> 4);
  movl(scratch, r0);
  shrl(scratch, Immediate(4));
  xorl(r0, scratch);
  // hash = hash * 2057;
  imull(r0, r0, Immediate(2057));
  // hash = hash ^ (hash >> 16);
  movl(scratch, r0);
  shrl(scratch, Immediate(16));
  xorl(r0, scratch);
}
4317
4318
4319
// Looks up |key| (a smi, with its untagged value pre-loaded in r0) in a
// slow-mode (SeededNumberDictionary) elements backing store. On success the
// value falls through into |result|; on any miss (key absent, or the entry
// is not a FIELD property) control transfers to |miss|.
void MacroAssembler::LoadFromNumberDictionary(Label* miss,
                                              Register elements,
                                              Register key,
                                              Register r0,
                                              Register r1,
                                              Register r2,
                                              Register result) {
  // Register use:
  //
  // elements - holds the slow-case elements of the receiver on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // key      - holds the smi key on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // Scratch registers:
  //
  // r0 - holds the untagged key on entry and holds the hash once computed.
  //
  // r1 - used to hold the capacity mask of the dictionary
  //
  // r2 - used for the index into the dictionary.
  //
  // result - holds the result on exit if the load succeeded.
  //          Allowed to be the same as 'key' or 'result'.
  //          Unchanged on bailout so 'key' or 'result' can be used
  //          in further computation.

  Label done;

  // Leaves the seeded hash of the untagged key in r0 (clobbers r1).
  GetNumberHash(r0, r1);

  // Compute capacity mask (capacity is a power of two, so capacity - 1
  // masks a hash down to a valid bucket index).
  SmiToInteger32(r1, FieldOperand(elements,
                                  SeededNumberDictionary::kCapacityOffset));
  decl(r1);

  // Generate an unrolled loop that performs a few probes before giving up.
  for (int i = 0; i < kNumberDictionaryProbes; i++) {
    // Use r2 for index calculations and keep the hash intact in r0.
    movp(r2, r0);
    // Compute the masked index: (hash + i + i * i) & mask.
    // GetProbeOffset(i) supplies the quadratic-probing term i + i * i.
    if (i > 0) {
      addl(r2, Immediate(SeededNumberDictionary::GetProbeOffset(i)));
    }
    andp(r2, r1);

    // Scale the index by multiplying by the entry size.
    DCHECK(SeededNumberDictionary::kEntrySize == 3);
    leap(r2, Operand(r2, r2, times_2, 0));  // r2 = r2 * 3

    // Check if the key matches.
    cmpp(key, FieldOperand(elements,
                           r2,
                           times_pointer_size,
                           SeededNumberDictionary::kElementsStartOffset));
    // All probes but the last jump forward on a hit; the last probe is the
    // give-up point, so it jumps to |miss| unless it hit.
    if (i != (kNumberDictionaryProbes - 1)) {
      j(equal, &done);
    } else {
      j(not_equal, miss);
    }
  }

  bind(&done);
  // Check that the value is a field property.
  // Entry layout is [key, value, details]; details lives 2 words past key.
  const int kDetailsOffset =
      SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  DCHECK_EQ(FIELD, 0);
  Test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
       Smi::FromInt(PropertyDetails::TypeField::kMask));
  j(not_zero, miss);

  // Get the value at the masked, scaled index (one word past the key).
  const int kValueOffset =
      SeededNumberDictionary::kElementsStartOffset + kPointerSize;
  movp(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
}
4397
4398
// Loads the allocation top of the space selected by |flags| into |result|.
// If |scratch| is valid, the address of the top cell itself is left in
// |scratch| so UpdateAllocationTopHelper can store through it later without
// recomputing the external reference.
void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    DCHECK(!scratch.is_valid());
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    Operand top_operand = ExternalOperand(allocation_top);
    cmpp(result, top_operand);
    Check(equal, kUnexpectedAllocationTop);
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available,
  // and keep address in scratch until call to UpdateAllocationTopHelper.
  if (scratch.is_valid()) {
    LoadAddress(scratch, allocation_top);
    movp(result, Operand(scratch, 0));
  } else {
    Load(result, allocation_top);
  }
}
4427
4428
// Ensures the allocation pointer in |result| is double aligned, inserting a
// one-pointer filler object when it is not. On targets where pointers are
// already double-sized this is a debug-mode assertion only.
void MacroAssembler::MakeSureDoubleAlignedHelper(Register result,
                                                 Register scratch,
                                                 Label* gc_required,
                                                 AllocationFlags flags) {
  if (kPointerSize == kDoubleSize) {
    // Pointer-sized allocation granularity already guarantees double
    // alignment; just verify in debug builds.
    if (FLAG_debug_code) {
      testl(result, Immediate(kDoubleAlignmentMask));
      Check(zero, kAllocationIsNotDoubleAligned);
    }
  } else {
    // Align the next allocation. Storing the filler map without checking top
    // is safe in new-space because the limit of the heap is aligned there.
    DCHECK(kPointerSize * 2 == kDoubleSize);
    DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    // Make sure scratch is not clobbered by this function as it might be
    // used in UpdateAllocationTopHelper later.
    DCHECK(!scratch.is(kScratchRegister));
    Label aligned;
    testl(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
      // In old data space the limit is not necessarily aligned, so writing
      // the filler could run past it; bail out to the GC in that case.
      ExternalReference allocation_limit =
          AllocationUtils::GetAllocationLimitReference(isolate(), flags);
      cmpp(result, ExternalOperand(allocation_limit));
      j(above_equal, gc_required);
    }
    LoadRoot(kScratchRegister, Heap::kOnePointerFillerMapRootIndex);
    movp(Operand(result, 0), kScratchRegister);
    addp(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }
}
4462
4463
// Writes |result_end| back as the new allocation top for the space selected
// by |flags|. If |scratch| is valid it must already hold the address of the
// top cell (as left there by LoadAllocationTopHelper).
void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch,
                                               AllocationFlags flags) {
  if (emit_debug_code()) {
    // The new top must be object-aligned.
    testp(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, kUnalignedAllocationInNewSpace);
  }

  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Update new top.
  if (scratch.is_valid()) {
    // Scratch already contains address of allocation top.
    movp(Operand(scratch, 0), result_end);
  } else {
    Store(allocation_top, result_end);
  }
}
4483
4484
// Bump-pointer allocates a fixed-size object of |object_size| bytes in the
// space selected by |flags|, leaving the (optionally tagged) object address
// in |result|. Jumps to |gc_required| when inline allocation is disabled or
// the space is exhausted. |result_end| and |scratch| are optional helpers.
void MacroAssembler::Allocate(int object_size,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
  DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      movl(result, Immediate(0x7091));
      if (result_end.is_valid()) {
        movl(result_end, Immediate(0x7191));
      }
      if (scratch.is_valid()) {
        movl(scratch, Immediate(0x7291));
      }
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    MakeSureDoubleAlignedHelper(result, scratch, gc_required, flags);
  }

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Without a result_end register, the old top in |result| is advanced in
  // place and the object start is recovered by subtraction below.
  Register top_reg = result_end.is_valid() ? result_end : result;

  if (!top_reg.is(result)) {
    movp(top_reg, result);
  }
  addp(top_reg, Immediate(object_size));
  j(carry, gc_required);
  Operand limit_operand = ExternalOperand(allocation_limit);
  cmpp(top_reg, limit_operand);
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(top_reg, scratch, flags);

  bool tag_result = (flags & TAG_OBJECT) != 0;
  if (top_reg.is(result)) {
    // Recover the object start; fold in the heap-object tag when requested.
    if (tag_result) {
      subp(result, Immediate(object_size - kHeapObjectTag));
    } else {
      subp(result, Immediate(object_size));
    }
  } else if (tag_result) {
    // Tag the result if requested.
    DCHECK(kHeapObjectTag == 1);
    incp(result);
  }
}
4547
4548
// Allocates a variable-sized object of header_size + element_count scaled by
// element_size bytes; the byte size is materialized in |result_end| and the
// register-sized Allocate overload does the rest.
void MacroAssembler::Allocate(int header_size,
                              ScaleFactor element_size,
                              Register element_count,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & SIZE_IN_WORDS) == 0);
  // result_end = element_count << element_size + header_size, via lea.
  leap(result_end, Operand(element_count, element_size, header_size));
  Allocate(result_end, result, result_end, scratch, gc_required, flags);
}
4561
4562
// Bump-pointer allocates an object whose byte size is held in the register
// |object_size| (left unchanged unless it aliases |result_end|). Leaves the
// (optionally tagged) object address in |result|; jumps to |gc_required| on
// failure.
void MacroAssembler::Allocate(Register object_size,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & SIZE_IN_WORDS) == 0);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      movl(result, Immediate(0x7091));
      movl(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        movl(scratch, Immediate(0x7291));
      }
      // object_size is left unchanged by this function.
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    MakeSureDoubleAlignedHelper(result, scratch, gc_required, flags);
  }

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);
  if (!object_size.is(result_end)) {
    movp(result_end, object_size);
  }
  addp(result_end, result);
  j(carry, gc_required);
  Operand limit_operand = ExternalOperand(allocation_limit);
  cmpp(result_end, limit_operand);
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch, flags);

  // Tag the result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    addp(result, Immediate(kHeapObjectTag));
  }
}
4612
4613
// Rolls back the most recent new-space allocation by resetting the
// allocation top to |object|'s (untagged) start address. Only valid when
// |object| was the last object allocated and nothing was allocated after it.
void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Make sure the object has no tag before resetting top.
  andp(object, Immediate(~kHeapObjectTagMask));
  Operand top_operand = ExternalOperand(new_space_allocation_top);
#ifdef DEBUG
  // The object must lie below the current top, or it was not the most
  // recently allocated object.
  cmpp(object, top_operand);
  Check(below, kUndoAllocationOfNonAllocatedMemory);
#endif
  movp(top_operand, object);
}
4627
4628
// Allocates a (possibly mutable) HeapNumber and installs the matching map.
// The value field is left uninitialized for the caller to fill in. Jumps to
// |gc_required| when allocation fails.
void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch,
                                        Label* gc_required,
                                        MutableMode mode) {
  // Allocate heap number in new space.
  Allocate(HeapNumber::kSize, result, scratch, no_reg, gc_required, TAG_OBJECT);

  // Mutable heap numbers get their own map so stores can update in place.
  Heap::RootListIndex map_index = mode == MUTABLE
      ? Heap::kMutableHeapNumberMapRootIndex
      : Heap::kHeapNumberMapRootIndex;

  // Set the map.
  LoadRoot(kScratchRegister, map_index);
  movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}
4644
4645
// Allocates a sequential two-byte string with room for |length| characters
// and initializes its map, length, and hash field (character payload is left
// uninitialized). Jumps to |gc_required| on allocation failure.
void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  const int kHeaderAlignment = SeqTwoByteString::kHeaderSize &
                               kObjectAlignmentMask;
  DCHECK(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  // (kHeaderAlignment is added here and subtracted again below so that the
  // rounding accounts for the header's own alignment slack.)
  leap(scratch1, Operand(length, length, times_1, kObjectAlignmentMask +
                kHeaderAlignment));
  andp(scratch1, Immediate(~kObjectAlignmentMask));
  if (kHeaderAlignment > 0) {
    subp(scratch1, Immediate(kHeaderAlignment));
  }

  // Allocate two byte string in new space.
  Allocate(SeqTwoByteString::kHeaderSize,
           times_1,
           scratch1,
           result,
           scratch2,
           scratch3,
           gc_required,
           TAG_OBJECT);

  // Set the map, length and hash field.
  LoadRoot(kScratchRegister, Heap::kStringMapRootIndex);
  movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
  Integer32ToSmi(scratch1, length);
  movp(FieldOperand(result, String::kLengthOffset), scratch1);
  movp(FieldOperand(result, String::kHashFieldOffset),
       Immediate(String::kEmptyHashField));
}
4683
4684
// Allocates a sequential one-byte string with room for |length| characters
// and initializes its map, length, and hash field (character payload is left
// uninitialized). Jumps to |gc_required| on allocation failure.
void MacroAssembler::AllocateOneByteString(Register result, Register length,
                                           Register scratch1, Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  const int kHeaderAlignment = SeqOneByteString::kHeaderSize &
                               kObjectAlignmentMask;
  movl(scratch1, length);
  DCHECK(kCharSize == 1);
  // Round length up to object alignment, compensating for the header's own
  // alignment slack (added here, subtracted after the mask).
  addp(scratch1, Immediate(kObjectAlignmentMask + kHeaderAlignment));
  andp(scratch1, Immediate(~kObjectAlignmentMask));
  if (kHeaderAlignment > 0) {
    subp(scratch1, Immediate(kHeaderAlignment));
  }

  // Allocate one-byte string in new space.
  Allocate(SeqOneByteString::kHeaderSize,
           times_1,
           scratch1,
           result,
           scratch2,
           scratch3,
           gc_required,
           TAG_OBJECT);

  // Set the map, length and hash field.
  LoadRoot(kScratchRegister, Heap::kOneByteStringMapRootIndex);
  movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
  Integer32ToSmi(scratch1, length);
  movp(FieldOperand(result, String::kLengthOffset), scratch1);
  movp(FieldOperand(result, String::kHashFieldOffset),
       Immediate(String::kEmptyHashField));
}
4719
4720
Ben Murdoch589d6972011-11-30 16:04:58 +00004721void MacroAssembler::AllocateTwoByteConsString(Register result,
Leon Clarkee46be812010-01-19 14:06:41 +00004722 Register scratch1,
4723 Register scratch2,
4724 Label* gc_required) {
4725 // Allocate heap number in new space.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004726 Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
4727 TAG_OBJECT);
Leon Clarkee46be812010-01-19 14:06:41 +00004728
4729 // Set the map. The other fields are left uninitialized.
4730 LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004731 movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
Leon Clarkee46be812010-01-19 14:06:41 +00004732}
4733
4734
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004735void MacroAssembler::AllocateOneByteConsString(Register result,
4736 Register scratch1,
4737 Register scratch2,
4738 Label* gc_required) {
4739 Allocate(ConsString::kSize,
4740 result,
4741 scratch1,
4742 scratch2,
4743 gc_required,
4744 TAG_OBJECT);
Leon Clarkee46be812010-01-19 14:06:41 +00004745
4746 // Set the map. The other fields are left uninitialized.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004747 LoadRoot(kScratchRegister, Heap::kConsOneByteStringMapRootIndex);
4748 movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
Leon Clarkee46be812010-01-19 14:06:41 +00004749}
4750
4751
Ben Murdoch589d6972011-11-30 16:04:58 +00004752void MacroAssembler::AllocateTwoByteSlicedString(Register result,
4753 Register scratch1,
4754 Register scratch2,
4755 Label* gc_required) {
4756 // Allocate heap number in new space.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004757 Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
4758 TAG_OBJECT);
Ben Murdoch589d6972011-11-30 16:04:58 +00004759
4760 // Set the map. The other fields are left uninitialized.
4761 LoadRoot(kScratchRegister, Heap::kSlicedStringMapRootIndex);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004762 movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
Ben Murdoch589d6972011-11-30 16:04:58 +00004763}
4764
4765
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004766void MacroAssembler::AllocateOneByteSlicedString(Register result,
4767 Register scratch1,
4768 Register scratch2,
4769 Label* gc_required) {
Ben Murdoch589d6972011-11-30 16:04:58 +00004770 // Allocate heap number in new space.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004771 Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
4772 TAG_OBJECT);
Ben Murdoch589d6972011-11-30 16:04:58 +00004773
4774 // Set the map. The other fields are left uninitialized.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004775 LoadRoot(kScratchRegister, Heap::kSlicedOneByteStringMapRootIndex);
4776 movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
Ben Murdoch589d6972011-11-30 16:04:58 +00004777}
4778
4779
// Copy memory, byte-by-byte, from source to destination. Not optimized for
// long or aligned copies. The contents of scratch and length are destroyed.
// Destination is incremented by length, source, length and scratch are
// clobbered.
// A simpler loop is faster on small copies, but slower on large ones.
// The cld() instruction must have been emitted, to set the direction flag(),
// before calling this function.
void MacroAssembler::CopyBytes(Register destination,
                               Register source,
                               Register length,
                               int min_length,
                               Register scratch) {
  DCHECK(min_length >= 0);
  if (emit_debug_code()) {
    cmpl(length, Immediate(min_length));
    Assert(greater_equal, kInvalidMinLength);
  }
  Label short_loop, len8, len16, len24, done, short_string;

  // Copies of up to 4 words take the unrolled fast paths below; anything
  // guaranteed longer goes straight to the rep-movs path.
  const int kLongStringLimit = 4 * kPointerSize;
  if (min_length <= kLongStringLimit) {
    cmpl(length, Immediate(kPointerSize));
    j(below, &short_string, Label::kNear);
  }

  // rep movs and the register conventions below require these exact
  // registers.
  DCHECK(source.is(rsi));
  DCHECK(destination.is(rdi));
  DCHECK(length.is(rcx));

  if (min_length <= kLongStringLimit) {
    cmpl(length, Immediate(2 * kPointerSize));
    j(below_equal, &len8, Label::kNear);
    cmpl(length, Immediate(3 * kPointerSize));
    j(below_equal, &len16, Label::kNear);
    cmpl(length, Immediate(4 * kPointerSize));
    j(below_equal, &len24, Label::kNear);
  }

  // Because source is 8-byte aligned in our uses of this function,
  // we keep source aligned for the rep movs operation by copying the odd bytes
  // at the end of the ranges.
  movp(scratch, length);
  shrl(length, Immediate(kPointerSizeLog2));
  repmovsp();
  // Move remaining bytes of length.
  // The trailing (length & (kPointerSize-1)) bytes are handled by one
  // overlapping word copy ending exactly at the range end.
  andl(scratch, Immediate(kPointerSize - 1));
  movp(length, Operand(source, scratch, times_1, -kPointerSize));
  movp(Operand(destination, scratch, times_1, -kPointerSize), length);
  addp(destination, scratch);

  if (min_length <= kLongStringLimit) {
    jmp(&done, Label::kNear);
    // Unrolled word copies: len24/len16/len8 fall through into each other,
    // each copying one more leading word, then a final overlapping word
    // copy covers the unaligned tail.
    bind(&len24);
    movp(scratch, Operand(source, 2 * kPointerSize));
    movp(Operand(destination, 2 * kPointerSize), scratch);
    bind(&len16);
    movp(scratch, Operand(source, kPointerSize));
    movp(Operand(destination, kPointerSize), scratch);
    bind(&len8);
    movp(scratch, Operand(source, 0));
    movp(Operand(destination, 0), scratch);
    // Move remaining bytes of length.
    movp(scratch, Operand(source, length, times_1, -kPointerSize));
    movp(Operand(destination, length, times_1, -kPointerSize), scratch);
    addp(destination, length);
    jmp(&done, Label::kNear);

    // Fewer than kPointerSize bytes: plain byte-at-a-time loop.
    bind(&short_string);
    if (min_length == 0) {
      testl(length, length);
      j(zero, &done, Label::kNear);
    }

    bind(&short_loop);
    movb(scratch, Operand(source, 0));
    movb(Operand(destination, 0), scratch);
    incp(source);
    incp(destination);
    decl(length);
    j(not_zero, &short_loop);
  }

  bind(&done);
}
4864
4865
// Stores |filler| into every pointer-sized slot in the half-open range
// [start_offset, end_offset). |start_offset| is advanced to |end_offset|.
void MacroAssembler::InitializeFieldsWithFiller(Register start_offset,
                                                Register end_offset,
                                                Register filler) {
  Label loop, entry;
  // Test-at-bottom loop: jump to the condition first so an empty range
  // stores nothing.
  jmp(&entry);
  bind(&loop);
  movp(Operand(start_offset, 0), filler);
  addp(start_offset, Immediate(kPointerSize));
  bind(&entry);
  cmpp(start_offset, end_offset);
  j(less, &loop);
}
4878
4879
// Loads into |dst| the context that is |context_chain_length| hops up the
// context chain from the current context (rsi); a length of zero yields the
// current context itself.
void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    movp(dst, Operand(rsi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    for (int i = 1; i < context_chain_length; i++) {
      movp(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    }
  } else {
    // Slot is in the current function context. Move it into the
    // destination register in case we store into it (the write barrier
    // cannot be allowed to destroy the context in rsi).
    movp(dst, rsi);
  }

  // We should not have found a with context by walking the context
  // chain (i.e., the static scope chain and runtime context chain do
  // not agree). A variable occurring in such a scope should have
  // slot type LOOKUP and not CONTEXT.
  if (emit_debug_code()) {
    CompareRoot(FieldOperand(dst, HeapObject::kMapOffset),
                Heap::kWithContextMapRootIndex);
    Check(not_equal, kVariableResolvedToWithContext);
  }
}
4904
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004905
// If |map_in_out| holds the native context's cached JSArray map for
// |expected_kind|, replaces it with the cached map for |transitioned_kind|;
// otherwise jumps to |no_map_match| leaving |map_in_out| unchanged.
// |scratch| is clobbered.
void MacroAssembler::LoadTransitionedArrayMapConditional(
    ElementsKind expected_kind,
    ElementsKind transitioned_kind,
    Register map_in_out,
    Register scratch,
    Label* no_map_match) {
  // Load the global or builtins object from the current context.
  movp(scratch,
       Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  movp(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));

  // Check that the function's map is the same as the expected cached map.
  // scratch now holds the FixedArray of per-ElementsKind JSArray maps.
  movp(scratch, Operand(scratch,
                        Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX)));

  int offset = expected_kind * kPointerSize +
      FixedArrayBase::kHeaderSize;
  cmpp(map_in_out, FieldOperand(scratch, offset));
  j(not_equal, no_map_match);

  // Use the transitioned cached map.
  offset = transitioned_kind * kPointerSize +
      FixedArrayBase::kHeaderSize;
  movp(map_in_out, FieldOperand(scratch, offset));
}
4931
4932
// Number of C-function arguments passed in registers by the native calling
// convention: four on Windows x64, six on the System V AMD64 ABI
// (Linux/Mac).
#ifdef _WIN64
static const int kRegisterPassedArguments = 4;
#else
static const int kRegisterPassedArguments = 6;
#endif
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004938
// Loads into |function| the function stored at slot |index| of the native
// context reached through the current context (rsi).
void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the global or builtins object from the current context.
  movp(function,
       Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  // Load the native context from the global or builtins object.
  movp(function, FieldOperand(function, GlobalObject::kNativeContextOffset));
  // Load the function from the native context.
  movp(function, Operand(function, Context::SlotOffset(index)));
}
4948
4949
// Loads |function|'s initial map into |map|. Global functions are expected
// to always have an initial map; debug builds verify this by checking the
// loaded value is a map (its map is the meta map) and abort otherwise.
void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map. The global functions all have initial maps.
  movp(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
    jmp(&ok);
    bind(&fail);
    Abort(kGlobalFunctionsMustHaveInitialMap);
    bind(&ok);
  }
}
4963
4964
Leon Clarke4515c472010-02-03 11:58:03 +00004965int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004966 // On Windows 64 stack slots are reserved by the caller for all arguments
4967 // including the ones passed in registers, and space is always allocated for
4968 // the four register arguments even if the function takes fewer than four
4969 // arguments.
4970 // On AMD64 ABI (Linux/Mac) the first six arguments are passed in registers
4971 // and the caller does not reserve stack slots for them.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004972 DCHECK(num_arguments >= 0);
Leon Clarke4515c472010-02-03 11:58:03 +00004973#ifdef _WIN64
Steve Block44f0eee2011-05-26 01:26:41 +01004974 const int kMinimumStackSlots = kRegisterPassedArguments;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004975 if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots;
4976 return num_arguments;
Leon Clarke4515c472010-02-03 11:58:03 +00004977#else
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004978 if (num_arguments < kRegisterPassedArguments) return 0;
4979 return num_arguments - kRegisterPassedArguments;
Leon Clarke4515c472010-02-03 11:58:03 +00004980#endif
Leon Clarke4515c472010-02-03 11:58:03 +00004981}
4982
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004983
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004984void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
4985 Register index,
4986 Register value,
4987 uint32_t encoding_mask) {
4988 Label is_object;
4989 JumpIfNotSmi(string, &is_object);
4990 Abort(kNonObject);
4991 bind(&is_object);
4992
4993 Push(value);
4994 movp(value, FieldOperand(string, HeapObject::kMapOffset));
4995 movzxbp(value, FieldOperand(value, Map::kInstanceTypeOffset));
4996
4997 andb(value, Immediate(kStringRepresentationMask | kStringEncodingMask));
4998 cmpp(value, Immediate(encoding_mask));
4999 Pop(value);
5000 Check(equal, kUnexpectedStringType);
5001
5002 // The index is assumed to be untagged coming in, tag it to compare with the
5003 // string length without using a temp register, it is restored at the end of
5004 // this function.
5005 Integer32ToSmi(index, index);
5006 SmiCompare(index, FieldOperand(string, String::kLengthOffset));
5007 Check(less, kIndexIsTooLarge);
5008
5009 SmiCompare(index, Smi::FromInt(0));
5010 Check(greater_equal, kIndexIsNegative);
5011
5012 // Restore the index
5013 SmiToInteger32(index, index);
5014}
5015
5016
Leon Clarke4515c472010-02-03 11:58:03 +00005017void MacroAssembler::PrepareCallCFunction(int num_arguments) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005018 int frame_alignment = base::OS::ActivationFrameAlignment();
5019 DCHECK(frame_alignment != 0);
5020 DCHECK(num_arguments >= 0);
Steve Block44f0eee2011-05-26 01:26:41 +01005021
Leon Clarke4515c472010-02-03 11:58:03 +00005022 // Make stack end at alignment and allocate space for arguments and old rsp.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005023 movp(kScratchRegister, rsp);
5024 DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
Leon Clarke4515c472010-02-03 11:58:03 +00005025 int argument_slots_on_stack =
5026 ArgumentStackSlotsForCFunctionCall(num_arguments);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005027 subp(rsp, Immediate((argument_slots_on_stack + 1) * kRegisterSize));
5028 andp(rsp, Immediate(-frame_alignment));
5029 movp(Operand(rsp, argument_slots_on_stack * kRegisterSize), kScratchRegister);
Leon Clarke4515c472010-02-03 11:58:03 +00005030}
5031
5032
// Calls the C function at |function| with |num_arguments| arguments.
// The stack must already have been prepared with PrepareCallCFunction.
// Loads the target address into rax and delegates to the register overload.
void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  LoadAddress(rax, function);
  CallCFunction(rax, num_arguments);
}
5038
5039
// Calls the C function whose address is in |function|, then restores rsp from
// the slot PrepareCallCFunction saved it in (just above the argument slots).
void MacroAssembler::CallCFunction(Register function, int num_arguments) {
  DCHECK(has_frame());
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(function);
  DCHECK(base::OS::ActivationFrameAlignment() != 0);
  DCHECK(num_arguments >= 0);
  // Restore the caller's rsp stored by PrepareCallCFunction above the
  // argument area.
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  movp(rsp, Operand(rsp, argument_slots_on_stack * kRegisterSize));
}
5054
Steve Blockd0582a62009-12-15 09:54:21 +00005055
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005056#ifdef DEBUG
5057bool AreAliased(Register reg1,
5058 Register reg2,
5059 Register reg3,
5060 Register reg4,
5061 Register reg5,
5062 Register reg6,
5063 Register reg7,
5064 Register reg8) {
5065 int n_of_valid_regs = reg1.is_valid() + reg2.is_valid() +
5066 reg3.is_valid() + reg4.is_valid() + reg5.is_valid() + reg6.is_valid() +
5067 reg7.is_valid() + reg8.is_valid();
5068
5069 RegList regs = 0;
5070 if (reg1.is_valid()) regs |= reg1.bit();
5071 if (reg2.is_valid()) regs |= reg2.bit();
5072 if (reg3.is_valid()) regs |= reg3.bit();
5073 if (reg4.is_valid()) regs |= reg4.bit();
5074 if (reg5.is_valid()) regs |= reg5.bit();
5075 if (reg6.is_valid()) regs |= reg6.bit();
5076 if (reg7.is_valid()) regs |= reg7.bit();
5077 if (reg8.is_valid()) regs |= reg8.bit();
5078 int n_of_non_aliasing_regs = NumRegs(regs);
5079
5080 return n_of_valid_regs != n_of_non_aliasing_regs;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01005081}
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005082#endif
Ben Murdoch3ef787d2012-04-12 10:51:47 +01005083
5084
Steve Blocka7e24c12009-10-30 11:49:00 +00005085CodePatcher::CodePatcher(byte* address, int size)
Ben Murdoch8b112d22011-06-08 16:22:53 +01005086 : address_(address),
5087 size_(size),
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005088 masm_(NULL, address, size + Assembler::kGap) {
Steve Blocka7e24c12009-10-30 11:49:00 +00005089 // Create a new macro assembler pointing to the address of the code to patch.
5090 // The size is adjusted with kGap on order for the assembler to generate size
5091 // bytes of instructions without failing with buffer size constraints.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005092 DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
Steve Blocka7e24c12009-10-30 11:49:00 +00005093}
5094
5095
// Flushes the instruction cache for the patched region and verifies that
// exactly |size_| bytes were emitted.
CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CpuFeatures::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  DCHECK(masm_.pc_ == address_ + size_);
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
5104
Ben Murdoch3ef787d2012-04-12 10:51:47 +01005105
// Tests the MemoryChunk flags of the page containing |object| against |mask|
// and jumps to |condition_met| if the test result matches |cc| (zero or
// not_zero). |scratch| receives the page start address and may alias |object|.
void MacroAssembler::CheckPageFlag(
    Register object,
    Register scratch,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  DCHECK(cc == zero || cc == not_zero);
  // Compute the page start by masking off the low (in-page) address bits.
  if (scratch.is(object)) {
    andp(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    movp(scratch, Immediate(~Page::kPageAlignmentMask));
    andp(scratch, object);
  }
  // Use a byte test when the mask fits in one byte, a 32-bit test otherwise.
  if (mask < (1 << kBitsPerByte)) {
    testb(Operand(scratch, MemoryChunk::kFlagsOffset),
          Immediate(static_cast<uint8_t>(mask)));
  } else {
    testl(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}
5128
5129
// Jumps to |on_black| if the mark bits for |object| show the "black" pattern
// ("10"). Clobbers rcx; |bitmap_scratch| and |mask_scratch| receive the mark
// bitmap cell address and the single-bit mask for the object's first bit.
void MacroAssembler::JumpIfBlack(Register object,
                                 Register bitmap_scratch,
                                 Register mask_scratch,
                                 Label* on_black,
                                 Label::Distance on_black_distance) {
  DCHECK(!AreAliased(object, bitmap_scratch, mask_scratch, rcx));
  GetMarkBits(object, bitmap_scratch, mask_scratch);

  DCHECK(strcmp(Marking::kBlackBitPattern, "10") == 0);
  // The mask_scratch register contains a 1 at the position of the first bit
  // and a 0 at all other positions, including the position of the second bit.
  movp(rcx, mask_scratch);
  // Make rcx into a mask that covers both marking bits using the operation
  // rcx = mask | (mask << 1).
  leap(rcx, Operand(mask_scratch, mask_scratch, times_2, 0));
  // Note that we are using a 4-byte aligned 8-byte load.
  andp(rcx, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  // Black ("10") leaves exactly the first bit set, which equals mask_scratch.
  cmpp(mask_scratch, rcx);
  j(equal, on_black, on_black_distance);
}
5150
5151
// Detect some, but not all, common pointer-free objects. This is used by the
// incremental write barrier which doesn't care about oddballs (they are always
// marked black immediately so this code is not hit).
// Falls through when |value| is a HeapNumber or a non-indirect string;
// otherwise jumps to |not_data_object|. |scratch| receives |value|'s map.
void MacroAssembler::JumpIfDataObject(
    Register value,
    Register scratch,
    Label* not_data_object,
    Label::Distance not_data_object_distance) {
  Label is_data_object;
  movp(scratch, FieldOperand(value, HeapObject::kMapOffset));
  CompareRoot(scratch, Heap::kHeapNumberMapRootIndex);
  j(equal, &is_data_object, Label::kNear);
  DCHECK(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1);
  DCHECK(kNotStringTag == 0x80 && kIsNotStringMask == 0x80);
  // If it's a string and it's not a cons string then it's an object containing
  // no GC pointers.
  testb(FieldOperand(scratch, Map::kInstanceTypeOffset),
        Immediate(kIsIndirectStringMask | kIsNotStringMask));
  j(not_zero, not_data_object, not_data_object_distance);
  bind(&is_data_object);
}
5173
5174
// Computes the location of the mark bits for the object at |addr_reg|:
// |bitmap_reg| gets the address of the bitmap cell within the object's page,
// and |mask_reg| gets a mask with a single bit set for the object's first
// mark bit. Clobbers rcx (used for the cl shift count).
void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  DCHECK(!AreAliased(addr_reg, bitmap_reg, mask_reg, rcx));
  // Page start of the object's address.
  movp(bitmap_reg, addr_reg);
  // Sign extended 32 bit immediate.
  andp(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
  // Compute the byte offset of the bitmap cell within the page's bitmap.
  movp(rcx, addr_reg);
  int shift =
      Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
  shrl(rcx, Immediate(shift));
  andp(rcx,
       Immediate((Page::kPageAlignmentMask >> shift) &
                 ~(Bitmap::kBytesPerCell - 1)));

  addp(bitmap_reg, rcx);
  // Bit index of the object within its cell: (addr >> kPointerSizeLog2)
  // modulo the number of bits per cell.
  movp(rcx, addr_reg);
  shrl(rcx, Immediate(kPointerSizeLog2));
  andp(rcx, Immediate((1 << Bitmap::kBitsPerCellLog2) - 1));
  movl(mask_reg, Immediate(1));
  // mask_reg = 1 << (bit index in cl).
  shlp_cl(mask_reg);
}
5197
5198
// If |value| is white, and is a data object (HeapNumber, external string or
// sequential string), marks it black and bumps the page's live-byte count by
// the object's size. If it is white but not a data object, jumps to
// |value_is_white_and_not_data|. Black/grey objects fall through untouched.
// Clobbers rcx and both scratch registers.
void MacroAssembler::EnsureNotWhite(
    Register value,
    Register bitmap_scratch,
    Register mask_scratch,
    Label* value_is_white_and_not_data,
    Label::Distance distance) {
  DCHECK(!AreAliased(value, bitmap_scratch, mask_scratch, rcx));
  GetMarkBits(value, bitmap_scratch, mask_scratch);

  // If the value is black or grey we don't need to do anything.
  DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0);
  DCHECK(strcmp(Marking::kBlackBitPattern, "10") == 0);
  DCHECK(strcmp(Marking::kGreyBitPattern, "11") == 0);
  DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

  Label done;

  // Since both black and grey have a 1 in the first position and white does
  // not have a 1 there we only need to check one bit.
  testp(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);
  j(not_zero, &done, Label::kNear);

  if (emit_debug_code()) {
    // Check for impossible bit pattern.
    Label ok;
    Push(mask_scratch);
    // shl.  May overflow making the check conservative.
    addp(mask_scratch, mask_scratch);
    testp(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
    Pop(mask_scratch);
  }

  // Value is white.  We check whether it is data that doesn't need scanning.
  // Currently only checks for HeapNumber and non-cons strings.
  Register map = rcx;  // Holds map while checking type.
  Register length = rcx;  // Holds length of object after checking type.
  Label not_heap_number;
  Label is_data_object;

  // Check for heap-number
  movp(map, FieldOperand(value, HeapObject::kMapOffset));
  CompareRoot(map, Heap::kHeapNumberMapRootIndex);
  j(not_equal, &not_heap_number, Label::kNear);
  movp(length, Immediate(HeapNumber::kSize));
  jmp(&is_data_object, Label::kNear);

  bind(&not_heap_number);
  // Check for strings.
  DCHECK(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1);
  DCHECK(kNotStringTag == 0x80 && kIsNotStringMask == 0x80);
  // If it's a string and it's not a cons string then it's an object containing
  // no GC pointers.
  Register instance_type = rcx;
  movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  testb(instance_type, Immediate(kIsIndirectStringMask | kIsNotStringMask));
  j(not_zero, value_is_white_and_not_data);
  // It's a non-indirect (non-cons and non-slice) string.
  // If it's external, the length is just ExternalString::kSize.
  // Otherwise it's String::kHeaderSize + string->length() * (1 or 2).
  Label not_external;
  // External strings are the only ones with the kExternalStringTag bit
  // set.
  DCHECK_EQ(0, kSeqStringTag & kExternalStringTag);
  DCHECK_EQ(0, kConsStringTag & kExternalStringTag);
  testb(instance_type, Immediate(kExternalStringTag));
  j(zero, &not_external, Label::kNear);
  movp(length, Immediate(ExternalString::kSize));
  jmp(&is_data_object, Label::kNear);

  bind(&not_external);
  // Sequential string, either Latin1 or UC16.
  DCHECK(kOneByteStringTag == 0x04);
  // Turn the encoding bit into the character size (4 for Latin1, 8 for UC16)
  // without branching: flip the bit, then add 4.
  andp(length, Immediate(kStringEncodingMask));
  xorp(length, Immediate(kStringEncodingMask));
  addp(length, Immediate(0x04));
  // Value now either 4 (if Latin1) or 8 (if UC16), i.e. char-size shifted by 2.
  imulp(length, FieldOperand(value, String::kLengthOffset));
  // Undo the extra factor of 4 and the smi tag/shift in one shift, then round
  // the total size up to the object alignment.
  shrp(length, Immediate(2 + kSmiTagSize + kSmiShiftSize));
  addp(length, Immediate(SeqString::kHeaderSize + kObjectAlignmentMask));
  andp(length, Immediate(~kObjectAlignmentMask));

  bind(&is_data_object);
  // Value is a data object, and it is white.  Mark it black.  Since we know
  // that the object is white we can make it black by flipping one bit.
  orp(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);

  // Account for the object's size in the page's live-byte counter.
  andp(bitmap_scratch, Immediate(~Page::kPageAlignmentMask));
  addl(Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset), length);

  bind(&done);
}
5293
5294
// Walks the prototype chain of the object in rax and jumps to |call_runtime|
// if a valid for-in enum cache cannot be used: the receiver's enum length is
// the invalid sentinel, a prototype has a non-empty enum cache, or any object
// has elements other than the empty fixed array / empty slow dictionary.
// |null_value| must hold the null value (chain terminator).
// Clobbers rbx, rcx, rdx, r8 and kScratchRegister.
void MacroAssembler::CheckEnumCache(Register null_value, Label* call_runtime) {
  Label next, start;
  Register empty_fixed_array_value = r8;
  LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
  // rcx walks the prototype chain, starting at the receiver in rax.
  movp(rcx, rax);

  // Check if the enum length field is properly initialized, indicating that
  // there is an enum cache.
  movp(rbx, FieldOperand(rcx, HeapObject::kMapOffset));

  EnumLength(rdx, rbx);
  Cmp(rdx, Smi::FromInt(kInvalidEnumCacheSentinel));
  j(equal, call_runtime);

  jmp(&start);

  bind(&next);

  movp(rbx, FieldOperand(rcx, HeapObject::kMapOffset));

  // For all objects but the receiver, check that the cache is empty.
  EnumLength(rdx, rbx);
  Cmp(rdx, Smi::FromInt(0));
  j(not_equal, call_runtime);

  bind(&start);

  // Check that there are no elements. Register rcx contains the current JS
  // object we've reached through the prototype chain.
  Label no_elements;
  cmpp(empty_fixed_array_value,
       FieldOperand(rcx, JSObject::kElementsOffset));
  j(equal, &no_elements);

  // Second chance, the object may be using the empty slow element dictionary.
  LoadRoot(kScratchRegister, Heap::kEmptySlowElementDictionaryRootIndex);
  cmpp(kScratchRegister, FieldOperand(rcx, JSObject::kElementsOffset));
  j(not_equal, call_runtime);

  bind(&no_elements);
  // Advance to the prototype; stop when the null value is reached.
  movp(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
  cmpp(rcx, null_value);
  j(not_equal, &next);
}
5339
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005340void MacroAssembler::TestJSArrayForAllocationMemento(
5341 Register receiver_reg,
5342 Register scratch_reg,
5343 Label* no_memento_found) {
5344 ExternalReference new_space_start =
5345 ExternalReference::new_space_start(isolate());
5346 ExternalReference new_space_allocation_top =
5347 ExternalReference::new_space_allocation_top_address(isolate());
5348
5349 leap(scratch_reg, Operand(receiver_reg,
5350 JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag));
5351 Move(kScratchRegister, new_space_start);
5352 cmpp(scratch_reg, kScratchRegister);
5353 j(less, no_memento_found);
5354 cmpp(scratch_reg, ExternalOperand(new_space_allocation_top));
5355 j(greater, no_memento_found);
5356 CompareRoot(MemOperand(scratch_reg, -AllocationMemento::kSize),
5357 Heap::kAllocationMementoMapRootIndex);
5358}
5359
5360
// Walks the prototype chain of |object| (following map prototypes) and jumps
// to |found| if any object along the chain has dictionary-mode elements.
// Terminates when the null value is reached. Clobbers both scratch registers.
void MacroAssembler::JumpIfDictionaryInPrototypeChain(
    Register object,
    Register scratch0,
    Register scratch1,
    Label* found) {
  DCHECK(!(scratch0.is(kScratchRegister) && scratch1.is(kScratchRegister)));
  DCHECK(!scratch1.is(scratch0));
  Register current = scratch0;
  Label loop_again;

  movp(current, object);

  // Loop based on the map going up the prototype chain.
  bind(&loop_again);
  movp(current, FieldOperand(current, HeapObject::kMapOffset));
  // Extract the elements kind from the map's bit field 2.
  movp(scratch1, FieldOperand(current, Map::kBitField2Offset));
  DecodeField<Map::ElementsKindBits>(scratch1);
  cmpp(scratch1, Immediate(DICTIONARY_ELEMENTS));
  j(equal, found);
  movp(current, FieldOperand(current, Map::kPrototypeOffset));
  CompareRoot(current, Heap::kNullValueRootIndex);
  j(not_equal, &loop_again);
}
5384
5385
// Emits a truncating signed 32-bit division of |dividend| by the constant
// |divisor| using the multiply-by-magic-number technique (no idiv). The
// quotient ends up in rdx; rax is clobbered. |dividend| must not be rax/rdx.
void MacroAssembler::TruncatingDiv(Register dividend, int32_t divisor) {
  DCHECK(!dividend.is(rax));
  DCHECK(!dividend.is(rdx));
  base::MagicNumbersForDivision<uint32_t> mag =
      base::SignedDivisionByConstant(static_cast<uint32_t>(divisor));
  // High 32 bits of dividend * multiplier land in rdx.
  movl(rax, Immediate(mag.multiplier));
  imull(dividend);
  bool neg = (mag.multiplier & (static_cast<uint32_t>(1) << 31)) != 0;
  // Correction terms when the magic multiplier's sign doesn't match the
  // divisor's (see Hacker's Delight, ch. 10 — TODO confirm exact derivation).
  if (divisor > 0 && neg) addl(rdx, dividend);
  if (divisor < 0 && !neg && mag.multiplier > 0) subl(rdx, dividend);
  if (mag.shift > 0) sarl(rdx, Immediate(mag.shift));
  // Add the sign bit of the dividend to round the quotient toward zero.
  movl(rax, dividend);
  shrl(rax, Immediate(31));
  addl(rdx, rax);
}
5401
Ben Murdoch3ef787d2012-04-12 10:51:47 +01005402
Steve Blocka7e24c12009-10-30 11:49:00 +00005403} } // namespace v8::internal
Leon Clarkef7060e22010-06-03 12:02:55 +01005404
5405#endif // V8_TARGET_ARCH_X64