blob: 503330340297e5b76877856ab7cf9b1528197c84 [file] [log] [blame]
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001// Copyright 2012 the V8 project authors. All rights reserved.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
Steve Blocka7e24c12009-10-30 11:49:00 +00004
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005#include "src/v8.h"
Steve Blocka7e24c12009-10-30 11:49:00 +00006
Ben Murdochb8a8cc12014-11-26 15:28:44 +00007#if V8_TARGET_ARCH_X64
Leon Clarkef7060e22010-06-03 12:02:55 +01008
Ben Murdochb8a8cc12014-11-26 15:28:44 +00009#include "src/base/bits.h"
10#include "src/base/division-by-constant.h"
11#include "src/bootstrapper.h"
12#include "src/codegen.h"
13#include "src/cpu-profiler.h"
14#include "src/debug.h"
15#include "src/heap/heap.h"
16#include "src/isolate-inl.h"
17#include "src/serialize.h"
18#include "src/x64/assembler-x64.h"
19#include "src/x64/macro-assembler-x64.h"
Steve Blocka7e24c12009-10-30 11:49:00 +000020
21namespace v8 {
22namespace internal {
23
// Constructs a macro assembler emitting into |buffer| of |size| bytes.
// The root-array register is assumed usable until explicitly disabled.
MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      has_frame_(false),
      root_array_available_(true) {
  if (isolate() != NULL) {
    // Placeholder code object; replaced when real code is allocated.
    code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
                                  isolate());
  }
}
34
35
// Sentinel returned when a root-register-relative delta must not be used.
static const int64_t kInvalidRootRegisterDelta = -1;


// Computes the displacement of |other| from the value held in kRootRegister
// (roots_array_start + kRootRegisterBias), enabling root-register-relative
// addressing of external references. Returns kInvalidRootRegisterDelta when
// predictable code size is requested and |other| lies outside the isolate,
// since such a delta could differ between runs.
int64_t MacroAssembler::RootRegisterDelta(ExternalReference other) {
  if (predictable_code_size() &&
      (other.address() < reinterpret_cast<Address>(isolate()) ||
       other.address() >= reinterpret_cast<Address>(isolate() + 1))) {
    return kInvalidRootRegisterDelta;
  }
  Address roots_register_value = kRootRegisterBias +
      reinterpret_cast<Address>(isolate()->heap()->roots_array_start());

  int64_t delta = kInvalidRootRegisterDelta;  // Bogus initialization.
  if (kPointerSize == kInt64Size) {
    delta = other.address() - roots_register_value;
  } else {
    // For x32, zero extend the address to 64-bit and calculate the delta.
    uint64_t o = static_cast<uint32_t>(
        reinterpret_cast<intptr_t>(other.address()));
    uint64_t r = static_cast<uint32_t>(
        reinterpret_cast<intptr_t>(roots_register_value));
    delta = o - r;
  }
  return delta;
}
61
62
// Returns an Operand addressing |target|. Prefers a root-register-relative
// displacement (no scratch needed); otherwise materializes the address in
// |scratch| and returns an indirect operand through it.
Operand MacroAssembler::ExternalOperand(ExternalReference target,
                                        Register scratch) {
  if (root_array_available_ && !serializer_enabled()) {
    int64_t delta = RootRegisterDelta(target);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      return Operand(kRootRegister, static_cast<int32_t>(delta));
    }
  }
  Move(scratch, target);
  return Operand(scratch, 0);
}
74
75
// Loads the value stored at external reference |source| into |destination|.
// Fast path: root-register-relative load. Fallback: the short load_rax
// encoding when the destination is rax, else an indirect load through
// kScratchRegister (which is clobbered).
void MacroAssembler::Load(Register destination, ExternalReference source) {
  if (root_array_available_ && !serializer_enabled()) {
    int64_t delta = RootRegisterDelta(source);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      movp(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
      return;
    }
  }
  // Safe code.
  if (destination.is(rax)) {
    load_rax(source);
  } else {
    Move(kScratchRegister, source);
    movp(destination, Operand(kScratchRegister, 0));
  }
}
92
93
// Stores |source| to the location named by external reference |destination|.
// Mirrors Load: root-register-relative store when possible, store_rax when
// the source is rax, else indirect through kScratchRegister (clobbered).
void MacroAssembler::Store(ExternalReference destination, Register source) {
  if (root_array_available_ && !serializer_enabled()) {
    int64_t delta = RootRegisterDelta(destination);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      movp(Operand(kRootRegister, static_cast<int32_t>(delta)), source);
      return;
    }
  }
  // Safe code.
  if (source.is(rax)) {
    store_rax(destination);
  } else {
    Move(kScratchRegister, destination);
    movp(Operand(kScratchRegister, 0), source);
  }
}
110
111
// Materializes the address of |source| in |destination|: a leap off the
// root register when the delta fits, else a full 64-bit immediate move.
// LoadAddressSize below must be kept in sync with this code.
void MacroAssembler::LoadAddress(Register destination,
                                 ExternalReference source) {
  if (root_array_available_ && !serializer_enabled()) {
    int64_t delta = RootRegisterDelta(source);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      leap(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
      return;
    }
  }
  // Safe code.
  Move(destination, source);
}
124
125
// Returns the number of bytes LoadAddress(reg, source) would emit, without
// emitting anything. Used to pre-compute call-sequence sizes.
int MacroAssembler::LoadAddressSize(ExternalReference source) {
  if (root_array_available_ && !serializer_enabled()) {
    // This calculation depends on the internals of LoadAddress.
    // Its correctness is ensured by the asserts in the Call
    // instruction below.
    int64_t delta = RootRegisterDelta(source);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      // Operand is leap(scratch, Operand(kRootRegister, delta));
      // Opcodes : REX.W 8D ModRM Disp8/Disp32 - 4 or 7.
      int size = 4;
      if (!is_int8(static_cast<int32_t>(delta))) {
        size += 3;  // Need full four-byte displacement in lea.
      }
      return size;
    }
  }
  // Size of movp(destination, src);
  return Assembler::kMoveAddressIntoScratchRegisterInstructionLength;
}
145
146
// Pushes the address of |source| on the stack. Uses a 32-bit immediate push
// when the address fits (and no snapshot is being made); otherwise goes
// through kScratchRegister, which is clobbered.
void MacroAssembler::PushAddress(ExternalReference source) {
  int64_t address = reinterpret_cast<int64_t>(source.address());
  if (is_int32(address) && !serializer_enabled()) {
    if (emit_debug_code()) {
      // Poison the scratch register so stale values are caught in debug mode.
      Move(kScratchRegister, kZapValue, Assembler::RelocInfoNone());
    }
    Push(Immediate(static_cast<int32_t>(address)));
    return;
  }
  LoadAddress(kScratchRegister, source);
  Push(kScratchRegister);
}
159
160
// Loads the root-list entry |index| into |destination| via kRootRegister.
void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  DCHECK(root_array_available_);
  movp(destination, Operand(kRootRegister,
                            (index << kPointerSizeLog2) - kRootRegisterBias));
}
166
167
// Loads root-list entry (fixed_offset + variable_offset) into |destination|,
// where |variable_offset| is a register scaled by the pointer size.
void MacroAssembler::LoadRootIndexed(Register destination,
                                     Register variable_offset,
                                     int fixed_offset) {
  DCHECK(root_array_available_);
  movp(destination,
       Operand(kRootRegister,
               variable_offset, times_pointer_size,
               (fixed_offset << kPointerSizeLog2) - kRootRegisterBias));
}
177
178
// Stores |source| into the root-list entry |index|.
void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) {
  DCHECK(root_array_available_);
  movp(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias),
       source);
}
184
185
// Pushes the root-list entry |index| onto the stack.
void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  DCHECK(root_array_available_);
  Push(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias));
}
190
191
// Compares register |with| against the root-list entry |index|.
void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  DCHECK(root_array_available_);
  cmpp(with, Operand(kRootRegister,
                     (index << kPointerSizeLog2) - kRootRegisterBias));
}
197
198
// Compares memory operand |with| against the root-list entry |index|.
// Clobbers kScratchRegister, so the operand must not use it.
void MacroAssembler::CompareRoot(const Operand& with,
                                 Heap::RootListIndex index) {
  DCHECK(root_array_available_);
  DCHECK(!with.AddressUsesRegister(kScratchRegister));
  LoadRoot(kScratchRegister, index);
  cmpp(with, kScratchRegister);
}
206
207
// Records |addr| in the store buffer and, on buffer overflow, calls the
// StoreBufferOverflowStub. |and_then| selects whether the emitted code
// returns (kReturnAtEnd) or falls through (kFallThroughAtEnd). |scratch|
// is clobbered; |object| is only used for a debug-mode new-space check.
void MacroAssembler::RememberedSetHelper(Register object,  // For debug tests.
                                         Register addr,
                                         Register scratch,
                                         SaveFPRegsMode save_fp,
                                         RememberedSetFinalAction and_then) {
  if (emit_debug_code()) {
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
    int3();
    bind(&ok);
  }
  // Load store buffer top.
  LoadRoot(scratch, Heap::kStoreBufferTopRootIndex);
  // Store pointer to buffer.
  movp(Operand(scratch, 0), addr);
  // Increment buffer top.
  addp(scratch, Immediate(kPointerSize));
  // Write back new top of buffer.
  StoreRoot(scratch, Heap::kStoreBufferTopRootIndex);
  // Call stub on end of buffer.
  Label done;
  // Check for end of buffer.
  testp(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit));
  if (and_then == kReturnAtEnd) {
    Label buffer_overflowed;
    j(not_equal, &buffer_overflowed, Label::kNear);
    ret(0);
    bind(&buffer_overflowed);
  } else {
    DCHECK(and_then == kFallThroughAtEnd);
    j(equal, &done, Label::kNear);
  }
  StoreBufferOverflowStub store_buffer_overflow(isolate(), save_fp);
  CallStub(&store_buffer_overflow);
  if (and_then == kReturnAtEnd) {
    ret(0);
  } else {
    DCHECK(and_then == kFallThroughAtEnd);
    bind(&done);
  }
}
249
250
// Jumps to |branch| if (object & new_space_mask) compares |cc| against
// new_space_start, i.e. tests whether |object| is in the young generation.
// |scratch| is clobbered; kScratchRegister is clobbered on all paths.
void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                Label* branch,
                                Label::Distance distance) {
  if (serializer_enabled()) {
    // Can't do arithmetic on external references if it might get serialized.
    // The mask isn't really an address. We load it as an external reference in
    // case the size of the new space is different between the snapshot maker
    // and the running system.
    if (scratch.is(object)) {
      Move(kScratchRegister, ExternalReference::new_space_mask(isolate()));
      andp(scratch, kScratchRegister);
    } else {
      Move(scratch, ExternalReference::new_space_mask(isolate()));
      andp(scratch, object);
    }
    Move(kScratchRegister, ExternalReference::new_space_start(isolate()));
    cmpp(scratch, kScratchRegister);
    j(cc, branch, distance);
  } else {
    DCHECK(kPointerSize == kInt64Size
        ? is_int32(static_cast<int64_t>(isolate()->heap()->NewSpaceMask()))
        : kPointerSize == kInt32Size);
    intptr_t new_space_start =
        reinterpret_cast<intptr_t>(isolate()->heap()->NewSpaceStart());
    // Bias by -new_space_start so the subsequent mask-and-test compares
    // against zero implicitly.
    Move(kScratchRegister, reinterpret_cast<Address>(-new_space_start),
         Assembler::RelocInfoNone());
    if (scratch.is(object)) {
      addp(scratch, kScratchRegister);
    } else {
      leap(scratch, Operand(object, kScratchRegister, times_1, 0));
    }
    andp(scratch,
         Immediate(static_cast<int32_t>(isolate()->heap()->NewSpaceMask())));
    j(cc, branch, distance);
  }
}
289
290
// Emits the write barrier for a store of |value| into the field at |offset|
// of |object|. |dst| is clobbered with the field's address. In debug mode
// the clobbered registers are zapped to provoke errors on misuse.
void MacroAssembler::RecordWriteField(
    Register object,
    int offset,
    Register value,
    Register dst,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done);
  }

  // Although the object register is tagged, the offset is relative to the
  // start of the object, so the offset must be a multiple of kPointerSize.
  DCHECK(IsAligned(offset, kPointerSize));

  leap(dst, FieldOperand(object, offset));
  if (emit_debug_code()) {
    // Verify the computed field address is pointer-aligned.
    Label ok;
    testb(dst, Immediate((1 << kPointerSizeLog2) - 1));
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  RecordWrite(object, dst, value, save_fp, remembered_set_action,
              OMIT_SMI_CHECK, pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    Move(value, kZapValue, Assembler::RelocInfoNone());
    Move(dst, kZapValue, Assembler::RelocInfoNone());
  }
}
334
335
// Emits the write barrier for a store of |value| into element |index|
// (an untagged integer, not a smi) of FixedArray |object|. |index| is
// clobbered with the element's address.
void MacroAssembler::RecordWriteArray(
    Register object,
    Register value,
    Register index,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done);
  }

  // Array access: calculate the destination address. Index is not a smi.
  Register dst = index;
  leap(dst, Operand(object, index, times_pointer_size,
                    FixedArray::kHeaderSize - kHeapObjectTag));

  RecordWrite(object, dst, value, save_fp, remembered_set_action,
              OMIT_SMI_CHECK, pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    Move(value, kZapValue, Assembler::RelocInfoNone());
    Move(index, kZapValue, Assembler::RelocInfoNone());
  }
}
370
371
// Emits the write barrier for storing |map| into |object|'s map slot.
// |dst| is clobbered with the map slot's address. Maps are never in new
// space, so only the map page's pointers-to-here flag needs checking.
void MacroAssembler::RecordWriteForMap(Register object,
                                       Register map,
                                       Register dst,
                                       SaveFPRegsMode fp_mode) {
  DCHECK(!object.is(kScratchRegister));
  DCHECK(!object.is(map));
  DCHECK(!object.is(dst));
  DCHECK(!map.is(dst));
  AssertNotSmi(object);

  if (emit_debug_code()) {
    // Verify |map| really is a map (its own map is the meta map).
    Label ok;
    if (map.is(kScratchRegister)) pushq(map);
    CompareMap(map, isolate()->factory()->meta_map());
    if (map.is(kScratchRegister)) popq(map);
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  if (!FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    // Verify the map has already been stored into the object's map slot.
    Label ok;
    if (map.is(kScratchRegister)) pushq(map);
    cmpp(map, FieldOperand(object, HeapObject::kMapOffset));
    if (map.is(kScratchRegister)) popq(map);
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // Compute the address.
  leap(dst, FieldOperand(object, HeapObject::kMapOffset));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;

  // A single check of the map's pages interesting flag suffices, since it is
  // only set during incremental collection, and then it's also guaranteed that
  // the from object's page's interesting flag is also set. This optimization
  // relies on the fact that maps can never be in new space.
  CheckPageFlag(map,
                map,  // Used as scratch.
                MemoryChunk::kPointersToHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

  RecordWriteStub stub(isolate(), object, map, dst, OMIT_REMEMBERED_SET,
                       fp_mode);
  CallStub(&stub);

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    Move(dst, kZapValue, Assembler::RelocInfoNone());
    Move(map, kZapValue, Assembler::RelocInfoNone());
  }
}
441
442
// Emits the generic write barrier for a store of |value| to |address|
// inside |object|. The store itself must already have been performed
// (checked in debug mode). Skips the barrier for smis and for pages whose
// flags show no interesting pointers. |address| and |value| are zapped in
// debug mode afterwards.
void MacroAssembler::RecordWrite(
    Register object,
    Register address,
    Register value,
    SaveFPRegsMode fp_mode,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  DCHECK(!object.is(value));
  DCHECK(!object.is(address));
  DCHECK(!value.is(address));
  AssertNotSmi(object);

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    // The caller must have stored |value| at |address| already.
    Label ok;
    cmpp(value, Operand(address, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    // Skip barrier if writing a smi.
    JumpIfSmi(value, &done);
  }

  if (pointers_to_here_check_for_value != kPointersToHereAreAlwaysInteresting) {
    CheckPageFlag(value,
                  value,  // Used as scratch.
                  MemoryChunk::kPointersToHereAreInterestingMask,
                  zero,
                  &done,
                  Label::kNear);
  }

  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

  RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
                       fp_mode);
  CallStub(&stub);

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    Move(address, kZapValue, Assembler::RelocInfoNone());
    Move(value, kZapValue, Assembler::RelocInfoNone());
  }
}
511
512
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000513void MacroAssembler::Assert(Condition cc, BailoutReason reason) {
514 if (emit_debug_code()) Check(cc, reason);
Steve Blocka7e24c12009-10-30 11:49:00 +0000515}
516
517
// In debug-code mode, aborts unless |elements| has one of the fast-element
// backing-store maps (FixedArray, FixedDoubleArray, or COW FixedArray).
void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    Label ok;
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedArrayMapRootIndex);
    j(equal, &ok, Label::kNear);
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedDoubleArrayMapRootIndex);
    j(equal, &ok, Label::kNear);
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedCOWArrayMapRootIndex);
    j(equal, &ok, Label::kNear);
    Abort(kJSObjectWithFastElementsMapHasSlowElements);
    bind(&ok);
  }
}
534
535
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000536void MacroAssembler::Check(Condition cc, BailoutReason reason) {
Ben Murdoch257744e2011-11-30 15:57:28 +0000537 Label L;
538 j(cc, &L, Label::kNear);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000539 Abort(reason);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100540 // Control will not return here.
Steve Blocka7e24c12009-10-30 11:49:00 +0000541 bind(&L);
542}
543
544
// Emits a debug trap (int3) if rsp is not aligned to the platform's
// activation-frame alignment. No-op when alignment <= kPointerSize.
void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = base::OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    Label alignment_as_expected;
    testp(rsp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected, Label::kNear);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}
558
559
Steve Blocka7e24c12009-10-30 11:49:00 +0000560void MacroAssembler::NegativeZeroTest(Register result,
561 Register op,
562 Label* then_label) {
Ben Murdoch257744e2011-11-30 15:57:28 +0000563 Label ok;
Steve Blocka7e24c12009-10-30 11:49:00 +0000564 testl(result, result);
Ben Murdoch257744e2011-11-30 15:57:28 +0000565 j(not_zero, &ok, Label::kNear);
Steve Blocka7e24c12009-10-30 11:49:00 +0000566 testl(op, op);
567 j(sign, then_label);
568 bind(&ok);
569}
570
571
// Emits code that aborts execution with |reason|: pushes the reason as a
// smi and calls Runtime::kAbort. Control does not return. In debug builds
// the reason text is recorded as a comment, and --trap-on-abort emits a
// bare int3 instead.
void MacroAssembler::Abort(BailoutReason reason) {
#ifdef DEBUG
  const char* msg = GetBailoutReason(reason);
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }

  if (FLAG_trap_on_abort) {
    int3();
    return;
  }
#endif

  Move(kScratchRegister, Smi::FromInt(static_cast<int>(reason)),
       Assembler::RelocInfoNone());
  Push(kScratchRegister);

  if (!has_frame_) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    CallRuntime(Runtime::kAbort, 1);
  } else {
    CallRuntime(Runtime::kAbort, 1);
  }
  // Control will not return here.
  int3();
}
601
602
// Emits a call to |stub|'s code object with CODE_TARGET relocation.
void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) {
  DCHECK(AllowThisStubCall(stub));  // Calls are not allowed in some stubs
  Call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
}
607
608
// Emits a tail call (jump) to |stub|'s code object.
void MacroAssembler::TailCallStub(CodeStub* stub) {
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
}
612
613
Steve Blocka7e24c12009-10-30 11:49:00 +0000614void MacroAssembler::StubReturn(int argc) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000615 DCHECK(argc >= 1 && generating_stub());
Steve Blocka7e24c12009-10-30 11:49:00 +0000616 ret((argc - 1) * kPointerSize);
617}
618
619
// A stub may be called only when a frame exists, or when the stub never
// sets one up itself.
bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
  return has_frame_ || !stub->SometimesSetsUpAFrame();
}
623
624
// Extracts the cached array index from a string hash field in |hash| into
// |index| (decoded to a smi).
void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it does not conflict.
  DCHECK(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  if (!hash.is(index)) {
    movl(index, hash);
  }
  DecodeFieldToSmi<String::ArrayIndexValueBits>(index);
}
636
637
// Calls the runtime function |f| with |num_arguments| already on the stack,
// via CEntryStub. rax receives the argument count and rbx the entry address.
void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments,
                                 SaveFPRegsMode save_doubles) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  CHECK(f->nargs < 0 || f->nargs == num_arguments);

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  LoadAddress(rbx, ExternalReference(f, isolate()));
  CEntryStub ces(isolate(), f->result_size, save_doubles);
  CallStub(&ces);
}
655
656
// Calls the C function behind |ext| through CEntryStub, passing the
// argument count in rax and the target address in rbx.
void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments) {
  Set(rax, num_arguments);
  LoadAddress(rbx, ext);

  CEntryStub stub(isolate(), 1);
  CallStub(&stub);
}
665
666
// Tail-calls the runtime routine behind |ext|: loads the argument count
// into rax and jumps to the CEntryStub, never returning here.
void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // ----------- S t a t e -------------
  //  -- rsp[0]                 : return address
  //  -- rsp[8]                 : argument num_arguments - 1
  //  ...
  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
  // -----------------------------------

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  JumpToExternalReference(ext, result_size);
}
684
685
Steve Block6ded16b2010-05-10 14:33:55 +0100686void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
687 int num_arguments,
688 int result_size) {
Steve Block44f0eee2011-05-26 01:26:41 +0100689 TailCallExternalReference(ExternalReference(fid, isolate()),
690 num_arguments,
691 result_size);
Steve Block6ded16b2010-05-10 14:33:55 +0100692}
693
694
Ben Murdochbb769b22010-08-11 14:56:33 +0100695static int Offset(ExternalReference ref0, ExternalReference ref1) {
696 int64_t offset = (ref0.address() - ref1.address());
697 // Check that fits into int.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000698 DCHECK(static_cast<int>(offset) == offset);
Ben Murdochbb769b22010-08-11 14:56:33 +0100699 return static_cast<int>(offset);
700}
701
702
// Sets up an API exit frame with |arg_stack_space| slots reserved for the
// outgoing arguments of a subsequent API call (see CallApiFunctionAndReturn).
void MacroAssembler::PrepareCallApiFunction(int arg_stack_space) {
  EnterApiExitFrame(arg_stack_space);
}
706
707
// Calls a C++ API callback at |function_address| and returns to the caller,
// popping |stack_space| pointer-sized slots.  The sequence:
//   1. opens a HandleScope by saving next/limit/level in callee-save regs,
//   2. if the CPU profiler is active, routes the call through the profiler
//      thunk |thunk_ref| (passing the real target in |thunk_last_arg|),
//   3. invokes the function, loads the result from |return_value_operand|,
//   4. closes the HandleScope (deleting extensions if the limit moved),
//   5. promotes any scheduled exception via the runtime,
//   6. optionally restores rsi from |context_restore_operand|, leaves the
//      API exit frame and returns.
// Must be paired with a preceding PrepareCallApiFunction.
void MacroAssembler::CallApiFunctionAndReturn(
    Register function_address,
    ExternalReference thunk_ref,
    Register thunk_last_arg,
    int stack_space,
    Operand return_value_operand,
    Operand* context_restore_operand) {
  Label prologue;
  Label promote_scheduled_exception;
  Label exception_handled;
  Label delete_allocated_handles;
  Label leave_exit_frame;
  Label write_back;

  Factory* factory = isolate()->factory();
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address(isolate());
  // Offsets of the handle-scope limit/level words relative to the next
  // pointer, so all three can be addressed off one base register.
  const int kNextOffset = 0;
  const int kLimitOffset = Offset(
      ExternalReference::handle_scope_limit_address(isolate()),
      next_address);
  const int kLevelOffset = Offset(
      ExternalReference::handle_scope_level_address(isolate()),
      next_address);
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate());

  DCHECK(rdx.is(function_address) || r8.is(function_address));
  // Allocate HandleScope in callee-save registers.
  Register prev_next_address_reg = r14;
  Register prev_limit_reg = rbx;
  Register base_reg = r15;
  Move(base_reg, next_address);
  movp(prev_next_address_reg, Operand(base_reg, kNextOffset));
  movp(prev_limit_reg, Operand(base_reg, kLimitOffset));
  addl(Operand(base_reg, kLevelOffset), Immediate(1));

  // Optionally log the external-call timer event around the call.
  if (FLAG_log_timer_events) {
    FrameScope frame(this, StackFrame::MANUAL);
    PushSafepointRegisters();
    PrepareCallCFunction(1);
    LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate()));
    CallCFunction(ExternalReference::log_enter_external_function(isolate()), 1);
    PopSafepointRegisters();
  }


  // If profiling is enabled, call through the profiler thunk instead of the
  // function directly so the profiler can attribute the time.
  Label profiler_disabled;
  Label end_profiler_check;
  Move(rax, ExternalReference::is_profiling_address(isolate()));
  cmpb(Operand(rax, 0), Immediate(0));
  j(zero, &profiler_disabled);

  // Third parameter is the address of the actual getter function.
  Move(thunk_last_arg, function_address);
  Move(rax, thunk_ref);
  jmp(&end_profiler_check);

  bind(&profiler_disabled);
  // Call the api function!
  Move(rax, function_address);

  bind(&end_profiler_check);

  // Call the api function!
  call(rax);

  if (FLAG_log_timer_events) {
    FrameScope frame(this, StackFrame::MANUAL);
    PushSafepointRegisters();
    PrepareCallCFunction(1);
    LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate()));
    CallCFunction(ExternalReference::log_leave_external_function(isolate()), 1);
    PopSafepointRegisters();
  }

  // Load the value from ReturnValue
  movp(rax, return_value_operand);
  bind(&prologue);

  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  subl(Operand(base_reg, kLevelOffset), Immediate(1));
  movp(Operand(base_reg, kNextOffset), prev_next_address_reg);
  cmpp(prev_limit_reg, Operand(base_reg, kLimitOffset));
  j(not_equal, &delete_allocated_handles);
  bind(&leave_exit_frame);

  // Check if the function scheduled an exception.
  Move(rsi, scheduled_exception_address);
  Cmp(Operand(rsi, 0), factory->the_hole_value());
  j(not_equal, &promote_scheduled_exception);
  bind(&exception_handled);

#if ENABLE_EXTRA_CHECKS
  // Check if the function returned a valid JavaScript value.
  Label ok;
  Register return_value = rax;
  Register map = rcx;

  JumpIfSmi(return_value, &ok, Label::kNear);
  movp(map, FieldOperand(return_value, HeapObject::kMapOffset));

  CmpInstanceType(map, FIRST_NONSTRING_TYPE);
  j(below, &ok, Label::kNear);

  CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
  j(above_equal, &ok, Label::kNear);

  CompareRoot(map, Heap::kHeapNumberMapRootIndex);
  j(equal, &ok, Label::kNear);

  CompareRoot(return_value, Heap::kUndefinedValueRootIndex);
  j(equal, &ok, Label::kNear);

  CompareRoot(return_value, Heap::kTrueValueRootIndex);
  j(equal, &ok, Label::kNear);

  CompareRoot(return_value, Heap::kFalseValueRootIndex);
  j(equal, &ok, Label::kNear);

  CompareRoot(return_value, Heap::kNullValueRootIndex);
  j(equal, &ok, Label::kNear);

  Abort(kAPICallReturnedInvalidObject);

  bind(&ok);
#endif

  // Restore the caller's context if requested, then tear down the exit frame
  // and pop the caller-specified amount of stack.
  bool restore_context = context_restore_operand != NULL;
  if (restore_context) {
    movp(rsi, *context_restore_operand);
  }
  LeaveApiExitFrame(!restore_context);
  ret(stack_space * kPointerSize);

  bind(&promote_scheduled_exception);
  {
    FrameScope frame(this, StackFrame::INTERNAL);
    CallRuntime(Runtime::kPromoteScheduledException, 0);
  }
  jmp(&exception_handled);

  // HandleScope limit has changed. Delete allocated extensions.
  bind(&delete_allocated_handles);
  movp(Operand(base_reg, kLimitOffset), prev_limit_reg);
  movp(prev_limit_reg, rax);  // Preserve the return value across the C call.
  LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate()));
  LoadAddress(rax,
              ExternalReference::delete_handle_scope_extensions(isolate()));
  call(rax);
  movp(rax, prev_limit_reg);
  jmp(&leave_exit_frame);
}
862
863
// Tail-calls the C entry stub for the runtime function |ext|.  Expects the
// argument count already in rax (see TailCallExternalReference).
void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
                                             int result_size) {
  // Set the entry point and jump to the C entry runtime stub.
  LoadAddress(rbx, ext);
  CEntryStub ces(isolate(), result_size);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}
871
872
// Invokes (calls or jumps to, per |flag|) the JavaScript builtin |id|.
// The builtin's entry point is loaded into rdx; its function object ends up
// in rdi as a side effect of GetBuiltinEntry.
void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                   InvokeFlag flag,
                                   const CallWrapper& call_wrapper) {
  // You can't call a builtin without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  // Rely on the assertion to check that the number of provided
  // arguments match the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinEntry(rdx, id);
  InvokeCode(rdx, expected, expected, flag, call_wrapper);
}
886
Andrei Popescu402d9372010-02-26 13:31:12 +0000887
// Loads the JSFunction for builtin |id| into |target|, by walking
// current context -> global object -> builtins object -> function slot.
void MacroAssembler::GetBuiltinFunction(Register target,
                                        Builtins::JavaScript id) {
  // Load the builtins object into target register.
  movp(target, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  movp(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
  movp(target, FieldOperand(target,
                            JSBuiltinsObject::OffsetOfFunctionWithId(id)));
}
Steve Block6ded16b2010-05-10 14:33:55 +0100896
Steve Block791712a2010-08-27 10:21:07 +0100897
// Loads the code entry address of builtin |id| into |target|.
// Clobbers rdi (it receives the builtin's JSFunction), hence the DCHECK.
void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  DCHECK(!target.is(rdi));
  // Load the JavaScript builtin function from the builtins object.
  GetBuiltinFunction(rdi, id);
  movp(target, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
}
904
905
// General-purpose registers saved/restored by Push/PopCallerSaved below.
// r12-r15 and rsp are deliberately excluded (callee-save / stack pointer);
// see the comment in PushCallerSaved.
#define REG(Name) { kRegister_ ## Name ## _Code }

static const Register saved_regs[] = {
  REG(rax), REG(rcx), REG(rdx), REG(rbx), REG(rbp), REG(rsi), REG(rdi), REG(r8),
  REG(r9), REG(r10), REG(r11)
};

#undef REG

static const int kNumberOfSavedRegs = sizeof(saved_regs) / sizeof(Register);
916
917
// Pushes all registers in saved_regs (minus up to three exclusions) and,
// if |fp_mode| == kSaveFPRegs, all XMM registers.  Mirrored exactly by
// PopCallerSaved; the two must stay in sync.
void MacroAssembler::PushCallerSaved(SaveFPRegsMode fp_mode,
                                     Register exclusion1,
                                     Register exclusion2,
                                     Register exclusion3) {
  // We don't allow a GC during a store buffer overflow so there is no need to
  // store the registers in any particular way, but we do have to store and
  // restore them.
  for (int i = 0; i < kNumberOfSavedRegs; i++) {
    Register reg = saved_regs[i];
    if (!reg.is(exclusion1) && !reg.is(exclusion2) && !reg.is(exclusion3)) {
      pushq(reg);
    }
  }
  // R12 to r15 are callee save on all platforms.
  if (fp_mode == kSaveFPRegs) {
    // Reserve one stack slot per XMM register and spill them all.
    subp(rsp, Immediate(kDoubleSize * XMMRegister::kMaxNumRegisters));
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movsd(Operand(rsp, i * kDoubleSize), reg);
    }
  }
}
940
941
// Restores the registers pushed by PushCallerSaved, in exact reverse order.
// The exclusion arguments must match the ones used when pushing.
void MacroAssembler::PopCallerSaved(SaveFPRegsMode fp_mode,
                                    Register exclusion1,
                                    Register exclusion2,
                                    Register exclusion3) {
  if (fp_mode == kSaveFPRegs) {
    // Reload all XMM registers and release their stack slots.
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movsd(reg, Operand(rsp, i * kDoubleSize));
    }
    addp(rsp, Immediate(kDoubleSize * XMMRegister::kMaxNumRegisters));
  }
  // Pop in reverse of the push order in PushCallerSaved.
  for (int i = kNumberOfSavedRegs - 1; i >= 0; i--) {
    Register reg = saved_regs[i];
    if (!reg.is(exclusion1) && !reg.is(exclusion2) && !reg.is(exclusion3)) {
      popq(reg);
    }
  }
}
960
961
// Converts the 32-bit integer in |src| to a double in |dst|.  The xorps
// clears |dst| first because cvtlsi2sd only writes the low quadword and
// would otherwise keep a false dependency on the register's old value.
void MacroAssembler::Cvtlsi2sd(XMMRegister dst, Register src) {
  xorps(dst, dst);
  cvtlsi2sd(dst, src);
}
966
967
// Memory-operand variant of Cvtlsi2sd above; same false-dependency-breaking
// xorps before the conversion.
void MacroAssembler::Cvtlsi2sd(XMMRegister dst, const Operand& src) {
  xorps(dst, dst);
  cvtlsi2sd(dst, src);
}
972
973
// Loads |src| into |dst| using the narrowest move matching representation
// |r|: sign/zero extension for 8/16-bit, movl for int32, movp otherwise.
// Doubles must go through the XMM path and are disallowed here.
void MacroAssembler::Load(Register dst, const Operand& src, Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8()) {
    movsxbq(dst, src);
  } else if (r.IsUInteger8()) {
    movzxbl(dst, src);
  } else if (r.IsInteger16()) {
    movsxwq(dst, src);
  } else if (r.IsUInteger16()) {
    movzxwl(dst, src);
  } else if (r.IsInteger32()) {
    movl(dst, src);
  } else {
    movp(dst, src);
  }
}
990
991
// Stores |src| to |dst| using the width implied by representation |r|.
// For full-width stores, debug builds additionally assert the smi-ness
// implied by the representation.  Doubles are disallowed (XMM path).
void MacroAssembler::Store(const Operand& dst, Register src, Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8() || r.IsUInteger8()) {
    movb(dst, src);
  } else if (r.IsInteger16() || r.IsUInteger16()) {
    movw(dst, src);
  } else if (r.IsInteger32()) {
    movl(dst, src);
  } else {
    if (r.IsHeapObject()) {
      AssertNotSmi(src);
    } else if (r.IsSmi()) {
      AssertSmi(src);
    }
    movp(dst, src);
  }
}
1009
1010
// Loads the 64-bit constant |x| into |dst| with the shortest encoding:
// xor for zero, a 32-bit move (zero- or sign-extended) when the value fits,
// and a full 64-bit immediate move otherwise.
void MacroAssembler::Set(Register dst, int64_t x) {
  if (x == 0) {
    xorl(dst, dst);
  } else if (is_uint32(x)) {
    movl(dst, Immediate(static_cast<uint32_t>(x)));
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else {
    movq(dst, x);
  }
}
1022
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001023
1024void MacroAssembler::Set(const Operand& dst, intptr_t x) {
1025 if (kPointerSize == kInt64Size) {
1026 if (is_int32(x)) {
1027 movp(dst, Immediate(static_cast<int32_t>(x)));
1028 } else {
1029 Set(kScratchRegister, x);
1030 movp(dst, kScratchRegister);
1031 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001032 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001033 movp(dst, Immediate(static_cast<int32_t>(x)));
Steve Blocka7e24c12009-10-30 11:49:00 +00001034 }
1035}
1036
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001037
Steve Blocka7e24c12009-10-30 11:49:00 +00001038// ----------------------------------------------------------------------------
1039// Smi tagging, untagging and tag detection.
1040
// Returns true if |x| is wide enough (more than 17 significant bits) that it
// should be obfuscated with the jit cookie before being embedded as an
// immediate (see SafeMove/SafePush below).
bool MacroAssembler::IsUnsafeInt(const int32_t x) {
  static const int kMaxBits = 17;
  return !is_intn(x, kMaxBits);
}
1045
1046
// Moves smi |src| into |dst| without embedding a raw attacker-influenced
// immediate: wide values are XOR-masked with the jit cookie and unmasked in
// a second instruction.  Falls back to a plain Move for narrow values or
// when no cookie is configured.
void MacroAssembler::SafeMove(Register dst, Smi* src) {
  DCHECK(!dst.is(kScratchRegister));
  if (IsUnsafeInt(src->value()) && jit_cookie() != 0) {
    if (SmiValuesAre32Bits()) {
      // JIT cookie can be converted to Smi.
      Move(dst, Smi::FromInt(src->value() ^ jit_cookie()));
      Move(kScratchRegister, Smi::FromInt(jit_cookie()));
      xorp(dst, kScratchRegister);
    } else {
      DCHECK(SmiValuesAre31Bits());
      // 31-bit smis: work on the raw tagged bit pattern instead.
      int32_t value = static_cast<int32_t>(reinterpret_cast<intptr_t>(src));
      movp(dst, Immediate(value ^ jit_cookie()));
      xorp(dst, Immediate(jit_cookie()));
    }
  } else {
    Move(dst, src);
  }
}
1065
1066
// Pushes smi |src| with the same jit-cookie obfuscation as SafeMove: the
// masked value is pushed, then unmasked in place on the stack.
void MacroAssembler::SafePush(Smi* src) {
  if (IsUnsafeInt(src->value()) && jit_cookie() != 0) {
    if (SmiValuesAre32Bits()) {
      // JIT cookie can be converted to Smi.
      Push(Smi::FromInt(src->value() ^ jit_cookie()));
      Move(kScratchRegister, Smi::FromInt(jit_cookie()));
      xorp(Operand(rsp, 0), kScratchRegister);
    } else {
      DCHECK(SmiValuesAre31Bits());
      // 31-bit smis: work on the raw tagged bit pattern instead.
      int32_t value = static_cast<int32_t>(reinterpret_cast<intptr_t>(src));
      Push(Immediate(value ^ jit_cookie()));
      xorp(Operand(rsp, 0), Immediate(jit_cookie()));
    }
  } else {
    Push(src);
  }
}
1084
1085
// Returns a register holding the smi constant |source|, materializing it
// into kScratchRegister unless it is already available: 0 is synthesized by
// xor, 1 lives permanently in kSmiConstantRegister.
Register MacroAssembler::GetSmiConstant(Smi* source) {
  int value = source->value();
  if (value == 0) {
    xorl(kScratchRegister, kScratchRegister);
    return kScratchRegister;
  }
  if (value == 1) {
    return kSmiConstantRegister;
  }
  LoadSmiConstant(kScratchRegister, source);
  return kScratchRegister;
}
1098
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001099
// Loads smi constant |source| into |dst|.  Small magnitudes (1-5, 8, 9) are
// derived from kSmiConstantRegister (which holds Smi 1) via lea scaling to
// avoid embedding the raw constant; anything else falls back to a full move.
// Negative values are built from their magnitude and then negated.
void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
  if (emit_debug_code()) {
    // Verify kSmiConstantRegister still holds its expected value.
    Move(dst, Smi::FromInt(kSmiConstantRegisterValue),
         Assembler::RelocInfoNone());
    cmpp(dst, kSmiConstantRegister);
    Assert(equal, kUninitializedKSmiConstantRegister);
  }
  int value = source->value();
  if (value == 0) {
    xorl(dst, dst);
    return;
  }
  bool negative = value < 0;
  unsigned int uvalue = negative ? -value : value;

  switch (uvalue) {
    case 9:
      // 9 = 1 + 1*8, via scaled-index addressing.
      leap(dst,
           Operand(kSmiConstantRegister, kSmiConstantRegister, times_8, 0));
      break;
    case 8:
      // 8 = 0 + 1*8.
      xorl(dst, dst);
      leap(dst, Operand(dst, kSmiConstantRegister, times_8, 0));
      break;
    case 4:
      // 4 = 0 + 1*4.
      xorl(dst, dst);
      leap(dst, Operand(dst, kSmiConstantRegister, times_4, 0));
      break;
    case 5:
      // 5 = 1 + 1*4.
      leap(dst,
           Operand(kSmiConstantRegister, kSmiConstantRegister, times_4, 0));
      break;
    case 3:
      // 3 = 1 + 1*2.
      leap(dst,
           Operand(kSmiConstantRegister, kSmiConstantRegister, times_2, 0));
      break;
    case 2:
      // 2 = 1 + 1*1.
      leap(dst,
           Operand(kSmiConstantRegister, kSmiConstantRegister, times_1, 0));
      break;
    case 1:
      movp(dst, kSmiConstantRegister);
      break;
    case 0:
      UNREACHABLE();  // Handled by the early return above.
      return;
    default:
      Move(dst, source, Assembler::RelocInfoNone());
      return;
  }
  if (negative) {
    negp(dst);
  }
}
1154
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001155
Steve Blocka7e24c12009-10-30 11:49:00 +00001156void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001157 STATIC_ASSERT(kSmiTag == 0);
Steve Block3ce2e202009-11-05 08:53:23 +00001158 if (!dst.is(src)) {
1159 movl(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +00001160 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001161 shlp(dst, Immediate(kSmiShift));
Steve Blocka7e24c12009-10-30 11:49:00 +00001162}
1163
1164
// Stores the 32-bit integer |src| as a smi into memory at |dst|.
// With 32-bit smis only the upper half of the field is written (the lower
// half must already be zero — verified in debug builds); with 31-bit smis
// the value is tagged in a scratch register first.
void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
  if (emit_debug_code()) {
    // The destination must currently hold a smi (tag bit clear).
    testb(dst, Immediate(0x01));
    Label ok;
    j(zero, &ok, Label::kNear);
    Abort(kInteger32ToSmiFieldWritingToNonSmiLocation);
    bind(&ok);
  }

  if (SmiValuesAre32Bits()) {
    DCHECK(kSmiShift % kBitsPerByte == 0);
    movl(Operand(dst, kSmiShift / kBitsPerByte), src);
  } else {
    DCHECK(SmiValuesAre31Bits());
    Integer32ToSmi(kScratchRegister, src);
    movp(dst, kScratchRegister);
  }
}
1183
1184
Steve Block3ce2e202009-11-05 08:53:23 +00001185void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
1186 Register src,
1187 int constant) {
1188 if (dst.is(src)) {
Steve Block44f0eee2011-05-26 01:26:41 +01001189 addl(dst, Immediate(constant));
Steve Block3ce2e202009-11-05 08:53:23 +00001190 } else {
Steve Block44f0eee2011-05-26 01:26:41 +01001191 leal(dst, Operand(src, constant));
Steve Block3ce2e202009-11-05 08:53:23 +00001192 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001193 shlp(dst, Immediate(kSmiShift));
Steve Blocka7e24c12009-10-30 11:49:00 +00001194}
1195
1196
// Untags the smi in |src| into a 32-bit integer in |dst|.  With 32-bit smis
// a logical shift suffices (the value occupies the upper half); with 31-bit
// smis an arithmetic shift preserves the sign.
void MacroAssembler::SmiToInteger32(Register dst, Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  if (!dst.is(src)) {
    movp(dst, src);
  }

  if (SmiValuesAre32Bits()) {
    shrp(dst, Immediate(kSmiShift));
  } else {
    DCHECK(SmiValuesAre31Bits());
    sarl(dst, Immediate(kSmiShift));
  }
}
1210
1211
// Untags a smi loaded from memory |src| into 32-bit |dst|.  With 32-bit
// smis the value can be read directly from the field's upper half.
void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) {
  if (SmiValuesAre32Bits()) {
    movl(dst, Operand(src, kSmiShift / kBitsPerByte));
  } else {
    DCHECK(SmiValuesAre31Bits());
    movl(dst, src);
    sarl(dst, Immediate(kSmiShift));
  }
}
1221
1222
// Untags the smi in |src| into a sign-extended 64-bit integer in |dst|.
void MacroAssembler::SmiToInteger64(Register dst, Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  if (!dst.is(src)) {
    movp(dst, src);
  }
  sarp(dst, Immediate(kSmiShift));
  if (kPointerSize == kInt32Size) {
    // Sign extend to 64-bit.
    movsxlq(dst, dst);
  }
}
1234
1235
// Untags a smi loaded from memory |src| into 64-bit |dst|.  With 32-bit
// smis the upper half of the field is the value; load and sign-extend it.
void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) {
  if (SmiValuesAre32Bits()) {
    movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte));
  } else {
    DCHECK(SmiValuesAre31Bits());
    movp(dst, src);
    SmiToInteger64(dst, dst);
  }
}
1245
1246
// Sets the flags from testing the smi in |src| against itself (e.g. for a
// subsequent zero/sign check).  Asserts smi-ness in debug builds.
void MacroAssembler::SmiTest(Register src) {
  AssertSmi(src);
  testp(src, src);
}
1251
1252
// Compares two smi registers, setting the flags.  Because smis are tagged
// monotonically, the untagged comparison result is the same.
void MacroAssembler::SmiCompare(Register smi1, Register smi2) {
  AssertSmi(smi1);
  AssertSmi(smi2);
  cmpp(smi1, smi2);
}
1258
1259
// Compares the smi in |dst| against the smi constant |src|, setting flags.
void MacroAssembler::SmiCompare(Register dst, Smi* src) {
  AssertSmi(dst);
  Cmp(dst, src);
}
1264
1265
// Compares |dst| against the tagged smi constant |src|.  Zero is tested
// with testp; other constants are materialized via GetSmiConstant (which
// may clobber kScratchRegister, hence the DCHECK).
void MacroAssembler::Cmp(Register dst, Smi* src) {
  DCHECK(!dst.is(kScratchRegister));
  if (src->value() == 0) {
    testp(dst, dst);
  } else {
    Register constant_reg = GetSmiConstant(src);
    cmpp(dst, constant_reg);
  }
}
1275
1276
// Compares the smi in register |dst| against the smi in memory |src|.
void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
  AssertSmi(dst);
  AssertSmi(src);
  cmpp(dst, src);
}
1282
1283
// Compares the smi in memory |dst| against the smi in register |src|.
void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
  AssertSmi(dst);
  AssertSmi(src);
  cmpp(dst, src);
}
1289
1290
// Compares the smi in memory |dst| against smi constant |src|.  With 32-bit
// smis, only the upper half of the field (the untagged value) is compared;
// with 31-bit smis the full tagged word is compared as an immediate.
void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
  AssertSmi(dst);
  if (SmiValuesAre32Bits()) {
    cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
  } else {
    DCHECK(SmiValuesAre31Bits());
    cmpl(dst, Immediate(src));
  }
}
1300
1301
// Compares memory operand |dst| against the smi constant |src|, which is
// first materialized into a register (the operand must not address through
// that register).
void MacroAssembler::Cmp(const Operand& dst, Smi* src) {
  // The Operand cannot use the smi register.
  Register smi_reg = GetSmiConstant(src);
  DCHECK(!dst.AddressUsesRegister(smi_reg));
  cmpp(dst, smi_reg);
}
1308
1309
// Compares the smi stored in memory at |dst| against the untagged 32-bit
// integer in |src|.  With 32-bit smis the field's upper half already holds
// the untagged value; with 31-bit smis the smi is untagged into scratch.
void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) {
  if (SmiValuesAre32Bits()) {
    cmpl(Operand(dst, kSmiShift / kBitsPerByte), src);
  } else {
    DCHECK(SmiValuesAre31Bits());
    SmiToInteger32(kScratchRegister, dst);
    cmpl(kScratchRegister, src);
  }
}
1319
1320
// Computes dst = untag(src) * 2^power for a non-negative smi |src|, as a
// single shift: untagging shifts right by kSmiShift, multiplying shifts
// left by |power|, so only the net shift is emitted.
void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
                                                           Register src,
                                                           int power) {
  DCHECK(power >= 0);
  DCHECK(power < 64);
  if (power == 0) {
    SmiToInteger64(dst, src);
    return;
  }
  if (!dst.is(src)) {
    movp(dst, src);
  }
  if (power < kSmiShift) {
    sarp(dst, Immediate(kSmiShift - power));
  } else if (power > kSmiShift) {
    shlp(dst, Immediate(power - kSmiShift));
  }
  // power == kSmiShift: the tagged value already equals the result.
}
1339
1340
// Computes dst = untag(src) / 2^power for a non-negative smi |src|, as one
// logical right shift combining the untag and the division.  Only the
// in-place (dst == src) form has ever been needed.
void MacroAssembler::PositiveSmiDivPowerOfTwoToInteger32(Register dst,
                                                         Register src,
                                                         int power) {
  DCHECK((0 <= power) && (power < 32));
  if (dst.is(src)) {
    shrp(dst, Immediate(power + kSmiShift));
  } else {
    UNIMPLEMENTED();  // Not used.
  }
}
1351
1352
// Computes dst = src1 | src2, but jumps to |on_not_smis| (leaving dst
// unchanged) if the result is not a smi — i.e. if either input is not a
// smi.  When dst aliases an input, the OR is staged in kScratchRegister so
// the inputs survive a bail-out.
void MacroAssembler::SmiOrIfSmis(Register dst, Register src1, Register src2,
                                 Label* on_not_smis,
                                 Label::Distance near_jump) {
  if (dst.is(src1) || dst.is(src2)) {
    DCHECK(!src1.is(kScratchRegister));
    DCHECK(!src2.is(kScratchRegister));
    movp(kScratchRegister, src1);
    orp(kScratchRegister, src2);
    JumpIfNotSmi(kScratchRegister, on_not_smis, near_jump);
    movp(dst, kScratchRegister);
  } else {
    movp(dst, src1);
    orp(dst, src2);
    JumpIfNotSmi(dst, on_not_smis, near_jump);
  }
}
1369
1370
// Tests the tag bit of |src|; returns the condition (zero) under which the
// value is a smi.
Condition MacroAssembler::CheckSmi(Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}
1376
1377
// Memory-operand variant of CheckSmi: tests the tag bit of the value at
// |src| and returns the smi condition (zero).
Condition MacroAssembler::CheckSmi(const Operand& src) {
  STATIC_ASSERT(kSmiTag == 0);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}
1383
1384
// Returns the condition (zero) under which |src| is a non-negative smi:
// rotating left by one moves the sign bit next to the tag bit, so one testb
// covers both "tag clear" and "sign clear".  Clobbers kScratchRegister.
Condition MacroAssembler::CheckNonNegativeSmi(Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  // Test that both bits of the mask 0x8000000000000001 are zero.
  movp(kScratchRegister, src);
  rolp(kScratchRegister, Immediate(1));
  testb(kScratchRegister, Immediate(3));
  return zero;
}
1393
1394
// Returns the condition (zero) under which both registers hold smis.
// With 32-bit smis, adding the two values cannot carry into the low tag
// bits, so lea+testb checks both tags at once; with 31-bit smis the tags
// are OR-ed together instead.  Clobbers kScratchRegister.
Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  STATIC_ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3);
  if (SmiValuesAre32Bits()) {
    leal(kScratchRegister, Operand(first, second, times_1, 0));
    testb(kScratchRegister, Immediate(0x03));
  } else {
    DCHECK(SmiValuesAre31Bits());
    movl(kScratchRegister, first);
    orl(kScratchRegister, second);
    testb(kScratchRegister, Immediate(kSmiTagMask));
  }
  return zero;
}
1411
1412
// Returns the condition (zero) under which both registers hold non-negative
// smis: OR-ing the values combines their tag and sign bits, then the same
// rotate-and-test trick as CheckNonNegativeSmi checks both at once.
// Clobbers kScratchRegister.
Condition MacroAssembler::CheckBothNonNegativeSmi(Register first,
                                                  Register second) {
  if (first.is(second)) {
    return CheckNonNegativeSmi(first);
  }
  movp(kScratchRegister, first);
  orp(kScratchRegister, second);
  rolp(kScratchRegister, Immediate(1));
  testl(kScratchRegister, Immediate(3));
  return zero;
}
1424
1425
// Returns the condition that is true iff at least one of |first| and |second|
// is a smi. |scratch| is clobbered; it may alias either input (the branches
// below are ordered so the non-scratch input is read before scratch is
// overwritten).
Condition MacroAssembler::CheckEitherSmi(Register first,
                                         Register second,
                                         Register scratch) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  // AND the tags: the result's tag bit is clear iff either input's tag bit
  // is clear (i.e. at least one input is a smi).
  if (scratch.is(second)) {
    andl(scratch, first);
  } else {
    if (!scratch.is(first)) {
      movl(scratch, first);
    }
    andl(scratch, second);
  }
  testb(scratch, Immediate(kSmiTagMask));
  return zero;
}
1443
1444
// Returns the condition that is true iff |src| holds the minimal smi value.
Condition MacroAssembler::CheckIsMinSmi(Register src) {
  DCHECK(!src.is(kScratchRegister));
  // If we overflow by subtracting one, it's the minimal smi value.
  // (kSmiConstantRegister holds Smi::FromInt(1); the comparison's subtraction
  // overflows only for Smi::kMinValue.)
  cmpp(src, kSmiConstantRegister);
  return overflow;
}
1451
Steve Blocka7e24c12009-10-30 11:49:00 +00001452
// Returns the condition that is true iff the int32 in |src| can be
// represented as a smi.
Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {
  if (SmiValuesAre32Bits()) {
    // A 32-bit integer value can always be converted to a smi.
    return always;
  } else {
    DCHECK(SmiValuesAre31Bits());
    // Valid 31-bit smi values are in [-2^30, 2^30 - 1]; adding 0x40000000
    // (implicitly, via the comparison with 0xc0000000) leaves the sign flag
    // clear exactly for values in that range.
    cmpl(src, Immediate(0xc0000000));
    return positive;
  }
}
1463
1464
// Returns the condition that is true iff the uint32 in |src| can be
// represented as a smi.
Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
  if (SmiValuesAre32Bits()) {
    // An unsigned 32-bit integer value is valid as long as the high bit
    // is not set.
    testl(src, src);
    return positive;
  } else {
    DCHECK(SmiValuesAre31Bits());
    // For 31-bit smis the top two bits must both be clear (value < 2^30).
    testl(src, Immediate(0xc0000000));
    return zero;
  }
}
1477
1478
// Extracts the smi tag bit of |src| into |dst|: dst becomes 0 if |src| is a
// smi and kSmiTagMask (1) otherwise. Handles dst aliasing src.
void MacroAssembler::CheckSmiToIndicator(Register dst, Register src) {
  if (dst.is(src)) {
    andl(dst, Immediate(kSmiTagMask));
  } else {
    // Load the mask first so src is only read, never clobbered.
    movl(dst, Immediate(kSmiTagMask));
    andl(dst, src);
  }
}
1487
1488
// Memory-operand variant: extracts the smi tag bit of |src| into |dst|.
// If |dst| is used to address |src|, the value is loaded before |dst| is
// overwritten.
void MacroAssembler::CheckSmiToIndicator(Register dst, const Operand& src) {
  if (!(src.AddressUsesRegister(dst))) {
    movl(dst, Immediate(kSmiTagMask));
    andl(dst, src);
  } else {
    movl(dst, src);
    andl(dst, Immediate(kSmiTagMask));
  }
}
1498
1499
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001500void MacroAssembler::JumpIfValidSmiValue(Register src,
1501 Label* on_valid,
1502 Label::Distance near_jump) {
1503 Condition is_valid = CheckInteger32ValidSmiValue(src);
1504 j(is_valid, on_valid, near_jump);
1505}
1506
1507
Ben Murdoch257744e2011-11-30 15:57:28 +00001508void MacroAssembler::JumpIfNotValidSmiValue(Register src,
1509 Label* on_invalid,
1510 Label::Distance near_jump) {
1511 Condition is_valid = CheckInteger32ValidSmiValue(src);
1512 j(NegateCondition(is_valid), on_invalid, near_jump);
1513}
1514
1515
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001516void MacroAssembler::JumpIfUIntValidSmiValue(Register src,
1517 Label* on_valid,
1518 Label::Distance near_jump) {
1519 Condition is_valid = CheckUInteger32ValidSmiValue(src);
1520 j(is_valid, on_valid, near_jump);
1521}
1522
1523
Ben Murdoch257744e2011-11-30 15:57:28 +00001524void MacroAssembler::JumpIfUIntNotValidSmiValue(Register src,
1525 Label* on_invalid,
1526 Label::Distance near_jump) {
1527 Condition is_valid = CheckUInteger32ValidSmiValue(src);
1528 j(NegateCondition(is_valid), on_invalid, near_jump);
1529}
1530
1531
1532void MacroAssembler::JumpIfSmi(Register src,
1533 Label* on_smi,
1534 Label::Distance near_jump) {
1535 Condition smi = CheckSmi(src);
1536 j(smi, on_smi, near_jump);
1537}
1538
1539
1540void MacroAssembler::JumpIfNotSmi(Register src,
1541 Label* on_not_smi,
1542 Label::Distance near_jump) {
1543 Condition smi = CheckSmi(src);
1544 j(NegateCondition(smi), on_not_smi, near_jump);
1545}
1546
1547
1548void MacroAssembler::JumpUnlessNonNegativeSmi(
1549 Register src, Label* on_not_smi_or_negative,
1550 Label::Distance near_jump) {
1551 Condition non_negative_smi = CheckNonNegativeSmi(src);
1552 j(NegateCondition(non_negative_smi), on_not_smi_or_negative, near_jump);
1553}
1554
1555
// Jumps to |on_equals| if the smi in |src| equals |constant|.
void MacroAssembler::JumpIfSmiEqualsConstant(Register src,
                                             Smi* constant,
                                             Label* on_equals,
                                             Label::Distance near_jump) {
  SmiCompare(src, constant);
  j(equal, on_equals, near_jump);
}
1563
1564
1565void MacroAssembler::JumpIfNotBothSmi(Register src1,
1566 Register src2,
1567 Label* on_not_both_smi,
1568 Label::Distance near_jump) {
1569 Condition both_smi = CheckBothSmi(src1, src2);
1570 j(NegateCondition(both_smi), on_not_both_smi, near_jump);
1571}
1572
1573
1574void MacroAssembler::JumpUnlessBothNonNegativeSmi(Register src1,
1575 Register src2,
1576 Label* on_not_both_smi,
1577 Label::Distance near_jump) {
1578 Condition both_smi = CheckBothNonNegativeSmi(src1, src2);
1579 j(NegateCondition(both_smi), on_not_both_smi, near_jump);
1580}
1581
1582
// Adds the smi |constant| to the smi in |src|, storing the result in |dst|.
// No overflow checking — use only when overflow is impossible.
// Small constants 1/2/4/8 are synthesized from kSmiConstantRegister (which
// holds Smi::FromInt(1)) using add/lea scaled addressing, avoiding a
// constant load.
void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movp(dst, src);
    }
    return;
  } else if (dst.is(src)) {
    DCHECK(!dst.is(kScratchRegister));
    switch (constant->value()) {
      case 1:
        addp(dst, kSmiConstantRegister);
        return;
      case 2:
        leap(dst, Operand(src, kSmiConstantRegister, times_2, 0));
        return;
      case 4:
        leap(dst, Operand(src, kSmiConstantRegister, times_4, 0));
        return;
      case 8:
        leap(dst, Operand(src, kSmiConstantRegister, times_8, 0));
        return;
      default:
        // General case: materialize the constant in a register and add.
        Register constant_reg = GetSmiConstant(constant);
        addp(dst, constant_reg);
        return;
    }
  } else {
    switch (constant->value()) {
      case 1:
        leap(dst, Operand(src, kSmiConstantRegister, times_1, 0));
        return;
      case 2:
        leap(dst, Operand(src, kSmiConstantRegister, times_2, 0));
        return;
      case 4:
        leap(dst, Operand(src, kSmiConstantRegister, times_4, 0));
        return;
      case 8:
        leap(dst, Operand(src, kSmiConstantRegister, times_8, 0));
        return;
      default:
        // Load the constant into dst first, then add src (addition commutes).
        LoadSmiConstant(dst, constant);
        addp(dst, src);
        return;
    }
  }
}
1630
1631
// Adds the smi |constant| to the smi stored at memory operand |dst|,
// in place. No overflow checking.
void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
  if (constant->value() != 0) {
    if (SmiValuesAre32Bits()) {
      // The 32-bit payload lives in the high half of the word; add the raw
      // integer value directly to that half.
      addl(Operand(dst, kSmiShift / kBitsPerByte),
           Immediate(constant->value()));
    } else {
      DCHECK(SmiValuesAre31Bits());
      // Tagged representation fits in 32 bits; add the tagged constant.
      addp(dst, Immediate(constant));
    }
  }
}
1643
1644
// Adds the smi |constant| to the smi in |src| with overflow handling
// controlled by |mode|:
//   BAILOUT_ON_OVERFLOW    - jump to |bailout_label| if the add overflows.
//   BAILOUT_ON_NO_OVERFLOW - jump to |bailout_label| if it does NOT overflow.
//   PRESERVE_SOURCE_REGISTER - |src| must still hold its original value on
//                              the bailout path (restored by undoing the add).
void MacroAssembler::SmiAddConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    SmiOperationExecutionMode mode,
                                    Label* bailout_label,
                                    Label::Distance near_jump) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movp(dst, src);
    }
  } else if (dst.is(src)) {
    DCHECK(!dst.is(kScratchRegister));
    LoadSmiConstant(kScratchRegister, constant);
    addp(dst, kScratchRegister);
    if (mode.Contains(BAILOUT_ON_NO_OVERFLOW)) {
      j(no_overflow, bailout_label, near_jump);
      DCHECK(mode.Contains(PRESERVE_SOURCE_REGISTER));
      // Overflow path falls through: undo the add so src (== dst) is intact.
      subp(dst, kScratchRegister);
    } else if (mode.Contains(BAILOUT_ON_OVERFLOW)) {
      if (mode.Contains(PRESERVE_SOURCE_REGISTER)) {
        Label done;
        j(no_overflow, &done, Label::kNear);
        // Undo the add before bailing out so src (== dst) is intact.
        subp(dst, kScratchRegister);
        jmp(bailout_label, near_jump);
        bind(&done);
      } else {
        // Bailout if overflow without reserving src.
        j(overflow, bailout_label, near_jump);
      }
    } else {
      CHECK(mode.IsEmpty());
    }
  } else {
    // dst != src: src is never clobbered, so no restore is needed, but the
    // mode must still request overflow bailout for this variant.
    DCHECK(mode.Contains(PRESERVE_SOURCE_REGISTER));
    DCHECK(mode.Contains(BAILOUT_ON_OVERFLOW));
    LoadSmiConstant(dst, constant);
    addp(dst, src);
    j(overflow, bailout_label, near_jump);
  }
}
1685
1686
// Subtracts the smi |constant| from the smi in |src|, storing the result in
// |dst|. No overflow checking.
void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movp(dst, src);
    }
  } else if (dst.is(src)) {
    DCHECK(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    subp(dst, constant_reg);
  } else {
    if (constant->value() == Smi::kMinValue) {
      // -kMinValue is not representable, so we cannot negate the constant.
      LoadSmiConstant(dst, constant);
      // Adding and subtracting the min-value gives the same result, it only
      // differs on the overflow bit, which we don't check here.
      addp(dst, src);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(dst, Smi::FromInt(-constant->value()));
      addp(dst, src);
    }
  }
}
1709
1710
// Subtracts the smi |constant| from the smi in |src| with overflow handling
// controlled by |mode| (see SmiAddConstant above for the mode semantics).
void MacroAssembler::SmiSubConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    SmiOperationExecutionMode mode,
                                    Label* bailout_label,
                                    Label::Distance near_jump) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movp(dst, src);
    }
  } else if (dst.is(src)) {
    DCHECK(!dst.is(kScratchRegister));
    LoadSmiConstant(kScratchRegister, constant);
    subp(dst, kScratchRegister);
    if (mode.Contains(BAILOUT_ON_NO_OVERFLOW)) {
      j(no_overflow, bailout_label, near_jump);
      DCHECK(mode.Contains(PRESERVE_SOURCE_REGISTER));
      // Overflow path falls through: undo the subtract so src is intact.
      addp(dst, kScratchRegister);
    } else if (mode.Contains(BAILOUT_ON_OVERFLOW)) {
      if (mode.Contains(PRESERVE_SOURCE_REGISTER)) {
        Label done;
        j(no_overflow, &done, Label::kNear);
        // Undo the subtract before bailing out so src (== dst) is intact.
        addp(dst, kScratchRegister);
        jmp(bailout_label, near_jump);
        bind(&done);
      } else {
        // Bailout if overflow without reserving src.
        j(overflow, bailout_label, near_jump);
      }
    } else {
      CHECK(mode.IsEmpty());
    }
  } else {
    DCHECK(mode.Contains(PRESERVE_SOURCE_REGISTER));
    DCHECK(mode.Contains(BAILOUT_ON_OVERFLOW));
    if (constant->value() == Smi::kMinValue) {
      // -kMinValue is not representable, so subtract the constant directly
      // instead of adding its negation.
      DCHECK(!dst.is(kScratchRegister));
      movp(dst, src);
      LoadSmiConstant(kScratchRegister, constant);
      subp(dst, kScratchRegister);
      j(overflow, bailout_label, near_jump);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(dst, Smi::FromInt(-(constant->value())));
      addp(dst, src);
      j(overflow, bailout_label, near_jump);
    }
  }
}
1760
1761
// Negates the smi in |src| into |dst|, jumping to |on_smi_result| on success.
// Negation fails (falls through) when the value is 0 or Smi::kMinValue,
// whose negation is not a representable smi; in the aliased case |src| is
// restored on the fall-through path.
void MacroAssembler::SmiNeg(Register dst,
                            Register src,
                            Label* on_smi_result,
                            Label::Distance near_jump) {
  if (dst.is(src)) {
    DCHECK(!dst.is(kScratchRegister));
    movp(kScratchRegister, src);  // Back up src for the restore below.
    negp(dst);  // Low 32 bits are retained as zero by negation.
    // Test if result is zero or Smi::kMinValue.
    cmpp(dst, kScratchRegister);
    j(not_equal, on_smi_result, near_jump);
    movp(src, kScratchRegister);  // Failed: restore the original value.
  } else {
    movp(dst, src);
    negp(dst);
    cmpp(dst, src);
    // If the result is zero or Smi::kMinValue, negation failed to create a smi.
    j(not_equal, on_smi_result, near_jump);
  }
}
1782
1783
// Shared implementation for the checked SmiAdd overloads. T is Register or
// Operand. On overflow jumps to |on_not_smi_result|; when dst aliases src1
// the addition is undone first so src1 keeps its original value.
template<class T>
static void SmiAddHelper(MacroAssembler* masm,
                         Register dst,
                         Register src1,
                         T src2,
                         Label* on_not_smi_result,
                         Label::Distance near_jump) {
  if (dst.is(src1)) {
    Label done;
    masm->addp(dst, src2);
    masm->j(no_overflow, &done, Label::kNear);
    // Restore src1.
    masm->subp(dst, src2);
    masm->jmp(on_not_smi_result, near_jump);
    masm->bind(&done);
  } else {
    masm->movp(dst, src1);
    masm->addp(dst, src2);
    masm->j(overflow, on_not_smi_result, near_jump);
  }
}
1805
1806
// Adds two smis, jumping to |on_not_smi_result| if the result overflows.
// |dst| may alias |src1| (src1 is then restored on the overflow path) but
// must not alias |src2|.
void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  DCHECK_NOT_NULL(on_not_smi_result);
  DCHECK(!dst.is(src2));
  SmiAddHelper<Register>(this, dst, src1, src2, on_not_smi_result, near_jump);
}
1816
1817
// Memory-operand variant of the checked SmiAdd; |src2| must not be addressed
// through |dst|.
void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            const Operand& src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  DCHECK_NOT_NULL(on_not_smi_result);
  DCHECK(!src2.AddressUsesRegister(dst));
  SmiAddHelper<Operand>(this, dst, src1, src2, on_not_smi_result, near_jump);
}
1827
1828
// Adds two smis with no overflow checking.
void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible.
  if (!dst.is(src1)) {
    if (emit_debug_code()) {
      // Verify the no-overflow assumption in debug code without touching
      // the inputs.
      movp(kScratchRegister, src1);
      addp(kScratchRegister, src2);
      Check(no_overflow, kSmiAdditionOverflow);
    }
    // lea computes the sum without affecting flags or the sources.
    leap(dst, Operand(src1, src2, times_1, 0));
  } else {
    addp(dst, src2);
    Assert(no_overflow, kSmiAdditionOverflow);
  }
}
1846
1847
// Shared implementation for the checked SmiSub overloads. T is Register or
// Operand. On overflow jumps to |on_not_smi_result|; when dst aliases src1
// the subtraction is undone first so src1 keeps its original value.
template<class T>
static void SmiSubHelper(MacroAssembler* masm,
                         Register dst,
                         Register src1,
                         T src2,
                         Label* on_not_smi_result,
                         Label::Distance near_jump) {
  if (dst.is(src1)) {
    Label done;
    masm->subp(dst, src2);
    masm->j(no_overflow, &done, Label::kNear);
    // Restore src1.
    masm->addp(dst, src2);
    masm->jmp(on_not_smi_result, near_jump);
    masm->bind(&done);
  } else {
    masm->movp(dst, src1);
    masm->subp(dst, src2);
    masm->j(overflow, on_not_smi_result, near_jump);
  }
}
1869
1870
// Subtracts two smis, jumping to |on_not_smi_result| if the result overflows.
// |dst| may alias |src1| but must not alias |src2|.
void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  DCHECK_NOT_NULL(on_not_smi_result);
  DCHECK(!dst.is(src2));
  SmiSubHelper<Register>(this, dst, src1, src2, on_not_smi_result, near_jump);
}
1880
1881
// Memory-operand variant of the checked SmiSub; |src2| must not be addressed
// through |dst|.
void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            const Operand& src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  DCHECK_NOT_NULL(on_not_smi_result);
  DCHECK(!src2.AddressUsesRegister(dst));
  SmiSubHelper<Operand>(this, dst, src1, src2, on_not_smi_result, near_jump);
}
1891
1892
// Shared implementation for the unchecked SmiSub overloads; asserts (debug
// only) that no overflow occurred.
template<class T>
static void SmiSubNoOverflowHelper(MacroAssembler* masm,
                                   Register dst,
                                   Register src1,
                                   T src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible (e.g., subtracting two positive smis).
  if (!dst.is(src1)) {
    masm->movp(dst, src1);
  }
  masm->subp(dst, src2);
  masm->Assert(no_overflow, kSmiSubtractionOverflow);
}
1906
1907
// Subtracts two smis with no overflow checking; |dst| must not alias |src2|.
void MacroAssembler::SmiSub(Register dst, Register src1, Register src2) {
  DCHECK(!dst.is(src2));
  SmiSubNoOverflowHelper<Register>(this, dst, src1, src2);
}
1912
1913
// Memory-operand variant of the unchecked SmiSub.
void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            const Operand& src2) {
  SmiSubNoOverflowHelper<Operand>(this, dst, src1, src2);
}
1919
1920
// Multiplies two smis, jumping to |on_not_smi_result| if the product
// overflows or would be -0 (not representable as a smi). Untagging one
// operand before imul keeps the product correctly tagged. When dst aliases
// src1, src1 is backed up in kScratchRegister and restored on failure.
void MacroAssembler::SmiMul(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  DCHECK(!dst.is(src2));
  DCHECK(!dst.is(kScratchRegister));
  DCHECK(!src1.is(kScratchRegister));
  DCHECK(!src2.is(kScratchRegister));

  if (dst.is(src1)) {
    Label failure, zero_correct_result;
    movp(kScratchRegister, src1);  // Create backup for later testing.
    SmiToInteger64(dst, src1);
    imulp(dst, src2);
    j(overflow, &failure, Label::kNear);

    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    Label correct_result;
    testp(dst, dst);
    j(not_zero, &correct_result, Label::kNear);

    // Product is zero: the sign of (src1 XOR src2) tells whether exactly one
    // operand was negative, which would make the true result -0.
    movp(dst, kScratchRegister);
    xorp(dst, src2);
    // Result was positive zero.
    j(positive, &zero_correct_result, Label::kNear);

    bind(&failure);  // Reused failure exit, restores src1.
    movp(src1, kScratchRegister);
    jmp(on_not_smi_result, near_jump);

    bind(&zero_correct_result);
    Set(dst, 0);

    bind(&correct_result);
  } else {
    SmiToInteger64(dst, src1);
    imulp(dst, src2);
    j(overflow, on_not_smi_result, near_jump);
    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    Label correct_result;
    testp(dst, dst);
    j(not_zero, &correct_result, Label::kNear);
    // One of src1 and src2 is zero, the check whether the other is
    // negative.
    movp(kScratchRegister, src1);
    xorp(kScratchRegister, src2);
    j(negative, on_not_smi_result, near_jump);
    bind(&correct_result);
  }
}
1974
1975
// Divides the smi in |src1| by the smi in |src2|, producing a smi quotient
// in |dst|. Jumps to |on_not_smi_result| when the result is not a smi:
// division by zero, a non-zero remainder, Smi::kMinValue / -1 (overflows
// idiv), or 0 / negative (which would be -0). Uses rax/rdx for idivl, so
// src2 must not be rax or rdx and src1 must not be rdx; when src1 is rax it
// is backed up in kScratchRegister and restored on every bailout path.
void MacroAssembler::SmiDiv(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  DCHECK(!src1.is(kScratchRegister));
  DCHECK(!src2.is(kScratchRegister));
  DCHECK(!dst.is(kScratchRegister));
  DCHECK(!src2.is(rax));
  DCHECK(!src2.is(rdx));
  DCHECK(!src1.is(rdx));

  // Check for 0 divisor (result is +/-Infinity).
  testp(src2, src2);
  j(zero, on_not_smi_result, near_jump);

  if (src1.is(rax)) {
    movp(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  // We need to rule out dividing Smi::kMinValue by -1, since that would
  // overflow in idiv and raise an exception.
  // We combine this with negative zero test (negative zero only happens
  // when dividing zero by a negative number).

  // We overshoot a little and go to slow case if we divide min-value
  // by any negative value, not just -1.
  Label safe_div;
  testl(rax, Immediate(~Smi::kMinValue));
  j(not_zero, &safe_div, Label::kNear);
  testp(src2, src2);
  if (src1.is(rax)) {
    j(positive, &safe_div, Label::kNear);
    movp(src1, kScratchRegister);  // Restore rax before bailing out.
    jmp(on_not_smi_result, near_jump);
  } else {
    j(negative, on_not_smi_result, near_jump);
  }
  bind(&safe_div);

  SmiToInteger32(src2, src2);
  // Sign extend src1 into edx:eax.
  cdq();
  idivl(src2);
  Integer32ToSmi(src2, src2);  // Retag the divisor (it was untagged above).
  // Check that the remainder is zero.
  testl(rdx, rdx);
  if (src1.is(rax)) {
    Label smi_result;
    j(zero, &smi_result, Label::kNear);
    movp(src1, kScratchRegister);  // Restore rax before bailing out.
    jmp(on_not_smi_result, near_jump);
    bind(&smi_result);
  } else {
    j(not_zero, on_not_smi_result, near_jump);
  }
  if (!dst.is(src1) && src1.is(rax)) {
    movp(src1, kScratchRegister);
  }
  Integer32ToSmi(dst, rax);
}
2037
2038
// Computes the smi remainder of |src1| / |src2| into |dst|. Jumps to
// |on_not_smi_result| on division by zero, on Smi::kMinValue % -1 (would
// overflow idiv), or when the remainder is 0 with a negative dividend
// (which would be -0). Uses rax/rdx for idivl; when src1 is rax it is backed
// up in kScratchRegister and restored on every bailout path.
void MacroAssembler::SmiMod(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  DCHECK(!dst.is(kScratchRegister));
  DCHECK(!src1.is(kScratchRegister));
  DCHECK(!src2.is(kScratchRegister));
  DCHECK(!src2.is(rax));
  DCHECK(!src2.is(rdx));
  DCHECK(!src1.is(rdx));
  DCHECK(!src1.is(src2));

  testp(src2, src2);
  j(zero, on_not_smi_result, near_jump);

  if (src1.is(rax)) {
    movp(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  SmiToInteger32(src2, src2);

  // Test for the edge case of dividing Smi::kMinValue by -1 (will overflow).
  Label safe_div;
  cmpl(rax, Immediate(Smi::kMinValue));
  j(not_equal, &safe_div, Label::kNear);
  cmpl(src2, Immediate(-1));
  j(not_equal, &safe_div, Label::kNear);
  // Retag inputs and go slow case.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movp(src1, kScratchRegister);
  }
  jmp(on_not_smi_result, near_jump);
  bind(&safe_div);

  // Sign extend eax into edx:eax.
  cdq();
  idivl(src2);
  // Restore smi tags on inputs.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movp(src1, kScratchRegister);
  }
  // Check for a negative zero result. If the result is zero, and the
  // dividend is negative, go slow to return a floating point negative zero.
  Label smi_result;
  testl(rdx, rdx);
  j(not_zero, &smi_result, Label::kNear);
  testp(src1, src1);
  j(negative, on_not_smi_result, near_jump);
  bind(&smi_result);
  Integer32ToSmi(dst, rdx);  // idivl leaves the remainder in edx.
}
2093
2094
// Computes the bitwise NOT of the smi value in |src| into |dst|, i.e. the
// smi representing ~value. A mask is added before the 64-bit NOT so the tag
// (and, for 32-bit smis, padding) bits come out zero afterwards.
void MacroAssembler::SmiNot(Register dst, Register src) {
  DCHECK(!dst.is(kScratchRegister));
  DCHECK(!src.is(kScratchRegister));
  if (SmiValuesAre32Bits()) {
    // Set tag and padding bits before negating, so that they are zero
    // afterwards.
    movl(kScratchRegister, Immediate(~0));
  } else {
    DCHECK(SmiValuesAre31Bits());
    // Only the single tag bit needs to be set before the NOT.
    movl(kScratchRegister, Immediate(1));
  }
  if (dst.is(src)) {
    xorp(dst, kScratchRegister);
  } else {
    // lea adds src and the mask without clobbering src.
    leap(dst, Operand(src, kScratchRegister, times_1, 0));
  }
  notp(dst);
}
2113
2114
// Bitwise AND of two smis (tag bits stay zero under AND). |dst| must not
// alias |src2|, since src2 is read after dst may be written.
void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
  DCHECK(!dst.is(src2));
  if (!dst.is(src1)) {
    movp(dst, src1);
  }
  andp(dst, src2);
}
2122
2123
// Bitwise AND of the smi in |src| with the smi |constant| into |dst|.
void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    // x & 0 == 0 regardless of src.
    Set(dst, 0);
  } else if (dst.is(src)) {
    DCHECK(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    andp(dst, constant_reg);
  } else {
    // Load the constant into dst first, then AND with src (AND commutes).
    LoadSmiConstant(dst, constant);
    andp(dst, src);
  }
}
2136
2137
// Bitwise OR of two smis (tag bits stay zero under OR).
void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    // Writing dst must not clobber src2 before it is read.
    DCHECK(!src1.is(src2));
    movp(dst, src1);
  }
  orp(dst, src2);
}
2145
2146
// Bitwise OR of the smi in |src| with the smi |constant| into |dst|.
void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    DCHECK(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    orp(dst, constant_reg);
  } else {
    // Load the constant into dst first, then OR with src (OR commutes).
    LoadSmiConstant(dst, constant);
    orp(dst, src);
  }
}
2157
Steve Block3ce2e202009-11-05 08:53:23 +00002158
// Bitwise XOR of two smis (tag bits stay zero under XOR).
void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    // Writing dst must not clobber src2 before it is read.
    DCHECK(!src1.is(src2));
    movp(dst, src1);
  }
  xorp(dst, src2);
}
2166
2167
// Bitwise XOR of the smi in |src| with the smi |constant| into |dst|.
void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    DCHECK(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    xorp(dst, constant_reg);
  } else {
    // Load the constant into dst first, then XOR with src (XOR commutes).
    LoadSmiConstant(dst, constant);
    xorp(dst, src);
  }
}
2178
2179
// Arithmetic right shift of the smi in |src| by |shift_value| bits, result
// tagged as a smi in |dst|. Shifting by shift_value + kSmiShift untags and
// shifts in one step; shifting left by kSmiShift retags. Only the in-place
// (dst == src) form is implemented.
void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
                                                     Register src,
                                                     int shift_value) {
  DCHECK(is_uint5(shift_value));
  if (shift_value > 0) {
    if (dst.is(src)) {
      sarp(dst, Immediate(shift_value + kSmiShift));
      shlp(dst, Immediate(kSmiShift));
    } else {
      UNIMPLEMENTED();  // Not used.
    }
  }
}
2193
2194
// dst = src << shift_value on the smi payload. With 32-bit smi values the
// result always fits in the 64-bit tagged representation, so the tagged
// value is shifted directly. With 31-bit smis the value is untagged,
// shifted, range-checked (jumping to on_not_smi_result on overflow) and
// retagged.
void MacroAssembler::SmiShiftLeftConstant(Register dst,
                                          Register src,
                                          int shift_value,
                                          Label* on_not_smi_result,
                                          Label::Distance near_jump) {
  if (SmiValuesAre32Bits()) {
    if (!dst.is(src)) {
      movp(dst, src);
    }
    if (shift_value > 0) {
      // Shift amount specified by lower 5 bits, not six as the shl opcode.
      shlq(dst, Immediate(shift_value & 0x1f));
    }
  } else {
    DCHECK(SmiValuesAre31Bits());
    if (dst.is(src)) {
      UNIMPLEMENTED();  // Not used.
    } else {
      SmiToInteger32(dst, src);
      shll(dst, Immediate(shift_value));
      // Overflow past the 31-bit smi range bails out.
      JumpIfNotValidSmiValue(dst, on_not_smi_result, near_jump);
      Integer32ToSmi(dst, dst);
    }
  }
}
2220
2221
// dst = src >>> shift_value (logical shift of the smi payload). Because the
// result is interpreted as unsigned, a zero shift of a negative smi cannot
// be represented and jumps to on_not_smi_result.
void MacroAssembler::SmiShiftLogicalRightConstant(
    Register dst, Register src, int shift_value,
    Label* on_not_smi_result, Label::Distance near_jump) {
  // Logic right shift interprets its result as an *unsigned* number.
  if (dst.is(src)) {
    UNIMPLEMENTED();  // Not used.
  } else {
    if (shift_value == 0) {
      // Shift of zero: only the sign check is needed.
      testp(src, src);
      j(negative, on_not_smi_result, near_jump);
    }
    if (SmiValuesAre32Bits()) {
      // Untag+shift in one instruction, then retag.
      movp(dst, src);
      shrp(dst, Immediate(shift_value + kSmiShift));
      shlp(dst, Immediate(kSmiShift));
    } else {
      DCHECK(SmiValuesAre31Bits());
      SmiToInteger32(dst, src);
      shrp(dst, Immediate(shift_value));
      // The unsigned result may not fit in a 31-bit smi.
      JumpIfUIntNotValidSmiValue(dst, on_not_smi_result, near_jump);
      Integer32ToSmi(dst, dst);
    }
  }
}
2246
2247
// dst = src1 << src2 on smi payloads, with the shift count taken from the
// smi in src2 (only its low 5 bits are used, matching x86 shl semantics).
// Clobbers rcx (shift-count register); with 31-bit smis an overflowing
// result restores any rcx-aliased source and jumps to on_not_smi_result.
void MacroAssembler::SmiShiftLeft(Register dst,
                                  Register src1,
                                  Register src2,
                                  Label* on_not_smi_result,
                                  Label::Distance near_jump) {
  if (SmiValuesAre32Bits()) {
    DCHECK(!dst.is(rcx));
    if (!dst.is(src1)) {
      movp(dst, src1);
    }
    // Untag shift amount.
    SmiToInteger32(rcx, src2);
    // Shift amount specified by lower 5 bits, not six as the shl opcode.
    andp(rcx, Immediate(0x1f));
    shlq_cl(dst);
  } else {
    DCHECK(SmiValuesAre31Bits());
    DCHECK(!dst.is(kScratchRegister));
    DCHECK(!src1.is(kScratchRegister));
    DCHECK(!src2.is(kScratchRegister));
    DCHECK(!dst.is(src2));
    DCHECK(!dst.is(rcx));

    // Save rcx so a source living in it can be restored on the bail-out path.
    if (src1.is(rcx) || src2.is(rcx)) {
      movq(kScratchRegister, rcx);
    }
    if (dst.is(src1)) {
      UNIMPLEMENTED();  // Not used.
    } else {
      Label valid_result;
      SmiToInteger32(dst, src1);
      SmiToInteger32(rcx, src2);
      shll_cl(dst);
      JumpIfValidSmiValue(dst, &valid_result, Label::kNear);
      // As src1 or src2 could not be dst, we do not need to restore them for
      // clobbering dst.
      if (src1.is(rcx) || src2.is(rcx)) {
        if (src1.is(rcx)) {
          movq(src1, kScratchRegister);
        } else {
          movq(src2, kScratchRegister);
        }
      }
      jmp(on_not_smi_result, near_jump);
      bind(&valid_result);
      Integer32ToSmi(dst, dst);
    }
  }
}
2297
2298
// dst = src1 >>> src2 (logical shift of the smi payload by the smi count in
// src2). Clobbers rcx. If the unsigned result does not fit in the smi range,
// any rcx-aliased source is restored and control jumps to on_not_smi_result.
void MacroAssembler::SmiShiftLogicalRight(Register dst,
                                          Register src1,
                                          Register src2,
                                          Label* on_not_smi_result,
                                          Label::Distance near_jump) {
  DCHECK(!dst.is(kScratchRegister));
  DCHECK(!src1.is(kScratchRegister));
  DCHECK(!src2.is(kScratchRegister));
  DCHECK(!dst.is(src2));
  DCHECK(!dst.is(rcx));
  // Save rcx so a source living in it can be restored on the bail-out path.
  if (src1.is(rcx) || src2.is(rcx)) {
    movq(kScratchRegister, rcx);
  }
  if (dst.is(src1)) {
    UNIMPLEMENTED();  // Not used.
  } else {
    Label valid_result;
    SmiToInteger32(dst, src1);
    SmiToInteger32(rcx, src2);
    shrl_cl(dst);
    JumpIfUIntValidSmiValue(dst, &valid_result, Label::kNear);
    // As src1 or src2 could not be dst, we do not need to restore them for
    // clobbering dst.
    if (src1.is(rcx) || src2.is(rcx)) {
      if (src1.is(rcx)) {
        movq(src1, kScratchRegister);
      } else {
        movq(src2, kScratchRegister);
      }
    }
    jmp(on_not_smi_result, near_jump);
    bind(&valid_result);
    Integer32ToSmi(dst, dst);
  }
}
2334
2335
// dst = src1 >> src2 (arithmetic shift of the smi payload by the smi count
// in src2). Clobbers rcx. An arithmetic right shift can never leave the smi
// range, so no overflow bail-out is needed.
void MacroAssembler::SmiShiftArithmeticRight(Register dst,
                                             Register src1,
                                             Register src2) {
  DCHECK(!dst.is(kScratchRegister));
  DCHECK(!src1.is(kScratchRegister));
  DCHECK(!src2.is(kScratchRegister));
  DCHECK(!dst.is(rcx));

  SmiToInteger32(rcx, src2);
  if (!dst.is(src1)) {
    movp(dst, src1);
  }
  SmiToInteger32(dst, dst);
  sarl_cl(dst);
  Integer32ToSmi(dst, dst);
}
2352
2353
// Sets dst to whichever of src1/src2 is NOT a smi, branchlessly, assuming
// exactly one of them is a smi. If both are smis, jumps to on_not_smis.
// Uses the identity: dst = src1 ^ src2 ^ (smi-one-of-them), computed with a
// mask derived from src1's tag bit. Clobbers kScratchRegister.
void MacroAssembler::SelectNonSmi(Register dst,
                                  Register src1,
                                  Register src2,
                                  Label* on_not_smis,
                                  Label::Distance near_jump) {
  DCHECK(!dst.is(kScratchRegister));
  DCHECK(!src1.is(kScratchRegister));
  DCHECK(!src2.is(kScratchRegister));
  DCHECK(!dst.is(src1));
  DCHECK(!dst.is(src2));
  // Both operands must not be smis.
#ifdef DEBUG
  Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
  Check(not_both_smis, kBothRegistersWereSmisInSelectNonSmi);
#endif
  STATIC_ASSERT(kSmiTag == 0);
  // NOTE(review): compares an int 0 with the Smi* returned by FromInt(0);
  // relies on pointer/int comparison in DCHECK_EQ — confirm this builds
  // cleanly on all toolchains.
  DCHECK_EQ(0, Smi::FromInt(0));
  movl(kScratchRegister, Immediate(kSmiTagMask));
  andp(kScratchRegister, src1);
  testl(kScratchRegister, src2);
  // If non-zero then both are smis.
  j(not_zero, on_not_smis, near_jump);

  // Exactly one operand is a smi.
  DCHECK_EQ(1, static_cast<int>(kSmiTagMask));
  // kScratchRegister still holds src1 & kSmiTag, which is either zero or one.
  subp(kScratchRegister, Immediate(1));
  // If src1 is a smi, then scratch register all 1s, else it is all 0s.
  movp(dst, src1);
  xorp(dst, src2);
  andp(dst, kScratchRegister);
  // If src1 is a smi, dst holds src1 ^ src2, else it is zero.
  xorp(dst, src1);
  // If src1 is a smi, dst is src2, else it is src1, i.e., the non-smi.
}
2389
2390
// Converts the smi in src into an index scaled by 2^shift, returning a
// SmiIndex (register + ScaleFactor) usable in an addressing mode. With
// 32-bit smis the scaling is folded into the untagging shift; with 31-bit
// smis the value is sign-extended and the addressing-mode scale factor is
// used where possible. May clobber src (when dst == src).
SmiIndex MacroAssembler::SmiToIndex(Register dst,
                                    Register src,
                                    int shift) {
  if (SmiValuesAre32Bits()) {
    DCHECK(is_uint6(shift));
    // There is a possible optimization if shift is in the range 60-63, but that
    // will (and must) never happen.
    if (!dst.is(src)) {
      movp(dst, src);
    }
    if (shift < kSmiShift) {
      sarp(dst, Immediate(kSmiShift - shift));
    } else {
      shlp(dst, Immediate(shift - kSmiShift));
    }
    return SmiIndex(dst, times_1);
  } else {
    DCHECK(SmiValuesAre31Bits());
    DCHECK(shift >= times_1 && shift <= (static_cast<int>(times_8) + 1));
    if (!dst.is(src)) {
      movp(dst, src);
    }
    // We have to sign extend the index register to 64-bit as the SMI might
    // be negative.
    movsxlq(dst, dst);
    if (shift == times_1) {
      sarq(dst, Immediate(kSmiShift));
      return SmiIndex(dst, times_1);
    }
    // The remaining tag shift is absorbed into the scale factor (shift - 1).
    return SmiIndex(dst, static_cast<ScaleFactor>(shift - 1));
  }
}
2423
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002424
// Like SmiToIndex, but negates the (positive) smi in src first, producing a
// scaled negative index for addressing. See SmiToIndex for the two smi-size
// strategies.
SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
                                            Register src,
                                            int shift) {
  if (SmiValuesAre32Bits()) {
    // Register src holds a positive smi.
    DCHECK(is_uint6(shift));
    if (!dst.is(src)) {
      movp(dst, src);
    }
    negp(dst);
    if (shift < kSmiShift) {
      sarp(dst, Immediate(kSmiShift - shift));
    } else {
      shlp(dst, Immediate(shift - kSmiShift));
    }
    return SmiIndex(dst, times_1);
  } else {
    DCHECK(SmiValuesAre31Bits());
    DCHECK(shift >= times_1 && shift <= (static_cast<int>(times_8) + 1));
    if (!dst.is(src)) {
      movp(dst, src);
    }
    negq(dst);
    if (shift == times_1) {
      sarq(dst, Immediate(kSmiShift));
      return SmiIndex(dst, times_1);
    }
    // The remaining tag shift is absorbed into the scale factor (shift - 1).
    return SmiIndex(dst, static_cast<ScaleFactor>(shift - 1));
  }
}
2455
2456
// Adds the integer value of the smi stored at memory operand src to the
// 32-bit register dst. With 32-bit smi values the payload is read directly
// from the upper half of the field; with 31-bit smis it is untagged via
// kScratchRegister first.
void MacroAssembler::AddSmiField(Register dst, const Operand& src) {
  if (SmiValuesAre32Bits()) {
    DCHECK_EQ(0, kSmiShift % kBitsPerByte);  // Payload is byte-aligned.
    addl(dst, Operand(src, kSmiShift / kBitsPerByte));
  } else {
    DCHECK(SmiValuesAre31Bits());
    SmiToInteger32(kScratchRegister, src);
    addl(dst, kScratchRegister);
  }
}
2467
2468
// Pushes the tagged smi value onto the stack. Uses an immediate push when
// the bit pattern fits in 32 bits, otherwise materializes the constant in a
// register first.
void MacroAssembler::Push(Smi* source) {
  intptr_t smi = reinterpret_cast<intptr_t>(source);
  if (is_int32(smi)) {
    Push(Immediate(static_cast<int32_t>(smi)));
  } else {
    Register constant = GetSmiConstant(source);
    Push(constant);
  }
}
2478
2479
// Splits the raw pointer-sized word in src into two smis and pushes them
// (high part first, then low part), so an arbitrary word can live on a
// GC-scanned stack. Inverse of PopRegisterAsTwoSmis. Clobbers src.
void MacroAssembler::PushRegisterAsTwoSmis(Register src, Register scratch) {
  DCHECK(!src.is(scratch));
  movp(scratch, src);
  // High bits.
  shrp(src, Immediate(kPointerSize * kBitsPerByte - kSmiShift));
  shlp(src, Immediate(kSmiShift));
  Push(src);
  // Low bits.
  shlp(scratch, Immediate(kSmiShift));
  Push(scratch);
}
2491
2492
// Reassembles the word split by PushRegisterAsTwoSmis: pops the low-bits
// smi, then the high-bits smi, untags both and recombines them into dst.
void MacroAssembler::PopRegisterAsTwoSmis(Register dst, Register scratch) {
  DCHECK(!dst.is(scratch));
  Pop(scratch);
  // Low bits.
  shrp(scratch, Immediate(kSmiShift));
  Pop(dst);
  shrp(dst, Immediate(kSmiShift));
  // High bits.
  shlp(dst, Immediate(kPointerSize * kBitsPerByte - kSmiShift));
  orp(dst, scratch);
}
2504
2505
// Emits a test of the smi stored at src against the given smi constant.
// With 32-bit smi values only the upper half of the field (holding the
// payload) is tested; with 31-bit smis the whole tagged value is tested.
void MacroAssembler::Test(const Operand& src, Smi* source) {
  if (SmiValuesAre32Bits()) {
    testl(Operand(src, kIntSize), Immediate(source->value()));
  } else {
    DCHECK(SmiValuesAre31Bits());
    testl(src, Immediate(source));
  }
}
2514
2515
2516// ----------------------------------------------------------------------------
2517
2518
// Probes the heap's number-to-string cache for `object` (a smi or a
// HeapNumber). On a hit, stores the cached string in `result` and bumps the
// number_to_string_native counter; on a miss (or a non-number object, or
// NaN) jumps to not_found. Register `result` doubles as a temporary for the
// cache pointer; scratch1/scratch2 are clobbered.
void MacroAssembler::LookupNumberStringCache(Register object,
                                             Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* not_found) {
  // Use of registers. Register result is used as a temporary.
  Register number_string_cache = result;
  Register mask = scratch1;
  Register scratch = scratch2;

  // Load the number string cache.
  LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex);

  // Make the hash mask from the length of the number string cache. It
  // contains two elements (number and string) for each cache entry.
  SmiToInteger32(
      mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset));
  shrl(mask, Immediate(1));
  subp(mask, Immediate(1));  // Make mask.

  // Calculate the entry in the number string cache. The hash value in the
  // number string cache for smis is just the smi value, and the hash for
  // doubles is the xor of the upper and lower words. See
  // Heap::GetNumberStringCache.
  Label is_smi;
  Label load_result_from_cache;
  JumpIfSmi(object, &is_smi);
  CheckMap(object,
           isolate()->factory()->heap_number_map(),
           not_found,
           DONT_DO_SMI_CHECK);

  STATIC_ASSERT(8 == kDoubleSize);
  // Hash the double: upper 32 bits XOR lower word, masked to a cache slot.
  movl(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4));
  xorp(scratch, FieldOperand(object, HeapNumber::kValueOffset));
  andp(scratch, mask);
  // Each entry in string cache consists of two pointer sized fields,
  // but times_twice_pointer_size (multiplication by 16) scale factor
  // is not supported by addrmode on x64 platform.
  // So we have to premultiply entry index before lookup.
  shlp(scratch, Immediate(kPointerSizeLog2 + 1));

  Register index = scratch;
  Register probe = mask;
  movp(probe,
       FieldOperand(number_string_cache,
                    index,
                    times_1,
                    FixedArray::kHeaderSize));
  JumpIfSmi(probe, not_found);
  // Compare the doubles by value; ucomisd sets parity on unordered (NaN).
  movsd(xmm0, FieldOperand(object, HeapNumber::kValueOffset));
  ucomisd(xmm0, FieldOperand(probe, HeapNumber::kValueOffset));
  j(parity_even, not_found);  // Bail out if NaN is involved.
  j(not_equal, not_found);  // The cache did not contain this value.
  jmp(&load_result_from_cache);

  bind(&is_smi);
  SmiToInteger32(scratch, object);
  andp(scratch, mask);
  // Each entry in string cache consists of two pointer sized fields,
  // but times_twice_pointer_size (multiplication by 16) scale factor
  // is not supported by addrmode on x64 platform.
  // So we have to premultiply entry index before lookup.
  shlp(scratch, Immediate(kPointerSizeLog2 + 1));

  // Check if the entry is the smi we are looking for.
  cmpp(object,
       FieldOperand(number_string_cache,
                    index,
                    times_1,
                    FixedArray::kHeaderSize));
  j(not_equal, not_found);

  // Get the result from the cache.
  bind(&load_result_from_cache);
  movp(result,
       FieldOperand(number_string_cache,
                    index,
                    times_1,
                    FixedArray::kHeaderSize + kPointerSize));
  IncrementCounter(isolate()->counters()->number_to_string_native(), 1);
}
2601
2602
Ben Murdoch257744e2011-11-30 15:57:28 +00002603void MacroAssembler::JumpIfNotString(Register object,
2604 Register object_map,
2605 Label* not_string,
2606 Label::Distance near_jump) {
2607 Condition is_smi = CheckSmi(object);
2608 j(is_smi, not_string, near_jump);
2609 CmpObjectType(object, FIRST_NONSTRING_TYPE, object_map);
2610 j(above_equal, not_string, near_jump);
2611}
2612
2613
// Jumps to on_fail unless both objects are flat (sequential) one-byte
// strings. Clobbers scratch1/scratch2 with the instance types; the two
// masked types are combined into one register so a single compare checks
// both.
void MacroAssembler::JumpIfNotBothSequentialOneByteStrings(
    Register first_object, Register second_object, Register scratch1,
    Register scratch2, Label* on_fail, Label::Distance near_jump) {
  // Check that both objects are not smis.
  Condition either_smi = CheckEitherSmi(first_object, second_object);
  j(either_smi, on_fail, near_jump);

  // Load instance type for both strings.
  movp(scratch1, FieldOperand(first_object, HeapObject::kMapOffset));
  movp(scratch2, FieldOperand(second_object, HeapObject::kMapOffset));
  movzxbl(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzxbl(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat one-byte strings.
  DCHECK(kNotStringTag != 0);
  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatOneByteStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;

  andl(scratch1, Immediate(kFlatOneByteStringMask));
  andl(scratch2, Immediate(kFlatOneByteStringMask));
  // Interleave the bits to check both scratch1 and scratch2 in one test.
  DCHECK_EQ(0, kFlatOneByteStringMask & (kFlatOneByteStringMask << 3));
  leap(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmpl(scratch1,
       Immediate(kFlatOneByteStringTag + (kFlatOneByteStringTag << 3)));
  j(not_equal, on_fail, near_jump);
}
2643
2644
// Jumps to failure unless the given instance type denotes a flat
// (sequential) one-byte string. Clobbers scratch with the masked type.
void MacroAssembler::JumpIfInstanceTypeIsNotSequentialOneByte(
    Register instance_type, Register scratch, Label* failure,
    Label::Distance near_jump) {
  if (!scratch.is(instance_type)) {
    movl(scratch, instance_type);
  }

  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;

  andl(scratch, Immediate(kFlatOneByteStringMask));
  cmpl(scratch, Immediate(kStringTag | kSeqStringTag | kOneByteStringTag));
  j(not_equal, failure, near_jump);
}
2659
2660
// Jumps to on_fail unless both given instance types denote flat
// (sequential) one-byte strings. Same interleave-and-compare trick as
// JumpIfNotBothSequentialOneByteStrings; clobbers scratch1/scratch2.
void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialOneByte(
    Register first_object_instance_type, Register second_object_instance_type,
    Register scratch1, Register scratch2, Label* on_fail,
    Label::Distance near_jump) {
  // Load instance type for both strings.
  movp(scratch1, first_object_instance_type);
  movp(scratch2, second_object_instance_type);

  // Check that both are flat one-byte strings.
  DCHECK(kNotStringTag != 0);
  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatOneByteStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;

  andl(scratch1, Immediate(kFlatOneByteStringMask));
  andl(scratch2, Immediate(kFlatOneByteStringMask));
  // Interleave the bits to check both scratch1 and scratch2 in one test.
  DCHECK_EQ(0, kFlatOneByteStringMask & (kFlatOneByteStringMask << 3));
  leap(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmpl(scratch1,
       Immediate(kFlatOneByteStringTag + (kFlatOneByteStringTag << 3)));
  j(not_equal, on_fail, near_jump);
}
2685
2686
// Shared implementation for the two JumpIfNotUniqueNameInstanceType
// overloads below: given an instance-type byte (as a Register or an
// Operand), falls through when it is an internalized string or a Symbol,
// otherwise jumps to not_unique_name.
template<class T>
static void JumpIfNotUniqueNameHelper(MacroAssembler* masm,
                                      T operand_or_register,
                                      Label* not_unique_name,
                                      Label::Distance distance) {
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  Label succeed;
  // Internalized strings have both the string and internalized bits clear.
  masm->testb(operand_or_register,
              Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  masm->j(zero, &succeed, Label::kNear);
  // Otherwise only a Symbol qualifies as a unique name.
  masm->cmpb(operand_or_register, Immediate(static_cast<uint8_t>(SYMBOL_TYPE)));
  masm->j(not_equal, not_unique_name, distance);

  masm->bind(&succeed);
}
2702
2703
// Jumps to not_unique_name unless the instance-type byte at `operand`
// denotes a unique name (internalized string or Symbol).
void MacroAssembler::JumpIfNotUniqueNameInstanceType(Operand operand,
                                                     Label* not_unique_name,
                                                     Label::Distance distance) {
  JumpIfNotUniqueNameHelper<Operand>(this, operand, not_unique_name, distance);
}
2709
2710
// Jumps to not_unique_name unless the instance type in `reg` denotes a
// unique name (internalized string or Symbol).
void MacroAssembler::JumpIfNotUniqueNameInstanceType(Register reg,
                                                     Label* not_unique_name,
                                                     Label::Distance distance) {
  JumpIfNotUniqueNameHelper<Register>(this, reg, not_unique_name, distance);
}
2716
Steve Block44f0eee2011-05-26 01:26:41 +01002717
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002718void MacroAssembler::Move(Register dst, Register src) {
2719 if (!dst.is(src)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002720 movp(dst, src);
Steve Block6ded16b2010-05-10 14:33:55 +01002721 }
Steve Block6ded16b2010-05-10 14:33:55 +01002722}
2723
2724
// Loads the value of a handle into dst: smis are materialized as tagged
// immediates, heap objects go through MoveHeapObject (which handles
// new-space objects via a cell indirection).
void MacroAssembler::Move(Register dst, Handle<Object> source) {
  AllowDeferredHandleDereference smi_check;
  if (source->IsSmi()) {
    Move(dst, Smi::cast(*source));
  } else {
    MoveHeapObject(dst, source);
  }
}
2733
2734
// Stores the value of a handle to memory. Heap objects are staged through
// kScratchRegister, which is clobbered.
void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
  AllowDeferredHandleDereference smi_check;
  if (source->IsSmi()) {
    Move(dst, Smi::cast(*source));
  } else {
    MoveHeapObject(kScratchRegister, source);
    movp(dst, kScratchRegister);
  }
}
2744
2745
// Compares dst with the value of a handle (smi immediate or heap object
// staged through kScratchRegister, which is clobbered for heap objects).
void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
  AllowDeferredHandleDereference smi_check;
  if (source->IsSmi()) {
    Cmp(dst, Smi::cast(*source));
  } else {
    MoveHeapObject(kScratchRegister, source);
    cmpp(dst, kScratchRegister);
  }
}
2755
2756
// Compares a memory operand with the value of a handle (smi immediate or
// heap object staged through kScratchRegister, which is clobbered).
void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
  AllowDeferredHandleDereference smi_check;
  if (source->IsSmi()) {
    Cmp(dst, Smi::cast(*source));
  } else {
    MoveHeapObject(kScratchRegister, source);
    cmpp(dst, kScratchRegister);
  }
}
2766
2767
// Pushes the value of a handle: smis directly, heap objects staged through
// kScratchRegister (clobbered).
void MacroAssembler::Push(Handle<Object> source) {
  AllowDeferredHandleDereference smi_check;
  if (source->IsSmi()) {
    Push(Smi::cast(*source));
  } else {
    MoveHeapObject(kScratchRegister, source);
    Push(kScratchRegister);
  }
}
2777
2778
// Loads a heap-object handle into `result`. Objects in new space may move
// under GC, so they are referenced indirectly through a Cell (the cell is
// embedded and dereferenced at run time); old-space objects are embedded
// directly in the instruction stream.
void MacroAssembler::MoveHeapObject(Register result,
                                    Handle<Object> object) {
  AllowDeferredHandleDereference using_raw_address;
  DCHECK(object->IsHeapObject());
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<Cell> cell = isolate()->factory()->NewCell(object);
    Move(result, cell, RelocInfo::CELL);
    movp(result, Operand(result, 0));
  } else {
    Move(result, object, RelocInfo::EMBEDDED_OBJECT);
  }
}
2791
2792
// Loads the contents of a global Cell into dst. Uses the shorter
// rax-specific load encoding when dst is rax.
void MacroAssembler::LoadGlobalCell(Register dst, Handle<Cell> cell) {
  if (dst.is(rax)) {
    AllowDeferredHandleDereference embedding_raw_address;
    load_rax(cell.location(), RelocInfo::CELL);
  } else {
    Move(dst, cell, RelocInfo::CELL);
    movp(dst, Operand(dst, 0));
  }
}
2802
2803
Leon Clarkee46be812010-01-19 14:06:41 +00002804void MacroAssembler::Drop(int stack_elements) {
2805 if (stack_elements > 0) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002806 addp(rsp, Immediate(stack_elements * kPointerSize));
Leon Clarkee46be812010-01-19 14:06:41 +00002807 }
2808}
2809
2810
// Removes stack_elements slots located just below the return address,
// keeping the return address on top. Fast path: on 64-bit pointers a single
// slot can be removed by popping the return address over it; otherwise the
// return address is moved aside via `scratch` (clobbered).
void MacroAssembler::DropUnderReturnAddress(int stack_elements,
                                            Register scratch) {
  DCHECK(stack_elements > 0);
  if (kPointerSize == kInt64Size && stack_elements == 1) {
    popq(MemOperand(rsp, 0));
    return;
  }

  PopReturnAddressTo(scratch);
  Drop(stack_elements);
  PushReturnAddressFrom(scratch);
}
2823
2824
// Pointer-size push of a register: native pushq on x64; on x32 the stack
// pointer is adjusted by 4 and the 32-bit value stored manually.
void MacroAssembler::Push(Register src) {
  if (kPointerSize == kInt64Size) {
    pushq(src);
  } else {
    // x32 uses 64-bit push for rbp in the prologue.
    DCHECK(src.code() != rbp.code());
    leal(rsp, Operand(rsp, -4));
    movp(Operand(rsp, 0), src);
  }
}
2835
2836
// Pointer-size push of a memory operand. On x32 the value is staged through
// kScratchRegister (clobbered).
void MacroAssembler::Push(const Operand& src) {
  if (kPointerSize == kInt64Size) {
    pushq(src);
  } else {
    movp(kScratchRegister, src);
    leal(rsp, Operand(rsp, -4));
    movp(Operand(rsp, 0), kScratchRegister);
  }
}
2846
2847
// Always pushes a full 64-bit quadword, regardless of pointer size. On x32
// the value is staged through kScratchRegister (clobbered).
void MacroAssembler::PushQuad(const Operand& src) {
  if (kPointerSize == kInt64Size) {
    pushq(src);
  } else {
    movp(kScratchRegister, src);
    pushq(kScratchRegister);
  }
}
2856
2857
// Pointer-size push of an immediate (sign-extended by pushq on x64; stored
// as a 4-byte slot on x32).
void MacroAssembler::Push(Immediate value) {
  if (kPointerSize == kInt64Size) {
    pushq(value);
  } else {
    leal(rsp, Operand(rsp, -4));
    movp(Operand(rsp, 0), value);
  }
}
2866
2867
// Pushes a raw 32-bit immediate using the imm32 push encoding on x64, or a
// manual 4-byte store on x32.
void MacroAssembler::PushImm32(int32_t imm32) {
  if (kPointerSize == kInt64Size) {
    pushq_imm32(imm32);
  } else {
    leal(rsp, Operand(rsp, -4));
    movp(Operand(rsp, 0), Immediate(imm32));
  }
}
2876
2877
// Pointer-size pop into a register: native popq on x64; on x32 a manual
// 4-byte load plus stack-pointer adjustment.
void MacroAssembler::Pop(Register dst) {
  if (kPointerSize == kInt64Size) {
    popq(dst);
  } else {
    // x32 uses 64-bit pop for rbp in the epilogue.
    DCHECK(dst.code() != rbp.code());
    movp(dst, Operand(rsp, 0));
    leal(rsp, Operand(rsp, 4));
  }
}
2888
2889
// Pointer-size pop into a memory operand. On x32 the value is staged
// through a scratch register; if the destination's addressing uses
// kScratchRegister, kSmiConstantRegister is borrowed instead and its
// well-known constant value is restored afterwards.
void MacroAssembler::Pop(const Operand& dst) {
  if (kPointerSize == kInt64Size) {
    popq(dst);
  } else {
    Register scratch = dst.AddressUsesRegister(kScratchRegister)
        ? kSmiConstantRegister : kScratchRegister;
    movp(scratch, Operand(rsp, 0));
    movp(dst, scratch);
    leal(rsp, Operand(rsp, 4));
    if (scratch.is(kSmiConstantRegister)) {
      // Restore kSmiConstantRegister.
      movp(kSmiConstantRegister,
           reinterpret_cast<void*>(Smi::FromInt(kSmiConstantRegisterValue)),
           Assembler::RelocInfoNone());
    }
  }
}
2907
2908
// Always pops a full 64-bit quadword into a memory operand. On x32 the
// value is staged through kScratchRegister (clobbered).
void MacroAssembler::PopQuad(const Operand& dst) {
  if (kPointerSize == kInt64Size) {
    popq(dst);
  } else {
    popq(kScratchRegister);
    movp(dst, kScratchRegister);
  }
}
2917
2918
// Loads one of SharedFunctionInfo's int-encoded "special" fields into dst
// as a sign-extended integer. On x64 the field is a raw int32; on x32 it is
// stored as a smi and must be untagged.
void MacroAssembler::LoadSharedFunctionInfoSpecialField(Register dst,
                                                        Register base,
                                                        int offset) {
  // Only the odd-indexed int slots after kLengthOffset are special fields.
  DCHECK(offset > SharedFunctionInfo::kLengthOffset &&
         offset <= SharedFunctionInfo::kSize &&
         (((offset - SharedFunctionInfo::kLengthOffset) / kIntSize) % 2 == 1));
  if (kPointerSize == kInt64Size) {
    movsxlq(dst, FieldOperand(base, offset));
  } else {
    movp(dst, FieldOperand(base, offset));
    SmiToInteger32(dst, dst);
  }
}
2932
2933
// Tests a single bit of a SharedFunctionInfo special field, setting flags
// for a following conditional jump. On x32 the field is smi-encoded, so the
// bit index is offset by the smi tag shift before locating the byte.
void MacroAssembler::TestBitSharedFunctionInfoSpecialField(Register base,
                                                           int offset,
                                                           int bits) {
  // Only the odd-indexed int slots after kLengthOffset are special fields.
  DCHECK(offset > SharedFunctionInfo::kLengthOffset &&
         offset <= SharedFunctionInfo::kSize &&
         (((offset - SharedFunctionInfo::kLengthOffset) / kIntSize) % 2 == 1));
  if (kPointerSize == kInt32Size) {
    // On x32, this field is represented by SMI.
    bits += kSmiShift;
  }
  // Test only the byte containing the requested bit.
  int byte_offset = bits / kBitsPerByte;
  int bit_in_byte = bits & (kBitsPerByte - 1);
  testb(FieldOperand(base, offset + byte_offset), Immediate(1 << bit_in_byte));
}
2948
2949
// Indirect jump to an external (C++) address; clobbers kScratchRegister.
void MacroAssembler::Jump(ExternalReference ext) {
  LoadAddress(kScratchRegister, ext);
  jmp(kScratchRegister);
}
2954
2955
// Jump through a memory operand. On x32 the target must first be widened
// into kScratchRegister (clobbered).
void MacroAssembler::Jump(const Operand& op) {
  if (kPointerSize == kInt64Size) {
    jmp(op);
  } else {
    movp(kScratchRegister, op);
    jmp(kScratchRegister);
  }
}
2964
2965
// Jump to an absolute address with the given relocation mode; clobbers
// kScratchRegister.
void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
  Move(kScratchRegister, destination, rmode);
  jmp(kScratchRegister);
}
2970
2971
// Relocatable jump to a code object.
void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
  // TODO(X64): Inline this
  jmp(code_object, rmode);
}
2976
2977
// Size in bytes of the code emitted by Call(ExternalReference): the address
// load plus the indirect call through kScratchRegister.
int MacroAssembler::CallSize(ExternalReference ext) {
  // Opcode for call kScratchRegister is: Rex.B FF D4 (three bytes).
  return LoadAddressSize(ext) +
         Assembler::kCallScratchRegisterInstructionLength;
}
2983
2984
// Indirect call to an external (C++) address; clobbers kScratchRegister.
// Debug builds verify the emitted size against CallSize(ext).
void MacroAssembler::Call(ExternalReference ext) {
#ifdef DEBUG
  int end_position = pc_offset() + CallSize(ext);
#endif
  LoadAddress(kScratchRegister, ext);
  call(kScratchRegister);
#ifdef DEBUG
  CHECK_EQ(end_position, pc_offset());
#endif
}
2995
2996
// Calls through a memory operand.  Mirrors Jump(const Operand&): direct
// call [op] on 64-bit pointers, otherwise load into kScratchRegister first.
void MacroAssembler::Call(const Operand& op) {
  if (kPointerSize == kInt64Size) {
    call(op);
  } else {
    movp(kScratchRegister, op);
    call(kScratchRegister);
  }
}
3005
3006
Steve Blocka7e24c12009-10-30 11:49:00 +00003007void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
Steve Block44f0eee2011-05-26 01:26:41 +01003008#ifdef DEBUG
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003009 int end_position = pc_offset() + CallSize(destination);
Steve Block44f0eee2011-05-26 01:26:41 +01003010#endif
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003011 Move(kScratchRegister, destination, rmode);
Steve Blocka7e24c12009-10-30 11:49:00 +00003012 call(kScratchRegister);
Steve Block44f0eee2011-05-26 01:26:41 +01003013#ifdef DEBUG
3014 CHECK_EQ(pc_offset(), end_position);
3015#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003016}
3017
3018
// Calls a code object directly.  |ast_id| ties the call site to type
// feedback.  Only code-target (or code-age) reloc modes are legal here.
void MacroAssembler::Call(Handle<Code> code_object,
                          RelocInfo::Mode rmode,
                          TypeFeedbackId ast_id) {
#ifdef DEBUG
  int end_position = pc_offset() + CallSize(code_object);
#endif
  DCHECK(RelocInfo::IsCodeTarget(rmode) ||
      rmode == RelocInfo::CODE_AGE_SEQUENCE);
  call(code_object, rmode, ast_id);
#ifdef DEBUG
  CHECK_EQ(end_position, pc_offset());
#endif
}
3032
3033
// Pushes the 11 safepoint-saved general registers, then reserves stack space
// so that the total frame covers kNumSafepointRegisters slots.  The push
// order here defines kSafepointPushRegisterIndices below and must match
// Popad exactly (in reverse).
void MacroAssembler::Pushad() {
  Push(rax);
  Push(rcx);
  Push(rdx);
  Push(rbx);
  // Not pushing rsp or rbp.
  Push(rsi);
  Push(rdi);
  Push(r8);
  Push(r9);
  // r10 is kScratchRegister.
  Push(r11);
  // r12 is kSmiConstantRegister.
  // r13 is kRootRegister.
  Push(r14);
  Push(r15);
  STATIC_ASSERT(11 == kNumSafepointSavedRegisters);
  // Use lea for symmetry with Popad.
  int sp_delta =
      (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
  leap(rsp, Operand(rsp, -sp_delta));
}
3056
3057
// Restores the registers saved by Pushad, in exact reverse order.
void MacroAssembler::Popad() {
  // Popad must not change the flags, so use lea instead of addq.
  int sp_delta =
      (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
  leap(rsp, Operand(rsp, sp_delta));
  Pop(r15);
  Pop(r14);
  Pop(r11);
  Pop(r9);
  Pop(r8);
  Pop(rdi);
  Pop(rsi);
  Pop(rbx);
  Pop(rdx);
  Pop(rcx);
  Pop(rax);
}
3075
3076
// Discards the entire Pushad frame without restoring any registers.
void MacroAssembler::Dropad() {
  addp(rsp, Immediate(kNumSafepointRegisters * kPointerSize));
}
3080
3081
// Order general registers are pushed by Pushad:
// rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r14, r15.
// Maps a register code to its slot index in the Pushad frame; -1 marks
// registers that Pushad deliberately skips (rsp, rbp, r10/kScratchRegister,
// r12/kSmiConstantRegister, r13/kRootRegister).
const int
MacroAssembler::kSafepointPushRegisterIndices[Register::kNumRegisters] = {
    0,   // rax
    1,   // rcx
    2,   // rdx
    3,   // rbx
    -1,  // rsp - not pushed
    -1,  // rbp - not pushed
    4,   // rsi
    5,   // rdi
    6,   // r8
    7,   // r9
    -1,  // r10 - kScratchRegister
    8,   // r11
    -1,  // r12 - kSmiConstantRegister
    -1,  // r13 - kRootRegister
    9,   // r14
    10   // r15
};
3103
3104
// Stores an immediate into the safepoint stack slot reserved for |dst|.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst,
                                                  const Immediate& imm) {
  movp(SafepointRegisterSlot(dst), imm);
}
3109
3110
// Stores |src| into the safepoint stack slot reserved for |dst|.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
  movp(SafepointRegisterSlot(dst), src);
}
3114
3115
// Loads the value saved in |src|'s safepoint stack slot into |dst|.
void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  movp(dst, SafepointRegisterSlot(src));
}
3119
3120
// Returns the rsp-relative operand addressing |reg|'s slot in the Pushad
// frame (index derived from kSafepointPushRegisterIndices).
Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return Operand(rsp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}
3124
3125
// Builds a stack handler frame (fp, context, state, code, next) on the stack
// and links it at the front of the isolate's handler chain.  The layout must
// match StackHandlerConstants exactly; the STATIC_ASSERTs pin that contract.
void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
                                    int handler_index) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize +
                                                kFPOnStackSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // We will build up the handler from the bottom by pushing on the stack.
  // First push the frame pointer and context.
  if (kind == StackHandler::JS_ENTRY) {
    // The frame pointer does not point to a JS frame so we save NULL for
    // rbp. We expect the code throwing an exception to check rbp before
    // dereferencing it to restore the context.
    pushq(Immediate(0));  // NULL frame pointer.
    Push(Smi::FromInt(0));  // No context.
  } else {
    pushq(rbp);
    Push(rsi);
  }

  // Push the state and the code object.  State packs the handler-table index
  // and the handler kind into one word.
  unsigned state =
      StackHandler::IndexField::encode(handler_index) |
      StackHandler::KindField::encode(kind);
  Push(Immediate(state));
  Push(CodeObject());

  // Link the current handler as the next handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  Push(ExternalOperand(handler_address));
  // Set this new handler as the current one.
  movp(ExternalOperand(handler_address), rsp);
}
3163
3164
// Unlinks the top stack handler (restoring the previous one as current) and
// drops the rest of the handler frame from the stack.
void MacroAssembler::PopTryHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  Pop(ExternalOperand(handler_address));
  // One slot (the next-handler link) was already consumed by the Pop above.
  addp(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}
3171
3172
// Computes the handler entry address and jumps to it.  The handler table is
// a fixed array of (smi-tagged) code offsets.
// Expects: rax = exception, rdi = code object, rdx = packed index/state.
// Clobbers rbx, rdx, rdi.
void MacroAssembler::JumpToHandlerEntry() {
  movp(rbx, FieldOperand(rdi, Code::kHandlerTableOffset));
  // Strip the kind bits so rdx is the table index.
  shrp(rdx, Immediate(StackHandler::kKindWidth));
  movp(rdx,
       FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize));
  SmiToInteger64(rdx, rdx);
  leap(rdi, FieldOperand(rdi, rdx, times_1, Code::kHeaderSize));
  jmp(rdi);
}
3185
3186
// Throws |value| to the topmost stack handler: unwinds to it, unlinks it,
// restores its context/frame pointer, and jumps to its handler entry.
void MacroAssembler::Throw(Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize +
                                                kFPOnStackSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // The exception is expected in rax.
  if (!value.is(rax)) {
    movp(rax, value);
  }
  // Drop the stack pointer to the top of the top handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  movp(rsp, ExternalOperand(handler_address));
  // Restore the next handler.
  Pop(ExternalOperand(handler_address));

  // Remove the code object and state, compute the handler address in rdi.
  Pop(rdi);  // Code object.
  Pop(rdx);  // Offset and state.

  // Restore the context and frame pointer.
  Pop(rsi);  // Context.
  popq(rbp);  // Frame pointer.

  // If the handler is a JS frame, restore the context to the frame.
  // (kind == ENTRY) == (rbp == 0) == (rsi == 0), so we could test either
  // rbp or rsi.
  Label skip;
  testp(rsi, rsi);
  j(zero, &skip, Label::kNear);
  movp(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
  bind(&skip);

  JumpToHandlerEntry();
}
3226
3227
// Throws |value| past all JS handlers: walks the handler chain until the top
// JS_ENTRY handler, unlinks it, and jumps to its handler entry.  Context and
// frame pointer come out as the zeros saved by the entry handler.
void MacroAssembler::ThrowUncatchable(Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize +
                                                kFPOnStackSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // The exception is expected in rax.
  if (!value.is(rax)) {
    movp(rax, value);
  }
  // Drop the stack pointer to the top of the top stack handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  Load(rsp, handler_address);

  // Unwind the handlers until the top ENTRY handler is found.
  Label fetch_next, check_kind;
  jmp(&check_kind, Label::kNear);
  bind(&fetch_next);
  movp(rsp, Operand(rsp, StackHandlerConstants::kNextOffset));

  bind(&check_kind);
  // JS_ENTRY encodes as kind 0, so a non-zero kind field means "keep going".
  STATIC_ASSERT(StackHandler::JS_ENTRY == 0);
  testl(Operand(rsp, StackHandlerConstants::kStateOffset),
        Immediate(StackHandler::KindField::kMask));
  j(not_zero, &fetch_next);

  // Set the top handler address to next handler past the top ENTRY handler.
  Pop(ExternalOperand(handler_address));

  // Remove the code object and state, compute the handler address in rdi.
  Pop(rdi);  // Code object.
  Pop(rdx);  // Offset and state.

  // Clear the context pointer and frame pointer (0 was saved in the handler).
  Pop(rsi);
  popq(rbp);

  JumpToHandlerEntry();
}
3271
3272
// Plain return, popping no argument bytes.
void MacroAssembler::Ret() {
  ret(0);
}
3276
3277
// Returns and drops |bytes_dropped| bytes of arguments.  The ret-imm16 form
// only encodes 16 bits, so larger drops shuffle the return address through
// |scratch| around an explicit rsp adjustment.
void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
  if (is_uint16(bytes_dropped)) {
    ret(bytes_dropped);
  } else {
    PopReturnAddressTo(scratch);
    addp(rsp, Immediate(bytes_dropped));
    PushReturnAddressFrom(scratch);
    ret(0);
  }
}
3288
3289
// Compares the two top x87 stack values into EFLAGS and pops both
// (fucomip pops one, fstp(0) discards the other).
void MacroAssembler::FCmp() {
  fucomip();
  fstp(0);
}
3294
3295
// Loads |heap_object|'s map into |map| and compares its instance type
// against |type| (flags set for a following j(...)).
void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  movp(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}
3302
3303
// Compares the instance-type byte of |map| against |type|.
void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       Immediate(static_cast<int8_t>(type)));
}
3308
3309
// Jumps to |fail| unless |map|'s elements kind is one of the four fast kinds
// (smi/object, packed/holey).  Relies on those kinds being encoded as 0..3
// in bit field 2, pinned by the STATIC_ASSERTs.
void MacroAssembler::CheckFastElements(Register map,
                                       Label* fail,
                                       Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastHoleyElementValue));
  j(above, fail, distance);
}
3321
3322
// Jumps to |fail| unless |map|'s elements kind is FAST_ELEMENTS or
// FAST_HOLEY_ELEMENTS: kinds at or below holey-smi fail low, kinds above
// holey-object fail high.
void MacroAssembler::CheckFastObjectElements(Register map,
                                             Label* fail,
                                             Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastHoleySmiElementValue));
  j(below_equal, fail, distance);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastHoleyElementValue));
  j(above, fail, distance);
}
3337
3338
// Jumps to |fail| unless |map|'s elements kind is FAST_SMI_ELEMENTS or
// FAST_HOLEY_SMI_ELEMENTS (the two lowest encodings).
void MacroAssembler::CheckFastSmiElements(Register map,
                                          Label* fail,
                                          Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastHoleySmiElementValue));
  j(above, fail, distance);
}
3348
3349
// Stores |maybe_number| (a smi or heap number) into slot |index| of the
// FixedDoubleArray |elements|, canonicalizing any NaN on the way in so the
// array never contains signalling/varied NaN bit patterns.  Jumps to |fail|
// if |maybe_number| is neither a smi nor a heap number.
// Clobbers kScratchRegister and |xmm_scratch|.
void MacroAssembler::StoreNumberToDoubleElements(
    Register maybe_number,
    Register elements,
    Register index,
    XMMRegister xmm_scratch,
    Label* fail,
    int elements_offset) {
  Label smi_value, is_nan, maybe_nan, not_nan, have_double_value, done;

  JumpIfSmi(maybe_number, &smi_value, Label::kNear);

  CheckMap(maybe_number,
           isolate()->factory()->heap_number_map(),
           fail,
           DONT_DO_SMI_CHECK);

  // Double value, canonicalize NaN.
  // Inspect the upper 32 bits of the double: values at or above the
  // NaN/Infinity bound need the slow check below.
  uint32_t offset = HeapNumber::kValueOffset + sizeof(kHoleNanLower32);
  cmpl(FieldOperand(maybe_number, offset),
       Immediate(kNaNOrInfinityLowerBoundUpper32));
  j(greater_equal, &maybe_nan, Label::kNear);

  bind(&not_nan);
  movsd(xmm_scratch, FieldOperand(maybe_number, HeapNumber::kValueOffset));
  bind(&have_double_value);
  movsd(FieldOperand(elements, index, times_8,
                     FixedDoubleArray::kHeaderSize - elements_offset),
        xmm_scratch);
  jmp(&done);

  bind(&maybe_nan);
  // Could be NaN or Infinity. If fraction is not zero, it's NaN, otherwise
  // it's an Infinity, and the non-NaN code path applies.
  j(greater, &is_nan, Label::kNear);
  cmpl(FieldOperand(maybe_number, HeapNumber::kValueOffset), Immediate(0));
  j(zero, &not_nan);
  bind(&is_nan);
  // Convert all NaNs to the same canonical NaN value when they are stored in
  // the double array.
  Set(kScratchRegister,
      bit_cast<uint64_t>(
          FixedDoubleArray::canonical_not_the_hole_nan_as_double()));
  movq(xmm_scratch, kScratchRegister);
  jmp(&have_double_value, Label::kNear);

  bind(&smi_value);
  // Value is a smi. convert to a double and store.
  // Preserve original value.
  SmiToInteger32(kScratchRegister, maybe_number);
  Cvtlsi2sd(xmm_scratch, kScratchRegister);
  movsd(FieldOperand(elements, index, times_8,
                     FixedDoubleArray::kHeaderSize - elements_offset),
        xmm_scratch);
  bind(&done);
}
3405
3406
// Compares |obj|'s map word against |map|; sets flags for a following jump.
void MacroAssembler::CompareMap(Register obj, Handle<Map> map) {
  Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
}
3410
3411
// Jumps to |fail| unless |obj|'s map is exactly |map|.  With DO_SMI_CHECK a
// smi |obj| also fails (a smi has no map word to read).
void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }

  CompareMap(obj, map);
  j(not_equal, fail);
}
3423
3424
// Clamps the 32-bit value in |reg| to the range [0, 255] in place.
// Values already in range pass through untouched.
void MacroAssembler::ClampUint8(Register reg) {
  Label done;
  testl(reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  setcc(negative, reg);  // 1 if negative, 0 if positive.
  decb(reg);  // 0 if negative, 255 if positive.
  bind(&done);
}
3433
3434
// Converts the double in |input_reg| to an integer clamped to [0, 255],
// leaving it in |result_reg|.  Out-of-range converted values are clamped by
// sign; conversion failure (cvtsd2si overflow indicator) falls back to a
// 0-or-255 decision via a compare against +0.0 in |temp_xmm_reg|.
void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
                                        XMMRegister temp_xmm_reg,
                                        Register result_reg) {
  Label done;
  Label conv_failure;
  xorps(temp_xmm_reg, temp_xmm_reg);  // temp_xmm_reg = +0.0.
  cvtsd2si(result_reg, input_reg);
  testl(result_reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);  // Already in [0, 255].
  // cmpl with 1 overflows only for INT32_MIN, the value cvtsd2si produces
  // when the conversion fails (NaN or out of int32 range).
  cmpl(result_reg, Immediate(1));
  j(overflow, &conv_failure, Label::kNear);
  // In-range conversion but outside [0, 255]: clamp by sign.
  movl(result_reg, Immediate(0));
  setcc(sign, result_reg);   // 1 if negative, else 0.
  subl(result_reg, Immediate(1));  // -1 -> 0 path, 0 -> 255 path below.
  andl(result_reg, Immediate(255));
  jmp(&done, Label::kNear);
  bind(&conv_failure);
  Set(result_reg, 0);
  ucomisd(input_reg, temp_xmm_reg);
  j(below, &done, Label::kNear);  // Negative (or NaN-unordered below): 0.
  Set(result_reg, 255);
  bind(&done);
}
3458
3459
// Converts the zero-extended uint32 in |src| to a double in |dst| via a
// 64-bit signed conversion (safe because the upper 32 bits are zero, which
// the debug-mode check enforces).
void MacroAssembler::LoadUint32(XMMRegister dst,
                                Register src) {
  if (FLAG_debug_code) {
    cmpq(src, Immediate(0xffffffff));
    Assert(below_equal, kInputGPRIsExpectedToHaveUpper32Cleared);
  }
  cvtqsi2sd(dst, src);
}
3468
3469
// Truncates the double stored at [input_reg + offset] to an int32 in
// |result_reg| by calling the out-of-line DoubleToIStub.
void MacroAssembler::SlowTruncateToI(Register result_reg,
                                     Register input_reg,
                                     int offset) {
  DoubleToIStub stub(isolate(), input_reg, result_reg, offset, true);
  call(stub.GetCode(), RelocInfo::CODE_TARGET);
}
3476
3477
// Truncates the HeapNumber in |input_reg| to an int32 in |result_reg|.
// Fast path: cvttsd2siq.  On conversion failure (which yields the single
// "integer indefinite" value; the cmpq-with-1/overflow test detects exactly
// that value) falls back to SlowTruncateToI.  Clobbers xmm0.
void MacroAssembler::TruncateHeapNumberToI(Register result_reg,
                                           Register input_reg) {
  Label done;
  movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
  cvttsd2siq(result_reg, xmm0);
  cmpq(result_reg, Immediate(1));
  j(no_overflow, &done, Label::kNear);

  // Slow case.
  if (input_reg.is(result_reg)) {
    // input_reg is about to be overwritten; spill the double to the stack
    // and let the stub read it from there.
    subp(rsp, Immediate(kDoubleSize));
    movsd(MemOperand(rsp, 0), xmm0);
    SlowTruncateToI(result_reg, rsp, 0);
    addp(rsp, Immediate(kDoubleSize));
  } else {
    SlowTruncateToI(result_reg, input_reg);
  }

  bind(&done);
  // Keep our invariant that the upper 32 bits are zero.
  movl(result_reg, result_reg);
}
3500
3501
// Truncates the double in |input_reg| to an int32 in |result_reg|, spilling
// to the stack and calling the DoubleToIStub when the fast cvttsd2siq
// conversion fails (detected via the integer-indefinite/overflow trick).
void MacroAssembler::TruncateDoubleToI(Register result_reg,
                                       XMMRegister input_reg) {
  Label done;
  cvttsd2siq(result_reg, input_reg);
  cmpq(result_reg, Immediate(1));
  j(no_overflow, &done, Label::kNear);

  subp(rsp, Immediate(kDoubleSize));
  movsd(MemOperand(rsp, 0), input_reg);
  SlowTruncateToI(result_reg, rsp, 0);
  addp(rsp, Immediate(kDoubleSize));

  bind(&done);
  // Keep our invariant that the upper 32 bits are zero.
  movl(result_reg, result_reg);
}
3518
3519
// Converts the double in |input_reg| to an int32 in |result_reg|, jumping to
// |lost_precision| if the round-trip back to double is inexact, to |is_nan|
// for NaN inputs, and (with FAIL_ON_MINUS_ZERO) to |minus_zero| when the
// input was -0.0.  Clobbers xmm0.  |scratch| is currently unused.
void MacroAssembler::DoubleToI(Register result_reg, XMMRegister input_reg,
                               XMMRegister scratch,
                               MinusZeroMode minus_zero_mode,
                               Label* lost_precision, Label* is_nan,
                               Label* minus_zero, Label::Distance dst) {
  cvttsd2si(result_reg, input_reg);
  // Round-trip: convert back and compare with the original.
  Cvtlsi2sd(xmm0, result_reg);
  ucomisd(xmm0, input_reg);
  j(not_equal, lost_precision, dst);
  j(parity_even, is_nan, dst);  // NaN.
  if (minus_zero_mode == FAIL_ON_MINUS_ZERO) {
    Label done;
    // The integer converted back is equal to the original. We
    // only have to test if we got -0 as an input.
    testl(result_reg, result_reg);
    j(not_zero, &done, Label::kNear);
    movmskpd(result_reg, input_reg);
    // Bit 0 contains the sign of the double in input_reg.
    // If input was positive, we are ok and return 0, otherwise
    // jump to minus_zero.
    andl(result_reg, Immediate(1));
    j(not_zero, minus_zero, dst);
    bind(&done);
  }
}
3545
3546
// Loads |map|'s descriptor array into |descriptors|.
void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  movp(descriptors, FieldOperand(map, Map::kDescriptorsOffset));
}
3551
3552
// Extracts the number-of-own-descriptors field of |map|'s bit field 3
// into |dst| (as an untagged integer).
void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
  movl(dst, FieldOperand(map, Map::kBitField3Offset));
  DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
}
3557
3558
// Extracts the enum-cache length from |map|'s bit field 3 into |dst| as a
// smi.  Relies on the field occupying the low bits (shift == 0), so a plain
// mask suffices.
void MacroAssembler::EnumLength(Register dst, Register map) {
  STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
  movl(dst, FieldOperand(map, Map::kBitField3Offset));
  andl(dst, Immediate(Map::EnumLengthBits::kMask));
  Integer32ToSmi(dst, dst);
}
3565
3566
// Tail-jumps to |success| if |obj|'s map is exactly |map| (optionally
// failing smis first); otherwise falls through.  |unused| is kept only for
// signature compatibility.
void MacroAssembler::DispatchMap(Register obj,
                                 Register unused,
                                 Handle<Map> map,
                                 Handle<Code> success,
                                 SmiCheckType smi_check_type) {
  Label fail;
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, &fail);
  }
  Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
  j(equal, success, RelocInfo::CODE_TARGET);

  bind(&fail);
}
3581
3582
// Debug-mode check that |object| is a smi or a heap number; aborts with
// kOperandIsNotANumber otherwise.  No-op unless emit_debug_code().
void MacroAssembler::AssertNumber(Register object) {
  if (emit_debug_code()) {
    Label ok;
    Condition is_smi = CheckSmi(object);
    j(is_smi, &ok, Label::kNear);
    Cmp(FieldOperand(object, HeapObject::kMapOffset),
        isolate()->factory()->heap_number_map());
    Check(equal, kOperandIsNotANumber);
    bind(&ok);
  }
}
3594
3595
// Debug-mode check that |object| is not a smi.
void MacroAssembler::AssertNotSmi(Register object) {
  if (emit_debug_code()) {
    Condition is_smi = CheckSmi(object);
    Check(NegateCondition(is_smi), kOperandIsASmi);
  }
}
3602
3603
// Debug-mode check that the register value is a smi.
void MacroAssembler::AssertSmi(Register object) {
  if (emit_debug_code()) {
    Condition is_smi = CheckSmi(object);
    Check(is_smi, kOperandIsNotASmi);
  }
}
3610
3611
// Debug-mode check that the memory operand holds a smi.
void MacroAssembler::AssertSmi(const Operand& object) {
  if (emit_debug_code()) {
    Condition is_smi = CheckSmi(object);
    Check(is_smi, kOperandIsNotASmi);
  }
}
3618
3619
// Debug-mode check that the upper 32 bits of |int32_register| are zero,
// i.e. the value is a properly zero-extended 32-bit quantity.
// Clobbers kScratchRegister (hence the DCHECK).
void MacroAssembler::AssertZeroExtended(Register int32_register) {
  if (emit_debug_code()) {
    DCHECK(!int32_register.is(kScratchRegister));
    movq(kScratchRegister, V8_INT64_C(0x0000000100000000));
    cmpq(kScratchRegister, int32_register);
    Check(above_equal, k32BitValueInRegisterIsNotZeroExtended);
  }
}
3628
3629
// Debug-mode check that |object| is a (non-smi) string.  |object| is
// preserved by saving it around the map load.
void MacroAssembler::AssertString(Register object) {
  if (emit_debug_code()) {
    testb(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAString);
    Push(object);
    movp(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, FIRST_NONSTRING_TYPE);
    Pop(object);
    Check(below, kOperandIsNotAString);
  }
}
3641
3642
// Debug-mode check that |object| is a (non-smi) name, i.e. its instance
// type is at or below LAST_NAME_TYPE.  |object| is preserved.
void MacroAssembler::AssertName(Register object) {
  if (emit_debug_code()) {
    testb(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAName);
    Push(object);
    movp(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, LAST_NAME_TYPE);
    Pop(object);
    Check(below_equal, kOperandIsNotAName);
  }
}
3654
3655
// Debug-mode check that |object| is either the undefined value or an
// AllocationSite (identified by its map at offset 0).
void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) {
  if (emit_debug_code()) {
    Label done_checking;
    AssertNotSmi(object);
    Cmp(object, isolate()->factory()->undefined_value());
    j(equal, &done_checking);
    Cmp(FieldOperand(object, 0), isolate()->factory()->allocation_site_map());
    Assert(equal, kExpectedUndefinedOrCell);
    bind(&done_checking);
  }
}
3667
3668
// Debug-mode check that |src| equals the root value at |root_value_index|,
// aborting with |reason| on mismatch.  Clobbers kScratchRegister.
void MacroAssembler::AssertRootValue(Register src,
                                     Heap::RootListIndex root_value_index,
                                     BailoutReason reason) {
  if (emit_debug_code()) {
    DCHECK(!src.is(kScratchRegister));
    LoadRoot(kScratchRegister, root_value_index);
    cmpp(src, kScratchRegister);
    Check(equal, reason);
  }
}
3679
3680
3681
// Loads |heap_object|'s map and instance type, tests the string mask, and
// returns the condition (zero) that holds when the object is a string.
Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  movp(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  testb(instance_type, Immediate(kIsNotStringMask));
  return zero;
}
3691
3692
// Loads |heap_object|'s map and instance type, compares against
// LAST_NAME_TYPE, and returns the condition (below_equal) that holds when
// the object is a name.
Condition MacroAssembler::IsObjectNameType(Register heap_object,
                                           Register map,
                                           Register instance_type) {
  movp(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  cmpb(instance_type, Immediate(static_cast<uint8_t>(LAST_NAME_TYPE)));
  return below_equal;
}
3701
3702
// Loads the prototype of |function| into |result|, jumping to |miss| when it
// cannot be resolved here (prototype is the hole; or, when
// |miss_on_bound_function| is set, the receiver is a smi, not a JSFunction,
// or a bound function). Clobbers kScratchRegister and |result|.
void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Label* miss,
                                             bool miss_on_bound_function) {
  Label non_instance;
  if (miss_on_bound_function) {
    // Check that the receiver isn't a smi.
    testl(function, Immediate(kSmiTagMask));
    j(zero, miss);

    // Check that the function really is a function.
    CmpObjectType(function, JS_FUNCTION_TYPE, result);
    j(not_equal, miss);

    movp(kScratchRegister,
         FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
    // It's not smi-tagged (stored in the top half of a smi-tagged 8-byte
    // field).
    TestBitSharedFunctionInfoSpecialField(kScratchRegister,
        SharedFunctionInfo::kCompilerHintsOffset,
        SharedFunctionInfo::kBoundFunction);
    j(not_zero, miss);

    // Make sure that the function has an instance prototype.
    // NOTE: CmpObjectType above left the function's map in |result|.
    testb(FieldOperand(result, Map::kBitFieldOffset),
          Immediate(1 << Map::kHasNonInstancePrototype));
    j(not_zero, &non_instance, Label::kNear);
  }

  // Get the prototype or initial map from the function.
  movp(result,
       FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  CompareRoot(result, Heap::kTheHoleValueRootIndex);
  j(equal, miss);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, kScratchRegister);
  j(not_equal, &done, Label::kNear);

  // Get the prototype from the initial map.
  movp(result, FieldOperand(result, Map::kPrototypeOffset));

  if (miss_on_bound_function) {
    jmp(&done, Label::kNear);

    // Non-instance prototype: Fetch prototype from constructor field
    // in initial map.
    bind(&non_instance);
    movp(result, FieldOperand(result, Map::kConstructorOffset));
  }

  // All done.
  bind(&done);
}
3762
3763
3764void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
3765 if (FLAG_native_code_counters && counter->Enabled()) {
Steve Block44f0eee2011-05-26 01:26:41 +01003766 Operand counter_operand = ExternalOperand(ExternalReference(counter));
Ben Murdoch8b112d22011-06-08 16:22:53 +01003767 movl(counter_operand, Immediate(value));
Steve Blocka7e24c12009-10-30 11:49:00 +00003768 }
3769}
3770
3771
3772void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003773 DCHECK(value > 0);
Steve Blocka7e24c12009-10-30 11:49:00 +00003774 if (FLAG_native_code_counters && counter->Enabled()) {
Steve Block44f0eee2011-05-26 01:26:41 +01003775 Operand counter_operand = ExternalOperand(ExternalReference(counter));
Steve Blocka7e24c12009-10-30 11:49:00 +00003776 if (value == 1) {
Steve Block44f0eee2011-05-26 01:26:41 +01003777 incl(counter_operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00003778 } else {
Steve Block44f0eee2011-05-26 01:26:41 +01003779 addl(counter_operand, Immediate(value));
Steve Blocka7e24c12009-10-30 11:49:00 +00003780 }
3781 }
3782}
3783
3784
3785void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003786 DCHECK(value > 0);
Steve Blocka7e24c12009-10-30 11:49:00 +00003787 if (FLAG_native_code_counters && counter->Enabled()) {
Steve Block44f0eee2011-05-26 01:26:41 +01003788 Operand counter_operand = ExternalOperand(ExternalReference(counter));
Steve Blocka7e24c12009-10-30 11:49:00 +00003789 if (value == 1) {
Steve Block44f0eee2011-05-26 01:26:41 +01003790 decl(counter_operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00003791 } else {
Steve Block44f0eee2011-05-26 01:26:41 +01003792 subl(counter_operand, Immediate(value));
Steve Blocka7e24c12009-10-30 11:49:00 +00003793 }
3794 }
3795}
3796
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003797
// Emits a call into the runtime's Runtime::kDebugBreak entry via the
// CEntry stub, with zero arguments in rax and the runtime function
// address in rbx.
void MacroAssembler::DebugBreak() {
  Set(rax, 0);  // No arguments.
  LoadAddress(rbx, ExternalReference(Runtime::kDebugBreak, isolate()));
  CEntryStub ces(isolate(), 1);
  DCHECK(AllowThisStubCall(&ces));
  Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}
Ben Murdoch257744e2011-11-30 15:57:28 +00003805
3806
// Invokes the code object whose entry address is in |code|, first running
// the argument-count adaption prologue. If the prologue proves the counts
// definitely mismatch, control has already gone through the arguments
// adaptor and the direct call/jump is skipped entirely.
void MacroAssembler::InvokeCode(Register code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag,
                                const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  Label done;
  bool definitely_mismatches = false;
  InvokePrologue(expected,
                 actual,
                 Handle<Code>::null(),
                 code,
                 &done,
                 &definitely_mismatches,
                 flag,
                 Label::kNear,
                 call_wrapper);
  if (!definitely_mismatches) {
    if (flag == CALL_FUNCTION) {
      // Notify the wrapper (e.g. the debugger) around the call site.
      call_wrapper.BeforeCall(CallSize(code));
      call(code);
      call_wrapper.AfterCall();
    } else {
      DCHECK(flag == JUMP_FUNCTION);
      jmp(code);
    }
    bind(&done);
  }
}
3838
3839
// Invokes the JSFunction in |function| (must be rdi, per the calling
// convention), reading the expected argument count from its
// SharedFunctionInfo. Loads the callee context into rsi and the code
// entry into rdx before dispatching.
void MacroAssembler::InvokeFunction(Register function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  DCHECK(function.is(rdi));
  movp(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
  movp(rsi, FieldOperand(function, JSFunction::kContextOffset));
  // Expected argument count lives in a special (non-smi-tagged) field.
  LoadSharedFunctionInfoSpecialField(rbx, rdx,
      SharedFunctionInfo::kFormalParameterCountOffset);
  // Advances rdx to the end of the Code object header, to the start of
  // the executable code.
  movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));

  ParameterCount expected(rbx);
  InvokeCode(rdx, expected, actual, flag, call_wrapper);
}
3859
3860
// Invokes the JSFunction in |function| (must be rdi) with an
// explicitly-supplied expected argument count, bypassing the
// SharedFunctionInfo lookup done by the two-argument overload.
void MacroAssembler::InvokeFunction(Register function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  DCHECK(function.is(rdi));
  movp(rsi, FieldOperand(function, JSFunction::kContextOffset));
  // Advances rdx to the end of the Code object header, to the start of
  // the executable code.
  movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));

  InvokeCode(rdx, expected, actual, flag, call_wrapper);
}
3877
3878
// Invokes a compile-time-known JSFunction: materializes the handle into
// rdi and delegates to the register-based overload.
void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  Move(rdi, function);
  InvokeFunction(rdi, expected, actual, flag, call_wrapper);
}
3887
3888
// Emits the argument-count adaption check that precedes every function
// invocation. On a definite compile-time match this emits nothing. On a
// possible mismatch it sets up rax (actual), rbx (expected) and rdx (code
// entry) and calls or jumps to the ArgumentsAdaptorTrampoline; |invoke| is
// bound where execution continues when counts match at runtime.
// |*definitely_mismatches| is set when the mismatch is known statically,
// in which case the caller must skip the direct call and |done| is jumped
// to after the adaptor call instead.
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    Register code_register,
                                    Label* done,
                                    bool* definitely_mismatches,
                                    InvokeFlag flag,
                                    Label::Distance near_jump,
                                    const CallWrapper& call_wrapper) {
  bool definitely_matches = false;
  *definitely_mismatches = false;
  Label invoke;
  if (expected.is_immediate()) {
    DCHECK(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      Set(rax, actual.immediate());
      if (expected.immediate() ==
              SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
        // Don't worry about adapting arguments for built-ins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        *definitely_mismatches = true;
        Set(rbx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmpp(expected.reg(), Immediate(actual.immediate()));
      j(equal, &invoke, Label::kNear);
      DCHECK(expected.reg().is(rbx));
      Set(rax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmpp(expected.reg(), actual.reg());
      j(equal, &invoke, Label::kNear);
      DCHECK(actual.reg().is(rax));
      DCHECK(expected.reg().is(rbx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor = isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (!code_constant.is_null()) {
      // The adaptor expects the raw code entry in rdx; skip the header.
      Move(rdx, code_constant, RelocInfo::EMBEDDED_OBJECT);
      addp(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_register.is(rdx)) {
      movp(rdx, code_register);
    }

    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor));
      Call(adaptor, RelocInfo::CODE_TARGET);
      call_wrapper.AfterCall();
      if (!*definitely_mismatches) {
        // Counts might still match at runtime; rejoin the direct-call path.
        jmp(done, near_jump);
      }
    } else {
      Jump(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
3960
3961
// Emits the standard prologue for stub frames: saved rbp, new frame
// pointer, callee context, and a STUB frame-type marker.
void MacroAssembler::StubPrologue() {
  pushq(rbp);  // Caller's frame pointer.
  movp(rbp, rsp);
  Push(rsi);  // Callee's context.
  Push(Smi::FromInt(StackFrame::STUB));
}
3968
3969
// Emits the JS-function prologue. The emitted sequence must occupy exactly
// kNoCodeAgeSequenceLength bytes so the code-aging mechanism can patch it
// in place; the PredictableCodeSizeScope enforces this.
void MacroAssembler::Prologue(bool code_pre_aging) {
  PredictableCodeSizeScope predictible_code_size_scope(this,
      kNoCodeAgeSequenceLength);
  if (code_pre_aging) {
    // Pre-age the code: emit the call-based aged sequence instead of the
    // normal frame setup, padded to the same fixed length.
    Call(isolate()->builtins()->MarkCodeAsExecutedOnce(),
         RelocInfo::CODE_AGE_SEQUENCE);
    Nop(kNoCodeAgeSequenceLength - Assembler::kShortCallInstructionLength);
  } else {
    pushq(rbp);  // Caller's frame pointer.
    movp(rbp, rsp);
    Push(rsi);  // Callee's context.
    Push(rdi);  // Callee's JS function.
  }
}
3985
3986
// Emits entry into an internal frame of the given |type|: saved rbp,
// context, smi-encoded frame-type marker, and the code object being
// executed. Clobbers kScratchRegister.
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  pushq(rbp);
  movp(rbp, rsp);
  Push(rsi);  // Context.
  Push(Smi::FromInt(type));
  Move(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
  Push(kScratchRegister);
  if (emit_debug_code()) {
    // The code-object slot must have been patched to the real code object
    // by now; undefined_value here means patching was missed.
    Move(kScratchRegister,
         isolate()->factory()->undefined_value(),
         RelocInfo::EMBEDDED_OBJECT);
    cmpp(Operand(rsp, 0), kScratchRegister);
    Check(not_equal, kCodeObjectNotProperlyPatched);
  }
}
4002
4003
// Emits exit from an internal frame, verifying in debug code that the
// frame being torn down carries the expected |type| marker. Clobbers
// kScratchRegister (debug builds only).
void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (emit_debug_code()) {
    Move(kScratchRegister, Smi::FromInt(type));
    cmpp(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
    Check(equal, kStackFrameTypesMustMatch);
  }
  movp(rsp, rbp);
  popq(rbp);
}
4013
4014
// Emits the first half of exit-frame construction: frame pointer, the
// saved-entry-sp slot (patched later by EnterExitFrameEpilogue), the code
// object, and the C-entry FP / context saved into isolate-global "top"
// slots. When |save_rax| is set, rax is preserved in callee-saved r14 for
// use by EnterExitFrame.
void MacroAssembler::EnterExitFramePrologue(bool save_rax) {
  // Set up the frame structure on the stack.
  // All constants are relative to the frame pointer of the exit frame.
  DCHECK(ExitFrameConstants::kCallerSPDisplacement ==
         kFPOnStackSize + kPCOnStackSize);
  DCHECK(ExitFrameConstants::kCallerPCOffset == kFPOnStackSize);
  DCHECK(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  pushq(rbp);
  movp(rbp, rsp);

  // Reserve room for entry stack pointer and push the code object.
  DCHECK(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  Push(Immediate(0));  // Saved entry sp, patched before call.
  Move(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
  Push(kScratchRegister);  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  if (save_rax) {
    movp(r14, rax);  // Backup rax in callee-save register.
  }

  Store(ExternalReference(Isolate::kCEntryFPAddress, isolate()), rbp);
  Store(ExternalReference(Isolate::kContextAddress, isolate()), rsi);
}
Steve Blocka7e24c12009-10-30 11:49:00 +00004039
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08004040
// Emits the second half of exit-frame construction: reserves stack space
// for C call arguments (plus Windows shadow space), optionally spills all
// allocatable XMM registers below the frame, aligns rsp to the OS frame
// alignment, and patches the saved-entry-sp slot reserved by
// EnterExitFramePrologue.
void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space,
                                            bool save_doubles) {
#ifdef _WIN64
  // The Windows x64 ABI requires 32 bytes of shadow space for the callee.
  const int kShadowSpace = 4;
  arg_stack_space += kShadowSpace;
#endif
  // Optionally save all XMM registers.
  if (save_doubles) {
    // Reserve the XMM spill area at its maximum size so the layout is
    // fixed, plus the argument area, in a single rsp adjustment.
    int space = XMMRegister::kMaxNumAllocatableRegisters * kDoubleSize +
                arg_stack_space * kRegisterSize;
    subp(rsp, Immediate(space));
    // XMM slots start below the two fixed exit-frame slots (sp and code).
    int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::NumAllocatableRegisters(); i++) {
      XMMRegister reg = XMMRegister::FromAllocationIndex(i);
      movsd(Operand(rbp, offset - ((i + 1) * kDoubleSize)), reg);
    }
  } else if (arg_stack_space > 0) {
    subp(rsp, Immediate(arg_stack_space * kRegisterSize));
  }

  // Get the required frame alignment for the OS.
  const int kFrameAlignment = base::OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    DCHECK(base::bits::IsPowerOfTwo32(kFrameAlignment));
    DCHECK(is_int8(kFrameAlignment));
    andp(rsp, Immediate(-kFrameAlignment));
  }

  // Patch the saved entry sp.
  movp(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
}
4072
4073
// Emits a full exit frame for calling into C with a JS argument count in
// rax. Computes argv (pointer past the last argument) into callee-saved
// r15, which LeaveExitFrame later uses to drop the arguments.
void MacroAssembler::EnterExitFrame(int arg_stack_space, bool save_doubles) {
  EnterExitFramePrologue(true);

  // Set up argv in callee-saved register r15. It is reused in LeaveExitFrame,
  // so it must be retained across the C-call. r14 holds the argument count
  // saved from rax by the prologue.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  leap(r15, Operand(rbp, r14, times_pointer_size, offset));

  EnterExitFrameEpilogue(arg_stack_space, save_doubles);
}
4084
4085
// Emits an exit frame for API callbacks: no rax preservation, no XMM
// spilling — only the frame structure and argument space.
void MacroAssembler::EnterApiExitFrame(int arg_stack_space) {
  EnterExitFramePrologue(false);
  EnterExitFrameEpilogue(arg_stack_space, false);
}
4090
4091
// Emits teardown of a frame built by EnterExitFrame: optionally restores
// the spilled XMM registers, restores rbp, drops the JS arguments and
// receiver using the argv pointer kept in r15, pushes the return address
// back, and restores context / clears the C-entry FP slot.
void MacroAssembler::LeaveExitFrame(bool save_doubles) {
  // Registers:
  // r15 : argv
  if (save_doubles) {
    // Reload XMM registers from the fixed spill area laid out by
    // EnterExitFrameEpilogue (below the two fixed exit-frame slots).
    int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::NumAllocatableRegisters(); i++) {
      XMMRegister reg = XMMRegister::FromAllocationIndex(i);
      movsd(reg, Operand(rbp, offset - ((i + 1) * kDoubleSize)));
    }
  }
  // Get the return address from the stack and restore the frame pointer.
  movp(rcx, Operand(rbp, kFPOnStackSize));
  movp(rbp, Operand(rbp, 0 * kPointerSize));

  // Drop everything up to and including the arguments and the receiver
  // from the caller stack.
  leap(rsp, Operand(r15, 1 * kPointerSize));

  PushReturnAddressFrom(rcx);

  LeaveExitFrameEpilogue(true);
}
4114
4115
// Emits teardown of a frame built by EnterApiExitFrame: restores rbp and
// the saved top-level slots. Context restoration is optional because some
// API callers restore it themselves.
void MacroAssembler::LeaveApiExitFrame(bool restore_context) {
  movp(rsp, rbp);
  popq(rbp);

  LeaveExitFrameEpilogue(restore_context);
}
4122
4123
// Emits the shared tail of exit-frame teardown: optionally restores rsi
// from the isolate's saved context slot (clearing the slot in debug
// builds) and zeroes the C-entry frame-pointer slot to mark the exit
// frame as gone.
void MacroAssembler::LeaveExitFrameEpilogue(bool restore_context) {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  Operand context_operand = ExternalOperand(context_address);
  if (restore_context) {
    movp(rsi, context_operand);
  }
#ifdef DEBUG
  movp(context_operand, Immediate(0));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
                                       isolate());
  Operand c_entry_fp_operand = ExternalOperand(c_entry_fp_address);
  movp(c_entry_fp_operand, Immediate(0));
}
4141
4142
// Emits the fast-path security check for accessing a global proxy: passes
// when the current lexical context and |holder_reg|'s native context are
// the same, or when their security tokens compare equal; otherwise jumps
// to |miss|. Clobbers |scratch| and kScratchRegister.
void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  DCHECK(!holder_reg.is(scratch));
  DCHECK(!scratch.is(kScratchRegister));
  // Load current lexical context from the stack frame.
  movp(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (emit_debug_code()) {
    cmpp(scratch, Immediate(0));
    Check(not_equal, kWeShouldNotHaveAnEmptyLexicalContext);
  }
  // Load the native context of the current context.
  int offset =
      Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
  movp(scratch, FieldOperand(scratch, offset));
  movp(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));

  // Check the context is a native context.
  if (emit_debug_code()) {
    Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
        isolate()->factory()->native_context_map());
    Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
  }

  // Check if both contexts are the same.
  cmpp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  j(equal, &same_contexts);

  // Compare security tokens.
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.

  // Check the context is a native context.
  if (emit_debug_code()) {
    // Preserve original value of holder_reg.
    Push(holder_reg);
    movp(holder_reg,
         FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
    CompareRoot(holder_reg, Heap::kNullValueRootIndex);
    Check(not_equal, kJSGlobalProxyContextShouldNotBeNull);

    // Read the first word and compare to native_context_map().
    movp(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    CompareRoot(holder_reg, Heap::kNativeContextMapRootIndex);
    Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
    Pop(holder_reg);
  }

  movp(kScratchRegister,
       FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  int token_offset =
      Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
  movp(scratch, FieldOperand(scratch, token_offset));
  cmpp(scratch, FieldOperand(kScratchRegister, token_offset));
  j(not_equal, miss);

  bind(&same_contexts);
}
4206
4207
// Compute the hash code from the untagged key. This must be kept in sync with
// ComputeIntegerHash in utils.h and KeyedLoadGenericStub in
// code-stub-hydrogen.cc
// On entry |r0| holds the untagged 32-bit key; on exit it holds the hash.
// |scratch| is clobbered.
void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
  // First of all we assign the hash seed to scratch.
  LoadRoot(scratch, Heap::kHashSeedRootIndex);
  SmiToInteger32(scratch, scratch);

  // Xor original key with a seed.
  xorl(r0, scratch);

  // Compute the hash code from the untagged key. This must be kept in sync
  // with ComputeIntegerHash in utils.h.
  //
  // hash = ~hash + (hash << 15);
  movl(scratch, r0);
  notl(r0);
  shll(scratch, Immediate(15));
  addl(r0, scratch);
  // hash = hash ^ (hash >> 12);
  movl(scratch, r0);
  shrl(scratch, Immediate(12));
  xorl(r0, scratch);
  // hash = hash + (hash << 2);
  leal(r0, Operand(r0, r0, times_4, 0));
  // hash = hash ^ (hash >> 4);
  movl(scratch, r0);
  shrl(scratch, Immediate(4));
  xorl(r0, scratch);
  // hash = hash * 2057;
  imull(r0, r0, Immediate(2057));
  // hash = hash ^ (hash >> 16);
  movl(scratch, r0);
  shrl(scratch, Immediate(16));
  xorl(r0, scratch);
}
4244
4245
4246
// Emits an unrolled open-addressing probe of a SeededNumberDictionary.
// Jumps to |miss| when the key is not found within kNumberDictionaryProbes
// probes or the found entry is not a normal property; otherwise loads the
// entry's value into |result|.
void MacroAssembler::LoadFromNumberDictionary(Label* miss,
                                              Register elements,
                                              Register key,
                                              Register r0,
                                              Register r1,
                                              Register r2,
                                              Register result) {
  // Register use:
  //
  // elements - holds the slow-case elements of the receiver on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // key      - holds the smi key on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // Scratch registers:
  //
  // r0 - holds the untagged key on entry and holds the hash once computed.
  //
  // r1 - used to hold the capacity mask of the dictionary
  //
  // r2 - used for the index into the dictionary.
  //
  // result - holds the result on exit if the load succeeded.
  //          Allowed to be the same as 'key' or 'result'.
  //          Unchanged on bailout so 'key' or 'result' can be used
  //          in further computation.

  Label done;

  GetNumberHash(r0, r1);

  // Compute capacity mask.
  SmiToInteger32(r1, FieldOperand(elements,
                                  SeededNumberDictionary::kCapacityOffset));
  decl(r1);  // Capacity is a power of two; capacity - 1 is the mask.

  // Generate an unrolled loop that performs a few probes before giving up.
  for (int i = 0; i < kNumberDictionaryProbes; i++) {
    // Use r2 for index calculations and keep the hash intact in r0.
    movp(r2, r0);
    // Compute the masked index: (hash + i + i * i) & mask.
    if (i > 0) {
      addl(r2, Immediate(SeededNumberDictionary::GetProbeOffset(i)));
    }
    andp(r2, r1);

    // Scale the index by multiplying by the entry size.
    DCHECK(SeededNumberDictionary::kEntrySize == 3);
    leap(r2, Operand(r2, r2, times_2, 0));  // r2 = r2 * 3

    // Check if the key matches.
    cmpp(key, FieldOperand(elements,
                           r2,
                           times_pointer_size,
                           SeededNumberDictionary::kElementsStartOffset));
    if (i != (kNumberDictionaryProbes - 1)) {
      j(equal, &done);
    } else {
      // Last probe: any mismatch is a definite miss.
      j(not_equal, miss);
    }
  }

  bind(&done);
  // Check that the value is a normal property.
  const int kDetailsOffset =
      SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  DCHECK_EQ(NORMAL, 0);
  Test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
       Smi::FromInt(PropertyDetails::TypeField::kMask));
  j(not_zero, miss);

  // Get the value at the masked, scaled index.
  const int kValueOffset =
      SeededNumberDictionary::kElementsStartOffset + kPointerSize;
  movp(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
}
4324
4325
// Emits a load of the current allocation top (for the space selected by
// |flags|) into |result|. When RESULT_CONTAINS_TOP is set, |result| already
// holds top and only a debug-mode verification is emitted; |scratch| must
// be invalid in that case. Otherwise, if |scratch| is valid it is left
// holding the allocation-top address for a later UpdateAllocationTopHelper.
void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    DCHECK(!scratch.is_valid());
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    Operand top_operand = ExternalOperand(allocation_top);
    cmpp(result, top_operand);
    Check(equal, kUnexpectedAllocationTop);
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available,
  // and keep address in scratch until call to UpdateAllocationTopHelper.
  if (scratch.is_valid()) {
    LoadAddress(scratch, allocation_top);
    movp(result, Operand(scratch, 0));
  } else {
    Load(result, allocation_top);
  }
}
4354
4355
// Ensures that the allocation pointer in |result| is double-aligned. When
// pointers are already double-sized this is only a debug check; otherwise a
// one-pointer filler object is written at |result| and the pointer is bumped
// by half a double. Jumps to |gc_required| if bumping would cross the
// allocation limit (only checked for old-data-space pretenured allocations).
// Clobbers kScratchRegister; must not clobber |scratch|.
void MacroAssembler::MakeSureDoubleAlignedHelper(Register result,
                                                 Register scratch,
                                                 Label* gc_required,
                                                 AllocationFlags flags) {
  if (kPointerSize == kDoubleSize) {
    // Every object is already double-aligned; just verify in debug mode.
    if (FLAG_debug_code) {
      testl(result, Immediate(kDoubleAlignmentMask));
      Check(zero, kAllocationIsNotDoubleAligned);
    }
  } else {
    // Align the next allocation. Storing the filler map without checking top
    // is safe in new-space because the limit of the heap is aligned there.
    DCHECK(kPointerSize * 2 == kDoubleSize);
    DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    // Make sure scratch is not clobbered by this function as it might be
    // used in UpdateAllocationTopHelper later.
    DCHECK(!scratch.is(kScratchRegister));
    Label aligned;
    testl(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
      // In old data space the limit is not necessarily aligned, so the
      // filler write must be guarded by an explicit limit check.
      ExternalReference allocation_limit =
          AllocationUtils::GetAllocationLimitReference(isolate(), flags);
      cmpp(result, ExternalOperand(allocation_limit));
      j(above_equal, gc_required);
    }
    LoadRoot(kScratchRegister, Heap::kOnePointerFillerMapRootIndex);
    movp(Operand(result, 0), kScratchRegister);
    addp(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }
}
4389
4390
// Writes |result_end| back as the new allocation top of the space selected
// by |flags|. If |scratch| is valid it must already hold the address of the
// allocation top (as arranged by LoadAllocationTopHelper).
void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch,
                                               AllocationFlags flags) {
  if (emit_debug_code()) {
    // The new top must remain object-aligned.
    testp(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, kUnalignedAllocationInNewSpace);
  }

  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Update new top.
  if (scratch.is_valid()) {
    // Scratch already contains address of allocation top.
    movp(Operand(scratch, 0), result_end);
  } else {
    Store(allocation_top, result_end);
  }
}
4410
4411
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004412void MacroAssembler::Allocate(int object_size,
4413 Register result,
4414 Register result_end,
4415 Register scratch,
4416 Label* gc_required,
4417 AllocationFlags flags) {
4418 DCHECK((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
4419 DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
John Reck59135872010-11-02 12:39:01 -07004420 if (!FLAG_inline_new) {
Steve Block44f0eee2011-05-26 01:26:41 +01004421 if (emit_debug_code()) {
John Reck59135872010-11-02 12:39:01 -07004422 // Trash the registers to simulate an allocation failure.
4423 movl(result, Immediate(0x7091));
4424 if (result_end.is_valid()) {
4425 movl(result_end, Immediate(0x7191));
4426 }
4427 if (scratch.is_valid()) {
4428 movl(scratch, Immediate(0x7291));
4429 }
4430 }
4431 jmp(gc_required);
4432 return;
4433 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004434 DCHECK(!result.is(result_end));
Steve Blocka7e24c12009-10-30 11:49:00 +00004435
4436 // Load address of new object into result.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08004437 LoadAllocationTopHelper(result, scratch, flags);
Steve Blocka7e24c12009-10-30 11:49:00 +00004438
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004439 if ((flags & DOUBLE_ALIGNMENT) != 0) {
4440 MakeSureDoubleAlignedHelper(result, scratch, gc_required, flags);
4441 }
4442
Steve Blocka7e24c12009-10-30 11:49:00 +00004443 // Calculate new top and bail out if new space is exhausted.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004444 ExternalReference allocation_limit =
4445 AllocationUtils::GetAllocationLimitReference(isolate(), flags);
Steve Block6ded16b2010-05-10 14:33:55 +01004446
4447 Register top_reg = result_end.is_valid() ? result_end : result;
4448
Steve Block1e0659c2011-05-24 12:43:12 +01004449 if (!top_reg.is(result)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004450 movp(top_reg, result);
Steve Block6ded16b2010-05-10 14:33:55 +01004451 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004452 addp(top_reg, Immediate(object_size));
Steve Block1e0659c2011-05-24 12:43:12 +01004453 j(carry, gc_required);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004454 Operand limit_operand = ExternalOperand(allocation_limit);
4455 cmpp(top_reg, limit_operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00004456 j(above, gc_required);
4457
4458 // Update allocation top.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004459 UpdateAllocationTopHelper(top_reg, scratch, flags);
Steve Blocka7e24c12009-10-30 11:49:00 +00004460
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004461 bool tag_result = (flags & TAG_OBJECT) != 0;
Steve Block6ded16b2010-05-10 14:33:55 +01004462 if (top_reg.is(result)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004463 if (tag_result) {
4464 subp(result, Immediate(object_size - kHeapObjectTag));
Steve Block6ded16b2010-05-10 14:33:55 +01004465 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004466 subp(result, Immediate(object_size));
Steve Block6ded16b2010-05-10 14:33:55 +01004467 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004468 } else if (tag_result) {
Steve Block6ded16b2010-05-10 14:33:55 +01004469 // Tag the result if requested.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004470 DCHECK(kHeapObjectTag == 1);
4471 incp(result);
Steve Blocka7e24c12009-10-30 11:49:00 +00004472 }
4473}
4474
4475
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004476void MacroAssembler::Allocate(int header_size,
4477 ScaleFactor element_size,
4478 Register element_count,
4479 Register result,
4480 Register result_end,
4481 Register scratch,
4482 Label* gc_required,
4483 AllocationFlags flags) {
4484 DCHECK((flags & SIZE_IN_WORDS) == 0);
4485 leap(result_end, Operand(element_count, element_size, header_size));
4486 Allocate(result_end, result, result_end, scratch, gc_required, flags);
Steve Blocka7e24c12009-10-30 11:49:00 +00004487}
4488
4489
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004490void MacroAssembler::Allocate(Register object_size,
4491 Register result,
4492 Register result_end,
4493 Register scratch,
4494 Label* gc_required,
4495 AllocationFlags flags) {
4496 DCHECK((flags & SIZE_IN_WORDS) == 0);
John Reck59135872010-11-02 12:39:01 -07004497 if (!FLAG_inline_new) {
Steve Block44f0eee2011-05-26 01:26:41 +01004498 if (emit_debug_code()) {
John Reck59135872010-11-02 12:39:01 -07004499 // Trash the registers to simulate an allocation failure.
4500 movl(result, Immediate(0x7091));
4501 movl(result_end, Immediate(0x7191));
4502 if (scratch.is_valid()) {
4503 movl(scratch, Immediate(0x7291));
4504 }
4505 // object_size is left unchanged by this function.
4506 }
4507 jmp(gc_required);
4508 return;
4509 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004510 DCHECK(!result.is(result_end));
John Reck59135872010-11-02 12:39:01 -07004511
Steve Blocka7e24c12009-10-30 11:49:00 +00004512 // Load address of new object into result.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08004513 LoadAllocationTopHelper(result, scratch, flags);
Steve Blocka7e24c12009-10-30 11:49:00 +00004514
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004515 if ((flags & DOUBLE_ALIGNMENT) != 0) {
4516 MakeSureDoubleAlignedHelper(result, scratch, gc_required, flags);
Steve Blocka7e24c12009-10-30 11:49:00 +00004517 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004518
4519 // Calculate new top and bail out if new space is exhausted.
4520 ExternalReference allocation_limit =
4521 AllocationUtils::GetAllocationLimitReference(isolate(), flags);
4522 if (!object_size.is(result_end)) {
4523 movp(result_end, object_size);
4524 }
4525 addp(result_end, result);
Steve Block1e0659c2011-05-24 12:43:12 +01004526 j(carry, gc_required);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004527 Operand limit_operand = ExternalOperand(allocation_limit);
4528 cmpp(result_end, limit_operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00004529 j(above, gc_required);
4530
4531 // Update allocation top.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004532 UpdateAllocationTopHelper(result_end, scratch, flags);
Steve Blocka7e24c12009-10-30 11:49:00 +00004533
4534 // Tag the result if requested.
4535 if ((flags & TAG_OBJECT) != 0) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004536 addp(result, Immediate(kHeapObjectTag));
Steve Blocka7e24c12009-10-30 11:49:00 +00004537 }
4538}
4539
4540
// Rewinds the new-space allocation top to |object|, undoing the most recent
// allocation. |object| may be tagged; the tag is stripped in place. In debug
// builds, checks that |object| is below the current top (i.e. was actually
// allocated).
void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Make sure the object has no tag before resetting top.
  andp(object, Immediate(~kHeapObjectTagMask));
  Operand top_operand = ExternalOperand(new_space_allocation_top);
#ifdef DEBUG
  cmpp(object, top_operand);
  Check(below, kUndoAllocationOfNonAllocatedMemory);
#endif
  movp(top_operand, object);
}
4554
4555
Steve Block3ce2e202009-11-05 08:53:23 +00004556void MacroAssembler::AllocateHeapNumber(Register result,
4557 Register scratch,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004558 Label* gc_required,
4559 MutableMode mode) {
Steve Block3ce2e202009-11-05 08:53:23 +00004560 // Allocate heap number in new space.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004561 Allocate(HeapNumber::kSize, result, scratch, no_reg, gc_required, TAG_OBJECT);
4562
4563 Heap::RootListIndex map_index = mode == MUTABLE
4564 ? Heap::kMutableHeapNumberMapRootIndex
4565 : Heap::kHeapNumberMapRootIndex;
Steve Block3ce2e202009-11-05 08:53:23 +00004566
4567 // Set the map.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004568 LoadRoot(kScratchRegister, map_index);
4569 movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
Steve Block3ce2e202009-11-05 08:53:23 +00004570}
4571
4572
// Allocates a sequential two-byte string of |length| characters in new
// space, leaving a tagged pointer in |result|. Initializes map, length
// (as a smi) and hash field; the character payload is left uninitialized.
// Clobbers scratch1-3 and kScratchRegister; jumps to |gc_required| on
// failure.
void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  const int kHeaderAlignment = SeqTwoByteString::kHeaderSize &
                               kObjectAlignmentMask;
  DCHECK(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  leap(scratch1, Operand(length, length, times_1, kObjectAlignmentMask +
                kHeaderAlignment));
  // Round down to the alignment boundary, then remove the header's own
  // contribution so that header + payload together stay aligned.
  andp(scratch1, Immediate(~kObjectAlignmentMask));
  if (kHeaderAlignment > 0) {
    subp(scratch1, Immediate(kHeaderAlignment));
  }

  // Allocate two byte string in new space.
  Allocate(SeqTwoByteString::kHeaderSize,
           times_1,
           scratch1,
           result,
           scratch2,
           scratch3,
           gc_required,
           TAG_OBJECT);

  // Set the map, length and hash field.
  LoadRoot(kScratchRegister, Heap::kStringMapRootIndex);
  movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
  Integer32ToSmi(scratch1, length);
  movp(FieldOperand(result, String::kLengthOffset), scratch1);
  movp(FieldOperand(result, String::kHashFieldOffset),
       Immediate(String::kEmptyHashField));
}
4610
4611
// Allocates a sequential one-byte string of |length| characters in new
// space, leaving a tagged pointer in |result|. Initializes map, length
// (as a smi) and hash field; the character payload is left uninitialized.
// Clobbers scratch1-3 and kScratchRegister; jumps to |gc_required| on
// failure.
void MacroAssembler::AllocateOneByteString(Register result, Register length,
                                           Register scratch1, Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  const int kHeaderAlignment = SeqOneByteString::kHeaderSize &
                               kObjectAlignmentMask;
  movl(scratch1, length);
  DCHECK(kCharSize == 1);
  // Round the character count up to the alignment boundary, then remove the
  // header's own contribution so header + payload together stay aligned.
  addp(scratch1, Immediate(kObjectAlignmentMask + kHeaderAlignment));
  andp(scratch1, Immediate(~kObjectAlignmentMask));
  if (kHeaderAlignment > 0) {
    subp(scratch1, Immediate(kHeaderAlignment));
  }

  // Allocate one-byte string in new space.
  Allocate(SeqOneByteString::kHeaderSize,
           times_1,
           scratch1,
           result,
           scratch2,
           scratch3,
           gc_required,
           TAG_OBJECT);

  // Set the map, length and hash field.
  LoadRoot(kScratchRegister, Heap::kOneByteStringMapRootIndex);
  movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
  Integer32ToSmi(scratch1, length);
  movp(FieldOperand(result, String::kLengthOffset), scratch1);
  movp(FieldOperand(result, String::kHashFieldOffset),
       Immediate(String::kEmptyHashField));
}
4646
4647
// Allocates a two-byte cons string in new space, leaving a tagged pointer in
// |result|. Only the map is initialized; first/second/length/hash are left
// for the caller. Clobbers scratch1, scratch2 and kScratchRegister; jumps to
// |gc_required| on failure.
void MacroAssembler::AllocateTwoByteConsString(Register result,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  // Allocate a cons string object in new space.
  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex);
  movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}
4660
4661
// Allocates a one-byte cons string in new space, leaving a tagged pointer in
// |result|. Only the map is initialized; first/second/length/hash are left
// for the caller. Clobbers scratch1, scratch2 and kScratchRegister; jumps to
// |gc_required| on failure.
void MacroAssembler::AllocateOneByteConsString(Register result,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  // Allocate a cons string object in new space.
  Allocate(ConsString::kSize,
           result,
           scratch1,
           scratch2,
           gc_required,
           TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  LoadRoot(kScratchRegister, Heap::kConsOneByteStringMapRootIndex);
  movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}
4677
4678
// Allocates a two-byte sliced string in new space, leaving a tagged pointer
// in |result|. Only the map is initialized; parent/offset/length/hash are
// left for the caller. Clobbers scratch1, scratch2 and kScratchRegister;
// jumps to |gc_required| on failure.
void MacroAssembler::AllocateTwoByteSlicedString(Register result,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  // Allocate a sliced string object in new space.
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  LoadRoot(kScratchRegister, Heap::kSlicedStringMapRootIndex);
  movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}
4691
4692
// Allocates a one-byte sliced string in new space, leaving a tagged pointer
// in |result|. Only the map is initialized; parent/offset/length/hash are
// left for the caller. Clobbers scratch1, scratch2 and kScratchRegister;
// jumps to |gc_required| on failure.
void MacroAssembler::AllocateOneByteSlicedString(Register result,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  // Allocate a sliced string object in new space.
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  LoadRoot(kScratchRegister, Heap::kSlicedOneByteStringMapRootIndex);
  movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}
4705
4706
// Copy memory, byte-by-byte, from source to destination. Not optimized for
// long or aligned copies. The contents of scratch and length are destroyed.
// Destination is incremented by length, source, length and scratch are
// clobbered.
// A simpler loop is faster on small copies, but slower on large ones.
// The cld() instruction must have been emitted, to set the direction flag(),
// before calling this function.
void MacroAssembler::CopyBytes(Register destination,
                               Register source,
                               Register length,
                               int min_length,
                               Register scratch) {
  DCHECK(min_length >= 0);
  if (emit_debug_code()) {
    cmpl(length, Immediate(min_length));
    Assert(greater_equal, kInvalidMinLength);
  }
  Label short_loop, len8, len16, len24, done, short_string;

  // Copies of up to 4 pointers are dispatched to specialized unrolled code;
  // anything shorter than one pointer goes through the byte loop.
  const int kLongStringLimit = 4 * kPointerSize;
  if (min_length <= kLongStringLimit) {
    cmpl(length, Immediate(kPointerSize));
    j(below, &short_string, Label::kNear);
  }

  // The rep movs below requires the fixed string-instruction registers.
  DCHECK(source.is(rsi));
  DCHECK(destination.is(rdi));
  DCHECK(length.is(rcx));

  if (min_length <= kLongStringLimit) {
    cmpl(length, Immediate(2 * kPointerSize));
    j(below_equal, &len8, Label::kNear);
    cmpl(length, Immediate(3 * kPointerSize));
    j(below_equal, &len16, Label::kNear);
    cmpl(length, Immediate(4 * kPointerSize));
    j(below_equal, &len24, Label::kNear);
  }

  // Because source is 8-byte aligned in our uses of this function,
  // we keep source aligned for the rep movs operation by copying the odd bytes
  // at the end of the ranges.
  movp(scratch, length);
  shrl(length, Immediate(kPointerSizeLog2));
  repmovsp();
  // Move remaining bytes of length.
  // The final (possibly overlapping) pointer-sized chunk covers the tail
  // bytes that rep movs did not copy.
  andl(scratch, Immediate(kPointerSize - 1));
  movp(length, Operand(source, scratch, times_1, -kPointerSize));
  movp(Operand(destination, scratch, times_1, -kPointerSize), length);
  addp(destination, scratch);

  if (min_length <= kLongStringLimit) {
    jmp(&done, Label::kNear);
    // Unrolled copies for 1..4 pointers: each entry point falls through to
    // the shorter ones, and the tail chunk is copied with overlap.
    bind(&len24);
    movp(scratch, Operand(source, 2 * kPointerSize));
    movp(Operand(destination, 2 * kPointerSize), scratch);
    bind(&len16);
    movp(scratch, Operand(source, kPointerSize));
    movp(Operand(destination, kPointerSize), scratch);
    bind(&len8);
    movp(scratch, Operand(source, 0));
    movp(Operand(destination, 0), scratch);
    // Move remaining bytes of length.
    movp(scratch, Operand(source, length, times_1, -kPointerSize));
    movp(Operand(destination, length, times_1, -kPointerSize), scratch);
    addp(destination, length);
    jmp(&done, Label::kNear);

    bind(&short_string);
    if (min_length == 0) {
      // Only a zero-length copy may skip the byte loop entirely.
      testl(length, length);
      j(zero, &done, Label::kNear);
    }

    bind(&short_loop);
    movb(scratch, Operand(source, 0));
    movb(Operand(destination, 0), scratch);
    incp(source);
    incp(destination);
    decl(length);
    j(not_zero, &short_loop);
  }

  bind(&done);
}
4791
4792
// Stores |filler| into every pointer-sized field in the half-open range
// [start_offset, end_offset). On exit start_offset has been advanced to
// end_offset (or past it, if the range was not pointer-aligned).
void MacroAssembler::InitializeFieldsWithFiller(Register start_offset,
                                                Register end_offset,
                                                Register filler) {
  Label loop, entry;
  // Test-at-the-bottom loop: jump straight to the condition so an empty
  // range stores nothing.
  jmp(&entry);
  bind(&loop);
  movp(Operand(start_offset, 0), filler);
  addp(start_offset, Immediate(kPointerSize));
  bind(&entry);
  cmpp(start_offset, end_offset);
  j(less, &loop);
}
4805
4806
// Loads into |dst| the context |context_chain_length| hops up the context
// chain from the current context in rsi (0 means the current context
// itself). In debug mode, verifies the result is not a with-context.
void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    movp(dst, Operand(rsi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    for (int i = 1; i < context_chain_length; i++) {
      movp(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    }
  } else {
    // Slot is in the current function context. Move it into the
    // destination register in case we store into it (the write barrier
    // cannot be allowed to destroy the context in rsi).
    movp(dst, rsi);
  }

  // We should not have found a with context by walking the context
  // chain (i.e., the static scope chain and runtime context chain do
  // not agree). A variable occurring in such a scope should have
  // slot type LOOKUP and not CONTEXT.
  if (emit_debug_code()) {
    CompareRoot(FieldOperand(dst, HeapObject::kMapOffset),
                Heap::kWithContextMapRootIndex);
    Check(not_equal, kVariableResolvedToWithContext);
  }
}
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004832
// If |map_in_out| holds the native context's cached JSArray map for
// |expected_kind|, replaces it with the cached map for |transitioned_kind|;
// otherwise jumps to |no_map_match|. Clobbers |scratch|.
void MacroAssembler::LoadTransitionedArrayMapConditional(
    ElementsKind expected_kind,
    ElementsKind transitioned_kind,
    Register map_in_out,
    Register scratch,
    Label* no_map_match) {
  // Load the global or builtins object from the current context.
  movp(scratch,
       Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  movp(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));

  // Check that the function's map is the same as the expected cached map.
  movp(scratch, Operand(scratch,
                        Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX)));

  // The cached maps are stored in a fixed array indexed by elements kind.
  int offset = expected_kind * kPointerSize +
      FixedArrayBase::kHeaderSize;
  cmpp(map_in_out, FieldOperand(scratch, offset));
  j(not_equal, no_map_match);

  // Use the transitioned cached map.
  offset = transitioned_kind * kPointerSize +
      FixedArrayBase::kHeaderSize;
  movp(map_in_out, FieldOperand(scratch, offset));
}
4858
4859
#ifdef _WIN64
// The Windows x64 calling convention passes the first four arguments in
// registers.
static const int kRegisterPassedArguments = 4;
#else
// The System V AMD64 ABI (Linux/Mac) passes the first six integer arguments
// in registers.
static const int kRegisterPassedArguments = 6;
#endif
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004865
// Loads into |function| the global function at slot |index| of the native
// context reachable from the current context in rsi.
void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the global or builtins object from the current context.
  movp(function,
       Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  // Load the native context from the global or builtins object.
  movp(function, FieldOperand(function, GlobalObject::kNativeContextOffset));
  // Load the function from the native context.
  movp(function, Operand(function, Context::SlotOffset(index)));
}
4875
4876
4877void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
4878 Register map) {
4879 // Load the initial map. The global functions all have initial maps.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004880 movp(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
Steve Block44f0eee2011-05-26 01:26:41 +01004881 if (emit_debug_code()) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01004882 Label ok, fail;
Ben Murdoch257744e2011-11-30 15:57:28 +00004883 CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004884 jmp(&ok);
4885 bind(&fail);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004886 Abort(kGlobalFunctionsMustHaveInitialMap);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004887 bind(&ok);
4888 }
4889}
4890
4891
Leon Clarke4515c472010-02-03 11:58:03 +00004892int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004893 // On Windows 64 stack slots are reserved by the caller for all arguments
4894 // including the ones passed in registers, and space is always allocated for
4895 // the four register arguments even if the function takes fewer than four
4896 // arguments.
4897 // On AMD64 ABI (Linux/Mac) the first six arguments are passed in registers
4898 // and the caller does not reserve stack slots for them.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004899 DCHECK(num_arguments >= 0);
Leon Clarke4515c472010-02-03 11:58:03 +00004900#ifdef _WIN64
Steve Block44f0eee2011-05-26 01:26:41 +01004901 const int kMinimumStackSlots = kRegisterPassedArguments;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004902 if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots;
4903 return num_arguments;
Leon Clarke4515c472010-02-03 11:58:03 +00004904#else
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004905 if (num_arguments < kRegisterPassedArguments) return 0;
4906 return num_arguments - kRegisterPassedArguments;
Leon Clarke4515c472010-02-03 11:58:03 +00004907#endif
Leon Clarke4515c472010-02-03 11:58:03 +00004908}
4909
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004910
// Debug-mode checks before writing a character into a sequential string:
// |string| must be a heap object whose instance type matches
// |encoding_mask|, and the untagged |index| must be within [0, length).
// Aborts/Checks on violation. |value| is used as a temporary but is saved
// and restored around the type check; |index| is restored before returning.
void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
                                               Register index,
                                               Register value,
                                               uint32_t encoding_mask) {
  Label is_object;
  JumpIfNotSmi(string, &is_object);
  Abort(kNonObject);
  bind(&is_object);

  // Borrow |value| to hold the instance type while checking the encoding.
  Push(value);
  movp(value, FieldOperand(string, HeapObject::kMapOffset));
  movzxbp(value, FieldOperand(value, Map::kInstanceTypeOffset));

  andb(value, Immediate(kStringRepresentationMask | kStringEncodingMask));
  cmpp(value, Immediate(encoding_mask));
  Pop(value);
  Check(equal, kUnexpectedStringType);

  // The index is assumed to be untagged coming in, tag it to compare with the
  // string length without using a temp register, it is restored at the end of
  // this function.
  Integer32ToSmi(index, index);
  SmiCompare(index, FieldOperand(string, String::kLengthOffset));
  Check(less, kIndexIsTooLarge);

  SmiCompare(index, Smi::FromInt(0));
  Check(greater_equal, kIndexIsNegative);

  // Restore the index
  SmiToInteger32(index, index);
}
4942
4943
// Aligns rsp for a C call with |num_arguments| arguments, reserving the
// required argument stack slots plus one extra slot in which the original
// rsp is saved (CallCFunction restores it from there). Clobbers
// kScratchRegister.
void MacroAssembler::PrepareCallCFunction(int num_arguments) {
  int frame_alignment = base::OS::ActivationFrameAlignment();
  DCHECK(frame_alignment != 0);
  DCHECK(num_arguments >= 0);

  // Make stack end at alignment and allocate space for arguments and old rsp.
  movp(kScratchRegister, rsp);
  DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  subp(rsp, Immediate((argument_slots_on_stack + 1) * kRegisterSize));
  // Alignment is a power of two, so masking with its negation rounds down.
  andp(rsp, Immediate(-frame_alignment));
  movp(Operand(rsp, argument_slots_on_stack * kRegisterSize), kScratchRegister);
}
4958
4959
4960void MacroAssembler::CallCFunction(ExternalReference function,
4961 int num_arguments) {
Steve Block44f0eee2011-05-26 01:26:41 +01004962 LoadAddress(rax, function);
Leon Clarke4515c472010-02-03 11:58:03 +00004963 CallCFunction(rax, num_arguments);
4964}
4965
4966
// Calls the C function whose address is in |function|. Assumes the stack
// was set up by PrepareCallCFunction; afterwards rsp is restored from the
// slot where PrepareCallCFunction stashed the original value.
void MacroAssembler::CallCFunction(Register function, int num_arguments) {
  DCHECK(has_frame());
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(function);
  DCHECK(base::OS::ActivationFrameAlignment() != 0);
  DCHECK(num_arguments >= 0);
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  // Tear down the aligned frame by reloading the caller's saved rsp.
  movp(rsp, Operand(rsp, argument_slots_on_stack * kRegisterSize));
}
4981
Steve Blockd0582a62009-12-15 09:54:21 +00004982
#ifdef DEBUG
// Returns true if any two of the valid registers among the arguments name
// the same machine register. Invalid (unused) argument slots are ignored.
// The check compares the count of valid arguments against the population
// of the combined register set: a mismatch means at least one alias.
bool AreAliased(Register reg1,
                Register reg2,
                Register reg3,
                Register reg4,
                Register reg5,
                Register reg6,
                Register reg7,
                Register reg8) {
  const Register candidates[] = { reg1, reg2, reg3, reg4,
                                  reg5, reg6, reg7, reg8 };
  int n_of_valid_regs = 0;
  RegList regs = 0;
  for (size_t i = 0; i < sizeof(candidates) / sizeof(candidates[0]); i++) {
    if (candidates[i].is_valid()) {
      n_of_valid_regs++;
      regs |= candidates[i].bit();
    }
  }
  // NumRegs counts the distinct registers present in the bit set.
  return n_of_valid_regs != NumRegs(regs);
}
#endif
Ben Murdoch3ef787d2012-04-12 10:51:47 +01005010
5011
// Sets up an assembler that writes directly over |size| bytes of existing
// code at |address|.
CodePatcher::CodePatcher(byte* address, int size)
    : address_(address),
      size_(size),
      masm_(NULL, address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap on order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
5021
5022
// Finalizes the patch: flushes the instruction cache over the patched
// region and verifies that exactly |size_| bytes were emitted.
CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CpuFeatures::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  DCHECK(masm_.pc_ == address_ + size_);
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
5031
Ben Murdoch3ef787d2012-04-12 10:51:47 +01005032
// Tests |mask| against the flags word in the MemoryChunk header of the
// page containing |object| and jumps to |condition_met| when condition
// |cc| (zero or not_zero) holds for the masked bits.
void MacroAssembler::CheckPageFlag(
    Register object,
    Register scratch,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  DCHECK(cc == zero || cc == not_zero);
  if (scratch.is(object)) {
    // Mask in place: clears the in-page offset bits, leaving the page start.
    andp(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    movp(scratch, Immediate(~Page::kPageAlignmentMask));
    andp(scratch, object);
  }
  // Use the narrower byte test when the whole mask fits in one byte.
  if (mask < (1 << kBitsPerByte)) {
    testb(Operand(scratch, MemoryChunk::kFlagsOffset),
          Immediate(static_cast<uint8_t>(mask)));
  } else {
    testl(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}
5055
5056
// Jumps to |if_deprecated| when |map|'s Deprecated bit (in bit field 3)
// is set. Emits no code at all for maps that can never be deprecated.
void MacroAssembler::CheckMapDeprecated(Handle<Map> map,
                                        Register scratch,
                                        Label* if_deprecated) {
  if (map->CanBeDeprecated()) {
    Move(scratch, map);
    movl(scratch, FieldOperand(scratch, Map::kBitField3Offset));
    andl(scratch, Immediate(Map::Deprecated::kMask));
    j(not_zero, if_deprecated);
  }
}
5067
5068
// Jumps to |on_black| if the two mark bits for |object| match the black
// bit pattern "10". Clobbers rcx and both scratch registers.
void MacroAssembler::JumpIfBlack(Register object,
                                 Register bitmap_scratch,
                                 Register mask_scratch,
                                 Label* on_black,
                                 Label::Distance on_black_distance) {
  DCHECK(!AreAliased(object, bitmap_scratch, mask_scratch, rcx));
  GetMarkBits(object, bitmap_scratch, mask_scratch);

  DCHECK(strcmp(Marking::kBlackBitPattern, "10") == 0);
  // The mask_scratch register contains a 1 at the position of the first bit
  // and a 0 at all other positions, including the position of the second bit.
  movp(rcx, mask_scratch);
  // Make rcx into a mask that covers both marking bits using the operation
  // rcx = mask | (mask << 1).
  leap(rcx, Operand(mask_scratch, mask_scratch, times_2, 0));
  // Note that we are using a 4-byte aligned 8-byte load.
  andp(rcx, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  // Black means: first bit set, second bit clear — so after masking both
  // bits, only the first-bit mask should survive.
  cmpp(mask_scratch, rcx);
  j(equal, on_black, on_black_distance);
}
5089
5090
// Detect some, but not all, common pointer-free objects. This is used by the
// incremental write barrier which doesn't care about oddballs (they are always
// marked black immediately so this code is not hit).
// Jumps to |not_data_object| unless |value| is a HeapNumber or a
// non-indirect string. Clobbers |scratch| (holds the map).
void MacroAssembler::JumpIfDataObject(
    Register value,
    Register scratch,
    Label* not_data_object,
    Label::Distance not_data_object_distance) {
  Label is_data_object;
  movp(scratch, FieldOperand(value, HeapObject::kMapOffset));
  CompareRoot(scratch, Heap::kHeapNumberMapRootIndex);
  j(equal, &is_data_object, Label::kNear);
  DCHECK(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1);
  DCHECK(kNotStringTag == 0x80 && kIsNotStringMask == 0x80);
  // If it's a string and it's not a cons string then it's an object containing
  // no GC pointers.
  testb(FieldOperand(scratch, Map::kInstanceTypeOffset),
        Immediate(kIsIndirectStringMask | kIsNotStringMask));
  j(not_zero, not_data_object, not_data_object_distance);
  bind(&is_data_object);
}
5112
5113
// Computes the mark-bitmap location for the object at |addr_reg|:
// |bitmap_reg| receives the address of the bitmap cell containing the
// object's mark bits, and |mask_reg| receives a mask with a single 1 at
// the object's first mark-bit position within that cell. Clobbers rcx.
void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  DCHECK(!AreAliased(addr_reg, bitmap_reg, mask_reg, rcx));
  movp(bitmap_reg, addr_reg);
  // Sign extended 32 bit immediate. Clears the in-page offset bits, so
  // bitmap_reg now points at the start of the page (the MemoryChunk).
  andp(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
  movp(rcx, addr_reg);
  // Convert the in-page address to a byte offset into the bitmap: one
  // bitmap cell covers kBitsPerCell pointers.
  int shift =
      Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
  shrl(rcx, Immediate(shift));
  andp(rcx,
       Immediate((Page::kPageAlignmentMask >> shift) &
                 ~(Bitmap::kBytesPerCell - 1)));

  addp(bitmap_reg, rcx);
  movp(rcx, addr_reg);
  // Bit index within the cell = (address / kPointerSize) mod kBitsPerCell.
  shrl(rcx, Immediate(kPointerSizeLog2));
  andp(rcx, Immediate((1 << Bitmap::kBitsPerCellLog2) - 1));
  movl(mask_reg, Immediate(1));
  shlp_cl(mask_reg);
}
5136
5137
// If |value| is white: jumps to |value_is_white_and_not_data| unless it is
// a recognizable data object (HeapNumber or non-indirect string), in which
// case it is marked black in place and its size is added to the page's
// live-bytes counter. Black/grey objects fall through unchanged.
// Clobbers rcx and both scratch registers.
void MacroAssembler::EnsureNotWhite(
    Register value,
    Register bitmap_scratch,
    Register mask_scratch,
    Label* value_is_white_and_not_data,
    Label::Distance distance) {
  DCHECK(!AreAliased(value, bitmap_scratch, mask_scratch, rcx));
  GetMarkBits(value, bitmap_scratch, mask_scratch);

  // If the value is black or grey we don't need to do anything.
  DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0);
  DCHECK(strcmp(Marking::kBlackBitPattern, "10") == 0);
  DCHECK(strcmp(Marking::kGreyBitPattern, "11") == 0);
  DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

  Label done;

  // Since both black and grey have a 1 in the first position and white does
  // not have a 1 there we only need to check one bit.
  testp(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);
  j(not_zero, &done, Label::kNear);

  if (emit_debug_code()) {
    // Check for impossible bit pattern.
    Label ok;
    Push(mask_scratch);
    // shl. May overflow making the check conservative.
    addp(mask_scratch, mask_scratch);
    testp(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);
    j(zero, &ok, Label::kNear);
    // Impossible pattern "01" observed: trap in debug builds.
    int3();
    bind(&ok);
    Pop(mask_scratch);
  }

  // Value is white. We check whether it is data that doesn't need scanning.
  // Currently only checks for HeapNumber and non-cons strings.
  Register map = rcx;  // Holds map while checking type.
  Register length = rcx;  // Holds length of object after checking type.
  Label not_heap_number;
  Label is_data_object;

  // Check for heap-number
  movp(map, FieldOperand(value, HeapObject::kMapOffset));
  CompareRoot(map, Heap::kHeapNumberMapRootIndex);
  j(not_equal, &not_heap_number, Label::kNear);
  movp(length, Immediate(HeapNumber::kSize));
  jmp(&is_data_object, Label::kNear);

  bind(&not_heap_number);
  // Check for strings.
  DCHECK(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1);
  DCHECK(kNotStringTag == 0x80 && kIsNotStringMask == 0x80);
  // If it's a string and it's not a cons string then it's an object containing
  // no GC pointers.
  Register instance_type = rcx;
  movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  testb(instance_type, Immediate(kIsIndirectStringMask | kIsNotStringMask));
  j(not_zero, value_is_white_and_not_data);
  // It's a non-indirect (non-cons and non-slice) string.
  // If it's external, the length is just ExternalString::kSize.
  // Otherwise it's String::kHeaderSize + string->length() * (1 or 2).
  Label not_external;
  // External strings are the only ones with the kExternalStringTag bit
  // set.
  DCHECK_EQ(0, kSeqStringTag & kExternalStringTag);
  DCHECK_EQ(0, kConsStringTag & kExternalStringTag);
  testb(instance_type, Immediate(kExternalStringTag));
  j(zero, &not_external, Label::kNear);
  movp(length, Immediate(ExternalString::kSize));
  jmp(&is_data_object, Label::kNear);

  bind(&not_external);
  // Sequential string, either Latin1 or UC16.
  DCHECK(kOneByteStringTag == 0x04);
  // length still holds the instance type here; the and/xor/add sequence
  // turns the encoding bit into a char-size-times-4 value without branching.
  andp(length, Immediate(kStringEncodingMask));
  xorp(length, Immediate(kStringEncodingMask));
  addp(length, Immediate(0x04));
  // Value now either 4 (if Latin1) or 8 (if UC16), i.e. char-size shifted by 2.
  imulp(length, FieldOperand(value, String::kLengthOffset));
  // Undo both the char-size shift (2) and the smi tagging of the length.
  shrp(length, Immediate(2 + kSmiTagSize + kSmiShiftSize));
  // Round up to the object alignment after adding the header size.
  addp(length, Immediate(SeqString::kHeaderSize + kObjectAlignmentMask));
  andp(length, Immediate(~kObjectAlignmentMask));

  bind(&is_data_object);
  // Value is a data object, and it is white. Mark it black. Since we know
  // that the object is white we can make it black by flipping one bit.
  orp(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);

  // Recover the MemoryChunk address from the bitmap address and account
  // for the newly-black object's size.
  andp(bitmap_scratch, Immediate(~Page::kPageAlignmentMask));
  addl(Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset), length);

  bind(&done);
}
5232
5233
// Walks the prototype chain of the object in rax and jumps to
// |call_runtime| if fast for-in enumeration is not possible: the receiver's
// map has no valid enum cache, a prototype's enum cache is non-empty, or
// any object on the chain has elements other than the empty fixed array or
// the empty slow element dictionary. Clobbers rbx, rcx, rdx, r8 and
// kScratchRegister.
void MacroAssembler::CheckEnumCache(Register null_value, Label* call_runtime) {
  Label next, start;
  Register empty_fixed_array_value = r8;
  LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
  movp(rcx, rax);

  // Check if the enum length field is properly initialized, indicating that
  // there is an enum cache.
  movp(rbx, FieldOperand(rcx, HeapObject::kMapOffset));

  EnumLength(rdx, rbx);
  Cmp(rdx, Smi::FromInt(kInvalidEnumCacheSentinel));
  j(equal, call_runtime);

  jmp(&start);

  bind(&next);

  movp(rbx, FieldOperand(rcx, HeapObject::kMapOffset));

  // For all objects but the receiver, check that the cache is empty.
  EnumLength(rdx, rbx);
  Cmp(rdx, Smi::FromInt(0));
  j(not_equal, call_runtime);

  bind(&start);

  // Check that there are no elements. Register rcx contains the current JS
  // object we've reached through the prototype chain.
  Label no_elements;
  cmpp(empty_fixed_array_value,
       FieldOperand(rcx, JSObject::kElementsOffset));
  j(equal, &no_elements);

  // Second chance, the object may be using the empty slow element dictionary.
  LoadRoot(kScratchRegister, Heap::kEmptySlowElementDictionaryRootIndex);
  cmpp(kScratchRegister, FieldOperand(rcx, JSObject::kElementsOffset));
  j(not_equal, call_runtime);

  bind(&no_elements);
  // Advance to the prototype; stop when the chain terminates at null_value.
  movp(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
  cmpp(rcx, null_value);
  j(not_equal, &next);
}
5278
// Checks whether an AllocationMemento immediately follows the JSArray in
// |receiver_reg|. Sets the flags for an equal/not_equal check against the
// allocation-memento map; jumps to |no_memento_found| early when the slot
// after the array lies outside the active new-space region. Clobbers
// |scratch_reg| and kScratchRegister.
void MacroAssembler::TestJSArrayForAllocationMemento(
    Register receiver_reg,
    Register scratch_reg,
    Label* no_memento_found) {
  ExternalReference new_space_start =
      ExternalReference::new_space_start(isolate());
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Address just past where a trailing memento would end.
  leap(scratch_reg, Operand(receiver_reg,
      JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag));
  Move(kScratchRegister, new_space_start);
  cmpp(scratch_reg, kScratchRegister);
  j(less, no_memento_found);
  cmpp(scratch_reg, ExternalOperand(new_space_allocation_top));
  j(greater, no_memento_found);
  // Leaves flags set for the caller: equal iff a memento map is present.
  CompareRoot(MemOperand(scratch_reg, -AllocationMemento::kSize),
              Heap::kAllocationMementoMapRootIndex);
}
5298
5299
// Walks up the prototype chain of |object| and jumps to |found| as soon as
// an object with DICTIONARY_ELEMENTS is encountered; falls through when
// the chain ends at null. Clobbers both scratch registers.
void MacroAssembler::JumpIfDictionaryInPrototypeChain(
    Register object,
    Register scratch0,
    Register scratch1,
    Label* found) {
  DCHECK(!(scratch0.is(kScratchRegister) && scratch1.is(kScratchRegister)));
  DCHECK(!scratch1.is(scratch0));
  Register current = scratch0;
  Label loop_again;

  movp(current, object);

  // Loop based on the map going up the prototype chain.
  bind(&loop_again);
  movp(current, FieldOperand(current, HeapObject::kMapOffset));
  movp(scratch1, FieldOperand(current, Map::kBitField2Offset));
  // Extract the elements kind from bit field 2 of the map.
  DecodeField<Map::ElementsKindBits>(scratch1);
  cmpp(scratch1, Immediate(DICTIONARY_ELEMENTS));
  j(equal, found);
  movp(current, FieldOperand(current, Map::kPrototypeOffset));
  CompareRoot(current, Heap::kNullValueRootIndex);
  j(not_equal, &loop_again);
}
5323
5324
// Emits code that computes the truncated signed quotient |dividend| /
// |divisor| using magic-number multiplication instead of idiv; the result
// ends up in rdx. Clobbers rax and rdx; |dividend| must be neither.
void MacroAssembler::TruncatingDiv(Register dividend, int32_t divisor) {
  DCHECK(!dividend.is(rax));
  DCHECK(!dividend.is(rdx));
  base::MagicNumbersForDivision<uint32_t> mag =
      base::SignedDivisionByConstant(static_cast<uint32_t>(divisor));
  movl(rax, Immediate(mag.multiplier));
  // Signed multiply: high 32 bits of the product land in rdx.
  imull(dividend);
  bool neg = (mag.multiplier & (static_cast<uint32_t>(1) << 31)) != 0;
  // Correction terms required when the 32-bit magic multiplier's sign does
  // not match the divisor's sign (Hacker's Delight style adjustments).
  if (divisor > 0 && neg) addl(rdx, dividend);
  if (divisor < 0 && !neg && mag.multiplier > 0) subl(rdx, dividend);
  if (mag.shift > 0) sarl(rdx, Immediate(mag.shift));
  // Add the sign bit of the dividend to round the quotient toward zero.
  movl(rax, dividend);
  shrl(rax, Immediate(31));
  addl(rdx, rax);
}
5340
Ben Murdoch3ef787d2012-04-12 10:51:47 +01005341
Steve Blocka7e24c12009-10-30 11:49:00 +00005342} } // namespace v8::internal
Leon Clarkef7060e22010-06-03 12:02:55 +01005343
5344#endif // V8_TARGET_ARCH_X64