// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_IA32

#include "src/base/bits.h"
#include "src/base/division-by-constant.h"
#include "src/bootstrapper.h"
#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/ia32/frames-ia32.h"
#include "src/ia32/macro-assembler-ia32.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {

// -------------------------------------------------------------------------
// MacroAssembler implementation.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000022MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size,
23 CodeObjectRequired create_code_object)
Ben Murdoch8b112d22011-06-08 16:22:53 +010024 : Assembler(arg_isolate, buffer, size),
Steve Blocka7e24c12009-10-30 11:49:00 +000025 generating_stub_(false),
Ben Murdoch3ef787d2012-04-12 10:51:47 +010026 has_frame_(false) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000027 if (create_code_object == CodeObjectRequired::kYes) {
28 code_object_ =
29 Handle<Object>::New(isolate()->heap()->undefined_value(), isolate());
Ben Murdoch8b112d22011-06-08 16:22:53 +010030 }
Steve Blocka7e24c12009-10-30 11:49:00 +000031}
32
33
Ben Murdochb8a8cc12014-11-26 15:28:44 +000034void MacroAssembler::Load(Register dst, const Operand& src, Representation r) {
35 DCHECK(!r.IsDouble());
36 if (r.IsInteger8()) {
37 movsx_b(dst, src);
38 } else if (r.IsUInteger8()) {
39 movzx_b(dst, src);
40 } else if (r.IsInteger16()) {
41 movsx_w(dst, src);
42 } else if (r.IsUInteger16()) {
43 movzx_w(dst, src);
44 } else {
45 mov(dst, src);
46 }
47}
48
49
50void MacroAssembler::Store(Register src, const Operand& dst, Representation r) {
51 DCHECK(!r.IsDouble());
52 if (r.IsInteger8() || r.IsUInteger8()) {
53 mov_b(dst, src);
54 } else if (r.IsInteger16() || r.IsUInteger16()) {
55 mov_w(dst, src);
56 } else {
57 if (r.IsHeapObject()) {
58 AssertNotSmi(src);
59 } else if (r.IsSmi()) {
60 AssertSmi(src);
61 }
62 mov(dst, src);
63 }
64}
65
66
67void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
68 if (isolate()->heap()->RootCanBeTreatedAsConstant(index)) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000069 mov(destination, isolate()->heap()->root_handle(index));
Ben Murdochb8a8cc12014-11-26 15:28:44 +000070 return;
71 }
72 ExternalReference roots_array_start =
73 ExternalReference::roots_array_start(isolate());
74 mov(destination, Immediate(index));
75 mov(destination, Operand::StaticArray(destination,
76 times_pointer_size,
77 roots_array_start));
78}
79
80
81void MacroAssembler::StoreRoot(Register source,
82 Register scratch,
83 Heap::RootListIndex index) {
84 DCHECK(Heap::RootCanBeWrittenAfterInitialization(index));
85 ExternalReference roots_array_start =
86 ExternalReference::roots_array_start(isolate());
87 mov(scratch, Immediate(index));
88 mov(Operand::StaticArray(scratch, times_pointer_size, roots_array_start),
89 source);
90}
91
92
93void MacroAssembler::CompareRoot(Register with,
94 Register scratch,
95 Heap::RootListIndex index) {
96 ExternalReference roots_array_start =
97 ExternalReference::roots_array_start(isolate());
98 mov(scratch, Immediate(index));
99 cmp(with, Operand::StaticArray(scratch,
100 times_pointer_size,
101 roots_array_start));
102}
103
104
105void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
106 DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000107 cmp(with, isolate()->heap()->root_handle(index));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000108}
109
110
111void MacroAssembler::CompareRoot(const Operand& with,
112 Heap::RootListIndex index) {
113 DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000114 cmp(with, isolate()->heap()->root_handle(index));
115}
116
117
118void MacroAssembler::PushRoot(Heap::RootListIndex index) {
119 DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
120 Push(isolate()->heap()->root_handle(index));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000121}
122
Ben Murdoch097c5b22016-05-18 11:27:45 +0100123#define REG(Name) \
124 { Register::kCode_##Name }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000125
Ben Murdoch097c5b22016-05-18 11:27:45 +0100126static const Register saved_regs[] = {REG(eax), REG(ecx), REG(edx)};
127
128#undef REG
129
130static const int kNumberOfSavedRegs = sizeof(saved_regs) / sizeof(Register);
131
132void MacroAssembler::PushCallerSaved(SaveFPRegsMode fp_mode,
133 Register exclusion1, Register exclusion2,
134 Register exclusion3) {
135 // We don't allow a GC during a store buffer overflow so there is no need to
136 // store the registers in any particular way, but we do have to store and
137 // restore them.
138 for (int i = 0; i < kNumberOfSavedRegs; i++) {
139 Register reg = saved_regs[i];
140 if (!reg.is(exclusion1) && !reg.is(exclusion2) && !reg.is(exclusion3)) {
141 push(reg);
142 }
Steve Block6ded16b2010-05-10 14:33:55 +0100143 }
Ben Murdoch097c5b22016-05-18 11:27:45 +0100144 if (fp_mode == kSaveFPRegs) {
145 sub(esp, Immediate(kDoubleSize * (XMMRegister::kMaxNumRegisters - 1)));
146 // Save all XMM registers except XMM0.
147 for (int i = XMMRegister::kMaxNumRegisters - 1; i > 0; i--) {
148 XMMRegister reg = XMMRegister::from_code(i);
149 movsd(Operand(esp, (i - 1) * kDoubleSize), reg);
150 }
151 }
152}
153
154void MacroAssembler::PopCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1,
155 Register exclusion2, Register exclusion3) {
156 if (fp_mode == kSaveFPRegs) {
157 // Restore all XMM registers except XMM0.
158 for (int i = XMMRegister::kMaxNumRegisters - 1; i > 0; i--) {
159 XMMRegister reg = XMMRegister::from_code(i);
160 movsd(reg, Operand(esp, (i - 1) * kDoubleSize));
161 }
162 add(esp, Immediate(kDoubleSize * (XMMRegister::kMaxNumRegisters - 1)));
163 }
164
165 for (int i = kNumberOfSavedRegs - 1; i >= 0; i--) {
166 Register reg = saved_regs[i];
167 if (!reg.is(exclusion1) && !reg.is(exclusion2) && !reg.is(exclusion3)) {
168 pop(reg);
169 }
170 }
171}
172
173void MacroAssembler::InNewSpace(Register object, Register scratch, Condition cc,
174 Label* condition_met,
175 Label::Distance distance) {
176 const int mask =
177 (1 << MemoryChunk::IN_FROM_SPACE) | (1 << MemoryChunk::IN_TO_SPACE);
178 CheckPageFlag(object, scratch, mask, cc, condition_met, distance);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100179}
Steve Block6ded16b2010-05-10 14:33:55 +0100180
Steve Blocka7e24c12009-10-30 11:49:00 +0000181
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100182void MacroAssembler::RememberedSetHelper(
183 Register object, // Only used for debug checks.
184 Register addr,
185 Register scratch,
186 SaveFPRegsMode save_fp,
187 MacroAssembler::RememberedSetFinalAction and_then) {
188 Label done;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000189 if (emit_debug_code()) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100190 Label ok;
191 JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
192 int3();
193 bind(&ok);
194 }
195 // Load store buffer top.
196 ExternalReference store_buffer =
197 ExternalReference::store_buffer_top(isolate());
198 mov(scratch, Operand::StaticVariable(store_buffer));
199 // Store pointer to buffer.
200 mov(Operand(scratch, 0), addr);
201 // Increment buffer top.
202 add(scratch, Immediate(kPointerSize));
203 // Write back new top of buffer.
204 mov(Operand::StaticVariable(store_buffer), scratch);
205 // Call stub on end of buffer.
206 // Check for end of buffer.
Ben Murdochda12d292016-06-02 14:46:10 +0100207 test(scratch, Immediate(StoreBuffer::kStoreBufferMask));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100208 if (and_then == kReturnAtEnd) {
209 Label buffer_overflowed;
Ben Murdochda12d292016-06-02 14:46:10 +0100210 j(equal, &buffer_overflowed, Label::kNear);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100211 ret(0);
212 bind(&buffer_overflowed);
213 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000214 DCHECK(and_then == kFallThroughAtEnd);
Ben Murdochda12d292016-06-02 14:46:10 +0100215 j(not_equal, &done, Label::kNear);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100216 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000217 StoreBufferOverflowStub store_buffer_overflow(isolate(), save_fp);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100218 CallStub(&store_buffer_overflow);
219 if (and_then == kReturnAtEnd) {
220 ret(0);
221 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000222 DCHECK(and_then == kFallThroughAtEnd);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100223 bind(&done);
224 }
Ben Murdoch257744e2011-11-30 15:57:28 +0000225}
226
227
228void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
229 XMMRegister scratch_reg,
230 Register result_reg) {
231 Label done;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000232 Label conv_failure;
233 xorps(scratch_reg, scratch_reg);
234 cvtsd2si(result_reg, input_reg);
Ben Murdoch257744e2011-11-30 15:57:28 +0000235 test(result_reg, Immediate(0xFFFFFF00));
236 j(zero, &done, Label::kNear);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000237 cmp(result_reg, Immediate(0x1));
238 j(overflow, &conv_failure, Label::kNear);
239 mov(result_reg, Immediate(0));
240 setcc(sign, result_reg);
241 sub(result_reg, Immediate(1));
242 and_(result_reg, Immediate(255));
243 jmp(&done, Label::kNear);
244 bind(&conv_failure);
245 Move(result_reg, Immediate(0));
246 ucomisd(input_reg, scratch_reg);
247 j(below, &done, Label::kNear);
248 Move(result_reg, Immediate(255));
Ben Murdoch257744e2011-11-30 15:57:28 +0000249 bind(&done);
250}
251
252
253void MacroAssembler::ClampUint8(Register reg) {
254 Label done;
255 test(reg, Immediate(0xFFFFFF00));
256 j(zero, &done, Label::kNear);
257 setcc(negative, reg); // 1 if negative, 0 if positive.
258 dec_b(reg); // 0 if negative, 255 if positive.
259 bind(&done);
260}
261
262
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000263void MacroAssembler::SlowTruncateToI(Register result_reg,
264 Register input_reg,
265 int offset) {
266 DoubleToIStub stub(isolate(), input_reg, result_reg, offset, true);
267 call(stub.GetCode(), RelocInfo::CODE_TARGET);
268}
269
270
271void MacroAssembler::TruncateDoubleToI(Register result_reg,
272 XMMRegister input_reg) {
273 Label done;
274 cvttsd2si(result_reg, Operand(input_reg));
275 cmp(result_reg, 0x1);
276 j(no_overflow, &done, Label::kNear);
277
278 sub(esp, Immediate(kDoubleSize));
279 movsd(MemOperand(esp, 0), input_reg);
280 SlowTruncateToI(result_reg, esp, 0);
281 add(esp, Immediate(kDoubleSize));
282 bind(&done);
283}
284
285
286void MacroAssembler::DoubleToI(Register result_reg, XMMRegister input_reg,
287 XMMRegister scratch,
288 MinusZeroMode minus_zero_mode,
289 Label* lost_precision, Label* is_nan,
290 Label* minus_zero, Label::Distance dst) {
291 DCHECK(!input_reg.is(scratch));
292 cvttsd2si(result_reg, Operand(input_reg));
293 Cvtsi2sd(scratch, Operand(result_reg));
294 ucomisd(scratch, input_reg);
295 j(not_equal, lost_precision, dst);
296 j(parity_even, is_nan, dst);
297 if (minus_zero_mode == FAIL_ON_MINUS_ZERO) {
298 Label done;
299 // The integer converted back is equal to the original. We
300 // only have to test if we got -0 as an input.
301 test(result_reg, Operand(result_reg));
302 j(not_zero, &done, Label::kNear);
303 movmskpd(result_reg, input_reg);
304 // Bit 0 contains the sign of the double in input_reg.
305 // If input was positive, we are ok and return 0, otherwise
306 // jump to minus_zero.
307 and_(result_reg, 1);
308 j(not_zero, minus_zero, dst);
309 bind(&done);
310 }
311}
312
313
314void MacroAssembler::TruncateHeapNumberToI(Register result_reg,
315 Register input_reg) {
316 Label done, slow_case;
317
318 if (CpuFeatures::IsSupported(SSE3)) {
319 CpuFeatureScope scope(this, SSE3);
320 Label convert;
321 // Use more powerful conversion when sse3 is available.
322 // Load x87 register with heap number.
323 fld_d(FieldOperand(input_reg, HeapNumber::kValueOffset));
324 // Get exponent alone and check for too-big exponent.
325 mov(result_reg, FieldOperand(input_reg, HeapNumber::kExponentOffset));
326 and_(result_reg, HeapNumber::kExponentMask);
327 const uint32_t kTooBigExponent =
328 (HeapNumber::kExponentBias + 63) << HeapNumber::kExponentShift;
329 cmp(Operand(result_reg), Immediate(kTooBigExponent));
330 j(greater_equal, &slow_case, Label::kNear);
331
332 // Reserve space for 64 bit answer.
333 sub(Operand(esp), Immediate(kDoubleSize));
334 // Do conversion, which cannot fail because we checked the exponent.
335 fisttp_d(Operand(esp, 0));
336 mov(result_reg, Operand(esp, 0)); // Low word of answer is the result.
337 add(Operand(esp), Immediate(kDoubleSize));
338 jmp(&done, Label::kNear);
339
340 // Slow case.
341 bind(&slow_case);
342 if (input_reg.is(result_reg)) {
343 // Input is clobbered. Restore number from fpu stack
344 sub(Operand(esp), Immediate(kDoubleSize));
345 fstp_d(Operand(esp, 0));
346 SlowTruncateToI(result_reg, esp, 0);
347 add(esp, Immediate(kDoubleSize));
348 } else {
349 fstp(0);
350 SlowTruncateToI(result_reg, input_reg);
351 }
352 } else {
353 movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
354 cvttsd2si(result_reg, Operand(xmm0));
355 cmp(result_reg, 0x1);
356 j(no_overflow, &done, Label::kNear);
357 // Check if the input was 0x8000000 (kMinInt).
358 // If no, then we got an overflow and we deoptimize.
359 ExternalReference min_int = ExternalReference::address_of_min_int();
360 ucomisd(xmm0, Operand::StaticVariable(min_int));
361 j(not_equal, &slow_case, Label::kNear);
362 j(parity_even, &slow_case, Label::kNear); // NaN.
363 jmp(&done, Label::kNear);
364
365 // Slow case.
366 bind(&slow_case);
367 if (input_reg.is(result_reg)) {
368 // Input is clobbered. Restore number from double scratch.
369 sub(esp, Immediate(kDoubleSize));
370 movsd(MemOperand(esp, 0), xmm0);
371 SlowTruncateToI(result_reg, esp, 0);
372 add(esp, Immediate(kDoubleSize));
373 } else {
374 SlowTruncateToI(result_reg, input_reg);
375 }
376 }
377 bind(&done);
378}
379
380
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400381void MacroAssembler::LoadUint32(XMMRegister dst, const Operand& src) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000382 Label done;
383 cmp(src, Immediate(0));
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400384 ExternalReference uint32_bias = ExternalReference::address_of_uint32_bias();
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000385 Cvtsi2sd(dst, src);
386 j(not_sign, &done, Label::kNear);
387 addsd(dst, Operand::StaticVariable(uint32_bias));
388 bind(&done);
389}
390
391
392void MacroAssembler::RecordWriteArray(
393 Register object,
394 Register value,
395 Register index,
396 SaveFPRegsMode save_fp,
397 RememberedSetAction remembered_set_action,
398 SmiCheck smi_check,
399 PointersToHereCheck pointers_to_here_check_for_value) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100400 // First, check if a write barrier is even needed. The tests below
401 // catch stores of Smis.
402 Label done;
403
404 // Skip barrier if writing a smi.
405 if (smi_check == INLINE_SMI_CHECK) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000406 DCHECK_EQ(0, kSmiTag);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100407 test(value, Immediate(kSmiTagMask));
408 j(zero, &done);
409 }
410
411 // Array access: calculate the destination address in the same manner as
412 // KeyedStoreIC::GenerateGeneric. Multiply a smi by 2 to get an offset
413 // into an array of words.
414 Register dst = index;
415 lea(dst, Operand(object, index, times_half_pointer_size,
416 FixedArray::kHeaderSize - kHeapObjectTag));
417
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000418 RecordWrite(object, dst, value, save_fp, remembered_set_action,
419 OMIT_SMI_CHECK, pointers_to_here_check_for_value);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100420
421 bind(&done);
422
423 // Clobber clobbered input registers when running with the debug-code flag
424 // turned on to provoke errors.
425 if (emit_debug_code()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000426 mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
427 mov(index, Immediate(bit_cast<int32_t>(kZapValue)));
Ben Murdoch257744e2011-11-30 15:57:28 +0000428 }
Steve Blocka7e24c12009-10-30 11:49:00 +0000429}
430
431
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100432void MacroAssembler::RecordWriteField(
433 Register object,
434 int offset,
435 Register value,
436 Register dst,
437 SaveFPRegsMode save_fp,
438 RememberedSetAction remembered_set_action,
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000439 SmiCheck smi_check,
440 PointersToHereCheck pointers_to_here_check_for_value) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100441 // First, check if a write barrier is even needed. The tests below
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100442 // catch stores of Smis.
Ben Murdoch257744e2011-11-30 15:57:28 +0000443 Label done;
Steve Blocka7e24c12009-10-30 11:49:00 +0000444
445 // Skip barrier if writing a smi.
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100446 if (smi_check == INLINE_SMI_CHECK) {
447 JumpIfSmi(value, &done, Label::kNear);
Steve Blocka7e24c12009-10-30 11:49:00 +0000448 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100449
450 // Although the object register is tagged, the offset is relative to the start
451 // of the object, so so offset must be a multiple of kPointerSize.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000452 DCHECK(IsAligned(offset, kPointerSize));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100453
454 lea(dst, FieldOperand(object, offset));
455 if (emit_debug_code()) {
456 Label ok;
Ben Murdochda12d292016-06-02 14:46:10 +0100457 test_b(dst, Immediate((1 << kPointerSizeLog2) - 1));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100458 j(zero, &ok, Label::kNear);
459 int3();
460 bind(&ok);
461 }
462
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000463 RecordWrite(object, dst, value, save_fp, remembered_set_action,
464 OMIT_SMI_CHECK, pointers_to_here_check_for_value);
Steve Blocka7e24c12009-10-30 11:49:00 +0000465
466 bind(&done);
Leon Clarke4515c472010-02-03 11:58:03 +0000467
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100468 // Clobber clobbered input registers when running with the debug-code flag
Leon Clarke4515c472010-02-03 11:58:03 +0000469 // turned on to provoke errors.
Steve Block44f0eee2011-05-26 01:26:41 +0100470 if (emit_debug_code()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000471 mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
472 mov(dst, Immediate(bit_cast<int32_t>(kZapValue)));
Leon Clarke4515c472010-02-03 11:58:03 +0000473 }
Steve Blocka7e24c12009-10-30 11:49:00 +0000474}
475
476
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000477void MacroAssembler::RecordWriteForMap(
478 Register object,
479 Handle<Map> map,
480 Register scratch1,
481 Register scratch2,
482 SaveFPRegsMode save_fp) {
483 Label done;
484
485 Register address = scratch1;
486 Register value = scratch2;
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100487 if (emit_debug_code()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000488 Label ok;
489 lea(address, FieldOperand(object, HeapObject::kMapOffset));
Ben Murdochda12d292016-06-02 14:46:10 +0100490 test_b(address, Immediate((1 << kPointerSizeLog2) - 1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000491 j(zero, &ok, Label::kNear);
492 int3();
493 bind(&ok);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100494 }
495
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000496 DCHECK(!object.is(value));
497 DCHECK(!object.is(address));
498 DCHECK(!value.is(address));
499 AssertNotSmi(object);
500
501 if (!FLAG_incremental_marking) {
502 return;
503 }
504
505 // Compute the address.
506 lea(address, FieldOperand(object, HeapObject::kMapOffset));
507
508 // A single check of the map's pages interesting flag suffices, since it is
509 // only set during incremental collection, and then it's also guaranteed that
510 // the from object's page's interesting flag is also set. This optimization
511 // relies on the fact that maps can never be in new space.
512 DCHECK(!isolate()->heap()->InNewSpace(*map));
513 CheckPageFlagForMap(map,
514 MemoryChunk::kPointersToHereAreInterestingMask,
515 zero,
516 &done,
517 Label::kNear);
518
519 RecordWriteStub stub(isolate(), object, value, address, OMIT_REMEMBERED_SET,
520 save_fp);
521 CallStub(&stub);
522
523 bind(&done);
524
525 // Count number of write barriers in generated code.
526 isolate()->counters()->write_barriers_static()->Increment();
527 IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);
528
529 // Clobber clobbered input registers when running with the debug-code flag
530 // turned on to provoke errors.
531 if (emit_debug_code()) {
532 mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
533 mov(scratch1, Immediate(bit_cast<int32_t>(kZapValue)));
534 mov(scratch2, Immediate(bit_cast<int32_t>(kZapValue)));
535 }
536}
537
538
539void MacroAssembler::RecordWrite(
540 Register object,
541 Register address,
542 Register value,
543 SaveFPRegsMode fp_mode,
544 RememberedSetAction remembered_set_action,
545 SmiCheck smi_check,
546 PointersToHereCheck pointers_to_here_check_for_value) {
547 DCHECK(!object.is(value));
548 DCHECK(!object.is(address));
549 DCHECK(!value.is(address));
550 AssertNotSmi(object);
551
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100552 if (remembered_set_action == OMIT_REMEMBERED_SET &&
553 !FLAG_incremental_marking) {
554 return;
555 }
556
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000557 if (emit_debug_code()) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100558 Label ok;
559 cmp(value, Operand(address, 0));
560 j(equal, &ok, Label::kNear);
561 int3();
562 bind(&ok);
563 }
564
Steve Block8defd9f2010-07-08 12:39:36 +0100565 // First, check if a write barrier is even needed. The tests below
566 // catch stores of Smis and stores into young gen.
567 Label done;
568
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100569 if (smi_check == INLINE_SMI_CHECK) {
570 // Skip barrier if writing a smi.
571 JumpIfSmi(value, &done, Label::kNear);
572 }
Steve Block8defd9f2010-07-08 12:39:36 +0100573
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000574 if (pointers_to_here_check_for_value != kPointersToHereAreAlwaysInteresting) {
575 CheckPageFlag(value,
576 value, // Used as scratch.
577 MemoryChunk::kPointersToHereAreInterestingMask,
578 zero,
579 &done,
580 Label::kNear);
581 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100582 CheckPageFlag(object,
583 value, // Used as scratch.
584 MemoryChunk::kPointersFromHereAreInterestingMask,
585 zero,
586 &done,
587 Label::kNear);
Steve Block8defd9f2010-07-08 12:39:36 +0100588
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000589 RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
590 fp_mode);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100591 CallStub(&stub);
Steve Block8defd9f2010-07-08 12:39:36 +0100592
593 bind(&done);
594
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000595 // Count number of write barriers in generated code.
596 isolate()->counters()->write_barriers_static()->Increment();
597 IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);
598
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100599 // Clobber clobbered registers when running with the debug-code flag
Steve Block8defd9f2010-07-08 12:39:36 +0100600 // turned on to provoke errors.
Steve Block44f0eee2011-05-26 01:26:41 +0100601 if (emit_debug_code()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000602 mov(address, Immediate(bit_cast<int32_t>(kZapValue)));
603 mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
Steve Block8defd9f2010-07-08 12:39:36 +0100604 }
605}
606
Ben Murdoch097c5b22016-05-18 11:27:45 +0100607void MacroAssembler::RecordWriteCodeEntryField(Register js_function,
608 Register code_entry,
609 Register scratch) {
610 const int offset = JSFunction::kCodeEntryOffset;
611
612 // Since a code entry (value) is always in old space, we don't need to update
613 // remembered set. If incremental marking is off, there is nothing for us to
614 // do.
615 if (!FLAG_incremental_marking) return;
616
617 DCHECK(!js_function.is(code_entry));
618 DCHECK(!js_function.is(scratch));
619 DCHECK(!code_entry.is(scratch));
620 AssertNotSmi(js_function);
621
622 if (emit_debug_code()) {
623 Label ok;
624 lea(scratch, FieldOperand(js_function, offset));
625 cmp(code_entry, Operand(scratch, 0));
626 j(equal, &ok, Label::kNear);
627 int3();
628 bind(&ok);
629 }
630
631 // First, check if a write barrier is even needed. The tests below
632 // catch stores of Smis and stores into young gen.
633 Label done;
634
635 CheckPageFlag(code_entry, scratch,
636 MemoryChunk::kPointersToHereAreInterestingMask, zero, &done,
637 Label::kNear);
638 CheckPageFlag(js_function, scratch,
639 MemoryChunk::kPointersFromHereAreInterestingMask, zero, &done,
640 Label::kNear);
641
642 // Save input registers.
643 push(js_function);
644 push(code_entry);
645
646 const Register dst = scratch;
647 lea(dst, FieldOperand(js_function, offset));
648
649 // Save caller-saved registers.
650 PushCallerSaved(kDontSaveFPRegs, js_function, code_entry);
651
652 int argument_count = 3;
653 PrepareCallCFunction(argument_count, code_entry);
654 mov(Operand(esp, 0 * kPointerSize), js_function);
655 mov(Operand(esp, 1 * kPointerSize), dst); // Slot.
656 mov(Operand(esp, 2 * kPointerSize),
657 Immediate(ExternalReference::isolate_address(isolate())));
658
659 {
660 AllowExternalCallThatCantCauseGC scope(this);
661 CallCFunction(
662 ExternalReference::incremental_marking_record_write_code_entry_function(
663 isolate()),
664 argument_count);
665 }
666
667 // Restore caller-saved registers.
668 PopCallerSaved(kDontSaveFPRegs, js_function, code_entry);
669
670 // Restore input registers.
671 pop(code_entry);
672 pop(js_function);
673
674 bind(&done);
675}
Steve Block8defd9f2010-07-08 12:39:36 +0100676
Andrei Popescu402d9372010-02-26 13:31:12 +0000677void MacroAssembler::DebugBreak() {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000678 Move(eax, Immediate(0));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000679 mov(ebx, Immediate(ExternalReference(Runtime::kHandleDebuggerStatement,
680 isolate())));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000681 CEntryStub ces(isolate(), 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000682 call(ces.GetCode(), RelocInfo::DEBUGGER_STATEMENT);
Andrei Popescu402d9372010-02-26 13:31:12 +0000683}
Steve Blocka7e24c12009-10-30 11:49:00 +0000684
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000685void MacroAssembler::Cvtsi2sd(XMMRegister dst, const Operand& src) {
686 xorps(dst, dst);
687 cvtsi2sd(dst, src);
Steve Blocka7e24c12009-10-30 11:49:00 +0000688}
689
690
Ben Murdoch097c5b22016-05-18 11:27:45 +0100691void MacroAssembler::Cvtui2ss(XMMRegister dst, Register src, Register tmp) {
692 Label msb_set_src;
693 Label jmp_return;
694 test(src, src);
695 j(sign, &msb_set_src, Label::kNear);
696 cvtsi2ss(dst, src);
697 jmp(&jmp_return, Label::kNear);
698 bind(&msb_set_src);
699 mov(tmp, src);
700 shr(src, 1);
701 // Recover the least significant bit to avoid rounding errors.
702 and_(tmp, Immediate(1));
703 or_(src, tmp);
704 cvtsi2ss(dst, src);
705 addss(dst, dst);
706 bind(&jmp_return);
707}
708
Ben Murdochda12d292016-06-02 14:46:10 +0100709void MacroAssembler::ShlPair(Register high, Register low, uint8_t shift) {
710 if (shift >= 32) {
711 mov(high, low);
712 shl(high, shift - 32);
713 xor_(low, low);
714 } else {
715 shld(high, low, shift);
716 shl(low, shift);
717 }
718}
719
720void MacroAssembler::ShlPair_cl(Register high, Register low) {
721 shld_cl(high, low);
722 shl_cl(low);
723 Label done;
724 test(ecx, Immediate(0x20));
725 j(equal, &done, Label::kNear);
726 mov(high, low);
727 xor_(low, low);
728 bind(&done);
729}
730
731void MacroAssembler::ShrPair(Register high, Register low, uint8_t shift) {
732 if (shift >= 32) {
733 mov(low, high);
734 shr(low, shift - 32);
735 xor_(high, high);
736 } else {
737 shrd(high, low, shift);
738 shr(high, shift);
739 }
740}
741
742void MacroAssembler::ShrPair_cl(Register high, Register low) {
743 shrd_cl(low, high);
744 shr_cl(high);
745 Label done;
746 test(ecx, Immediate(0x20));
747 j(equal, &done, Label::kNear);
748 mov(low, high);
749 xor_(high, high);
750 bind(&done);
751}
752
753void MacroAssembler::SarPair(Register high, Register low, uint8_t shift) {
754 if (shift >= 32) {
755 mov(low, high);
756 sar(low, shift - 32);
757 sar(high, 31);
758 } else {
759 shrd(high, low, shift);
760 sar(high, shift);
761 }
762}
763
764void MacroAssembler::SarPair_cl(Register high, Register low) {
765 shrd_cl(low, high);
766 sar_cl(high);
767 Label done;
768 test(ecx, Immediate(0x20));
769 j(equal, &done, Label::kNear);
770 mov(low, high);
771 sar(high, 31);
772 bind(&done);
773}
Ben Murdoch097c5b22016-05-18 11:27:45 +0100774
Steve Block053d10c2011-06-13 19:13:29 +0100775bool MacroAssembler::IsUnsafeImmediate(const Immediate& x) {
776 static const int kMaxImmediateBits = 17;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000777 if (!RelocInfo::IsNone(x.rmode_)) return false;
Steve Block053d10c2011-06-13 19:13:29 +0100778 return !is_intn(x.x_, kMaxImmediateBits);
779}
780
781
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000782void MacroAssembler::SafeMove(Register dst, const Immediate& x) {
Steve Block053d10c2011-06-13 19:13:29 +0100783 if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000784 Move(dst, Immediate(x.x_ ^ jit_cookie()));
Steve Block053d10c2011-06-13 19:13:29 +0100785 xor_(dst, jit_cookie());
786 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000787 Move(dst, x);
Steve Block053d10c2011-06-13 19:13:29 +0100788 }
789}
790
791
792void MacroAssembler::SafePush(const Immediate& x) {
793 if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
794 push(Immediate(x.x_ ^ jit_cookie()));
795 xor_(Operand(esp, 0), Immediate(jit_cookie()));
796 } else {
797 push(x);
798 }
799}
800
801
Steve Blocka7e24c12009-10-30 11:49:00 +0000802void MacroAssembler::CmpObjectType(Register heap_object,
803 InstanceType type,
804 Register map) {
805 mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
806 CmpInstanceType(map, type);
807}
808
809
810void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
Ben Murdochda12d292016-06-02 14:46:10 +0100811 cmpb(FieldOperand(map, Map::kInstanceTypeOffset), Immediate(type));
Steve Blocka7e24c12009-10-30 11:49:00 +0000812}
813
814
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000815void MacroAssembler::CheckFastElements(Register map,
816 Label* fail,
817 Label::Distance distance) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000818 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
819 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
820 STATIC_ASSERT(FAST_ELEMENTS == 2);
821 STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000822 cmpb(FieldOperand(map, Map::kBitField2Offset),
Ben Murdochda12d292016-06-02 14:46:10 +0100823 Immediate(Map::kMaximumBitField2FastHoleyElementValue));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000824 j(above, fail, distance);
825}
826
827
// Jumps to |fail| unless |map| has a fast *object* elements kind
// (FAST_ELEMENTS or FAST_HOLEY_ELEMENTS): smi-only kinds fail the first
// compare, anything above the fast-holey range fails the second.
void MacroAssembler::CheckFastObjectElements(Register map,
                                             Label* fail,
                                             Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastHoleySmiElementValue));
  j(below_equal, fail, distance);  // Reject smi-only elements kinds.
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastHoleyElementValue));
  j(above, fail, distance);  // Reject non-fast elements kinds.
}
842
843
// Jumps to |fail| unless |map| has a fast *smi* elements kind
// (FAST_SMI_ELEMENTS or FAST_HOLEY_SMI_ELEMENTS).
void MacroAssembler::CheckFastSmiElements(Register map,
                                          Label* fail,
                                          Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastHoleySmiElementValue));
  j(above, fail, distance);
}
853
854
// Stores |maybe_number| (a smi or HeapNumber) as an unboxed double into the
// FixedDoubleArray |elements| at smi index |key|. Jumps to |fail| if
// |maybe_number| is neither a smi nor a heap number. |scratch1| and
// |scratch2| are clobbered; |maybe_number| and |key| are preserved.
void MacroAssembler::StoreNumberToDoubleElements(
    Register maybe_number,
    Register elements,
    Register key,
    Register scratch1,
    XMMRegister scratch2,
    Label* fail,
    int elements_offset) {
  Label smi_value, done;
  JumpIfSmi(maybe_number, &smi_value, Label::kNear);

  CheckMap(maybe_number,
           isolate()->factory()->heap_number_map(),
           fail,
           DONT_DO_SMI_CHECK);

  // Double value, turn potential sNaN into qNaN.
  // Multiplying by 1.0 canonicalizes the NaN payload.
  Move(scratch2, 1.0);
  mulsd(scratch2, FieldOperand(maybe_number, HeapNumber::kValueOffset));
  jmp(&done, Label::kNear);

  bind(&smi_value);
  // Value is a smi. Convert to a double and store.
  // Preserve original value.
  mov(scratch1, maybe_number);
  SmiUntag(scratch1);
  Cvtsi2sd(scratch2, scratch1);
  bind(&done);
  // key is a smi, so the times_4 scale plus smi tag gives times_8 (kDoubleSize).
  movsd(FieldOperand(elements, key, times_4,
                     FixedDoubleArray::kHeaderSize - elements_offset),
        scratch2);
}
887
888
// Compares the map of |obj| against the handle |map|; sets the condition
// flags for a following conditional jump.
void MacroAssembler::CompareMap(Register obj, Handle<Map> map) {
  cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
}
892
893
// Jumps to |fail| if the map of |obj| does not equal |map|. When
// |smi_check_type| is DO_SMI_CHECK, smis also jump to |fail| (a smi has no
// map, so dereferencing it would be invalid).
void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }

  CompareMap(obj, map);
  j(not_equal, fail);
}
905
906
// Jumps to the code object |success| if the map of |obj| matches the map
// held in the weak |cell|; otherwise falls through. Optionally smi-checks
// |obj| first. Clobbers |scratch1| and |scratch2|.
void MacroAssembler::DispatchWeakMap(Register obj, Register scratch1,
                                     Register scratch2, Handle<WeakCell> cell,
                                     Handle<Code> success,
                                     SmiCheckType smi_check_type) {
  Label fail;
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, &fail);
  }
  mov(scratch1, FieldOperand(obj, HeapObject::kMapOffset));
  CmpWeakValue(scratch1, cell, scratch2);
  j(equal, success);

  bind(&fail);
}
921
922
// Tests whether |heap_object| is a string. Loads its map into |map| and the
// instance type into |instance_type| (both clobbered), and returns the
// condition that holds when the object IS a string.
Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  test(instance_type, Immediate(kIsNotStringMask));
  return zero;  // Zero flag set <=> string tag bits clear <=> is a string.
}
932
933
// Tests whether |heap_object| is a Name (string or symbol). Loads its map
// into |map| and the instance type into |instance_type| (both clobbered),
// and returns the condition that holds when the object IS a name.
Condition MacroAssembler::IsObjectNameType(Register heap_object,
                                           Register map,
                                           Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  cmpb(instance_type, Immediate(LAST_NAME_TYPE));
  return below_equal;  // Name types occupy the low end of the type range.
}
942
943
// Compares the two values on top of the x87 FPU stack and materializes the
// result in EFLAGS. fucomip compares st(0) with st(1) and pops once; the
// fstp(0) pops the remaining operand, leaving the FPU stack empty.
void MacroAssembler::FCmp() {
  fucomip();
  fstp(0);
}
948
949
// Debug-mode check: aborts unless |object| is a smi or a HeapNumber.
// Emits nothing in release code.
void MacroAssembler::AssertNumber(Register object) {
  if (emit_debug_code()) {
    Label ok;
    JumpIfSmi(object, &ok);
    cmp(FieldOperand(object, HeapObject::kMapOffset),
        isolate()->factory()->heap_number_map());
    Check(equal, kOperandNotANumber);
    bind(&ok);
  }
}
960
// Debug-mode check: aborts if |object| is a smi or a HeapNumber.
// Emits nothing in release code.
void MacroAssembler::AssertNotNumber(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsANumber);
    cmp(FieldOperand(object, HeapObject::kMapOffset),
        isolate()->factory()->heap_number_map());
    Check(not_equal, kOperandIsANumber);
  }
}
Steve Blocka7e24c12009-10-30 11:49:00 +0000970
// Debug-mode check: aborts unless |object| is a smi.
// Emits nothing in release code.
void MacroAssembler::AssertSmi(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(equal, kOperandIsNotASmi);
  }
}
977
978
// Debug-mode check: aborts unless |object| is a string. |object| is used as
// scratch for the type check but saved/restored around it via the stack.
// Emits nothing in release code.
void MacroAssembler::AssertString(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAString);
    push(object);
    mov(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, FIRST_NONSTRING_TYPE);
    pop(object);
    Check(below, kOperandIsNotAString);
  }
}
990
991
// Debug-mode check: aborts unless |object| is a Name (string or symbol).
// |object| is saved/restored around its use as scratch. Emits nothing in
// release code.
void MacroAssembler::AssertName(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAName);
    push(object);
    mov(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, LAST_NAME_TYPE);
    pop(object);
    Check(below_equal, kOperandIsNotAName);
  }
}
1003
1004
// Debug-mode check: aborts unless |object| is a JSFunction. |object| is
// saved/restored around its use as the map scratch register. Emits nothing
// in release code.
void MacroAssembler::AssertFunction(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAFunction);
    Push(object);
    CmpObjectType(object, JS_FUNCTION_TYPE, object);
    Pop(object);
    Check(equal, kOperandIsNotAFunction);
  }
}
1015
1016
// Debug-mode check: aborts unless |object| is a JSBoundFunction. |object| is
// saved/restored around its use as the map scratch register. Emits nothing
// in release code.
void MacroAssembler::AssertBoundFunction(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotABoundFunction);
    Push(object);
    CmpObjectType(object, JS_BOUND_FUNCTION_TYPE, object);
    Pop(object);
    Check(equal, kOperandIsNotABoundFunction);
  }
}
1027
// Debug-mode check: aborts unless |object| is a JSGeneratorObject. |object|
// is saved/restored around its use as the map scratch register. Emits
// nothing in release code.
void MacroAssembler::AssertGeneratorObject(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAGeneratorObject);
    Push(object);
    CmpObjectType(object, JS_GENERATOR_OBJECT_TYPE, object);
    Pop(object);
    Check(equal, kOperandIsNotAGeneratorObject);
  }
}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001038
// Debug-mode check: aborts unless |object| is a JSReceiver (i.e. a JS object
// or proxy). Receiver types occupy the top of the instance-type range, so a
// single lower-bound compare suffices. Emits nothing in release code.
void MacroAssembler::AssertReceiver(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAReceiver);
    Push(object);
    STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
    CmpObjectType(object, FIRST_JS_RECEIVER_TYPE, object);
    Pop(object);
    Check(above_equal, kOperandIsNotAReceiver);
  }
}
1050
1051
// Debug-mode check: aborts unless |object| is the undefined value or an
// AllocationSite (identified by its map in the first word). Emits nothing
// in release code.
void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) {
  if (emit_debug_code()) {
    Label done_checking;
    AssertNotSmi(object);
    cmp(object, isolate()->factory()->undefined_value());
    j(equal, &done_checking);
    // FieldOperand(object, 0) is the map slot (offset 0 minus the heap tag).
    cmp(FieldOperand(object, 0),
        Immediate(isolate()->factory()->allocation_site_map()));
    Assert(equal, kExpectedUndefinedOrCell);
    bind(&done_checking);
  }
}
1064
1065
// Debug-mode check: aborts if |object| is a smi.
// Emits nothing in release code.
void MacroAssembler::AssertNotSmi(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmi);
  }
}
1072
// Builds the minimal prologue for a stub frame: saved ebp, new frame
// pointer, and the frame type marker pushed as a smi.
void MacroAssembler::StubPrologue(StackFrame::Type type) {
  push(ebp);  // Caller's frame pointer.
  mov(ebp, esp);
  push(Immediate(Smi::FromInt(type)));
}
1078
// Emits the standard JS function prologue. When |code_pre_aging| is set it
// instead emits a call to the code-aging builtin padded to the same fixed
// length, so the sequence can later be patched in place.
void MacroAssembler::Prologue(bool code_pre_aging) {
  // The prologue must have a fixed byte length so it can be patched.
  PredictableCodeSizeScope predictible_code_size_scope(this,
      kNoCodeAgeSequenceLength);
  if (code_pre_aging) {
    // Pre-age the code.
    call(isolate()->builtins()->MarkCodeAsExecutedOnce(),
        RelocInfo::CODE_AGE_SEQUENCE);
    // Pad to the full sequence length.
    Nop(kNoCodeAgeSequenceLength - Assembler::kCallInstructionLength);
  } else {
    push(ebp);  // Caller's frame pointer.
    mov(ebp, esp);
    push(esi);  // Callee's context.
    push(edi);  // Callee's JS function.
  }
}
1094
1095
// Loads the current function's type feedback vector into |vector| by
// following: frame function slot -> literals array -> feedback vector.
void MacroAssembler::EmitLoadTypeFeedbackVector(Register vector) {
  mov(vector, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  mov(vector, FieldOperand(vector, JSFunction::kLiteralsOffset));
  mov(vector, FieldOperand(vector, LiteralsArray::kFeedbackVectorOffset));
}
1101
1102
// Overload taking a constant-pool flag; ia32 has no out-of-line constant
// pool, so this variant must never be called.
void MacroAssembler::EnterFrame(StackFrame::Type type,
                                bool load_constant_pool_pointer_reg) {
  // Out-of-line constant pool not implemented on ia32.
  UNREACHABLE();
}
1108
1109
// Builds a typed stack frame: saved ebp, type marker, and (for INTERNAL
// frames) the code object. In debug code, verifies the code-object slot is
// later patched away from the placeholder undefined value.
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, esp);
  push(Immediate(Smi::FromInt(type)));
  if (type == StackFrame::INTERNAL) {
    push(Immediate(CodeObject()));
  }
  if (emit_debug_code()) {
    cmp(Operand(esp, 0), Immediate(isolate()->factory()->undefined_value()));
    Check(not_equal, kCodeObjectNotProperlyPatched);
  }
}
1122
1123
// Tears down a frame built by EnterFrame. In debug code, verifies the frame
// marker matches the expected |type| before leaving.
void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (emit_debug_code()) {
    cmp(Operand(ebp, CommonFrameConstants::kContextOrFrameTypeOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, kStackFrameTypesMustMatch);
  }
  leave();
}
1132
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001133
// Builds the fixed part of an exit frame (JS -> C++ transition): saved ebp,
// EXIT frame marker, a slot for the entry stack pointer, and the code
// object. Also publishes ebp/esi/ebx into the isolate's c_entry_fp, context
// and c_function top-of-stack slots.
void MacroAssembler::EnterExitFramePrologue() {
  // Set up the frame structure on the stack.
  DCHECK_EQ(+2 * kPointerSize, ExitFrameConstants::kCallerSPDisplacement);
  DCHECK_EQ(+1 * kPointerSize, ExitFrameConstants::kCallerPCOffset);
  DCHECK_EQ(0 * kPointerSize, ExitFrameConstants::kCallerFPOffset);
  push(ebp);
  mov(ebp, esp);

  // Reserve room for entry stack pointer and push the code object.
  push(Immediate(Smi::FromInt(StackFrame::EXIT)));
  DCHECK_EQ(-2 * kPointerSize, ExitFrameConstants::kSPOffset);
  push(Immediate(0));  // Saved entry sp, patched before call.
  DCHECK_EQ(-3 * kPointerSize, ExitFrameConstants::kCodeOffset);
  push(Immediate(CodeObject()));  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress, isolate());
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  ExternalReference c_function_address(Isolate::kCFunctionAddress, isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);
  mov(Operand::StaticVariable(c_function_address), ebx);
}
Steve Blocka7e24c12009-10-30 11:49:00 +00001157
Steve Blocka7e24c12009-10-30 11:49:00 +00001158
// Finishes exit-frame setup: reserves |argc| argument slots (plus space to
// spill all XMM registers when |save_doubles| is set), aligns esp to the
// OS frame alignment, and patches the saved entry sp slot.
void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) {
  // Optionally save all XMM registers.
  if (save_doubles) {
    int space = XMMRegister::kMaxNumRegisters * kDoubleSize +
                argc * kPointerSize;
    sub(esp, Immediate(space));
    const int offset = -ExitFrameConstants::kFixedFrameSizeFromFp;
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movsd(Operand(ebp, offset - ((i + 1) * kDoubleSize)), reg);
    }
  } else {
    sub(esp, Immediate(argc * kPointerSize));
  }

  // Get the required frame alignment for the OS.
  const int kFrameAlignment = base::OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    DCHECK(base::bits::IsPowerOfTwo32(kFrameAlignment));
    and_(esp, -kFrameAlignment);  // Round esp down to the alignment.
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}
1184
1185
// Enters a full exit frame for calling out to C++. On entry eax holds the
// argument count; edi/esi are set up as callee-saved argc/argv for the
// runtime call.
void MacroAssembler::EnterExitFrame(int argc, bool save_doubles) {
  EnterExitFramePrologue();

  // Set up argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, eax);
  lea(esi, Operand(ebp, eax, times_4, offset));

  // Reserve space for argc, argv and isolate.
  EnterExitFrameEpilogue(argc, save_doubles);
}
1197
1198
// Enters an exit frame for an API callback: same layout as EnterExitFrame
// but never saves the double registers.
void MacroAssembler::EnterApiExitFrame(int argc) {
  EnterExitFramePrologue();
  EnterExitFrameEpilogue(argc, false);
}
1203
1204
// Tears down an exit frame built by EnterExitFrame. Restores the XMM
// registers when |save_doubles| is set. When |pop_arguments| is set, also
// pops the JS arguments and receiver (using esi, which EnterExitFrame set
// to point at the argument area) and re-pushes the return address.
void MacroAssembler::LeaveExitFrame(bool save_doubles, bool pop_arguments) {
  // Optionally restore all XMM registers.
  if (save_doubles) {
    const int offset = -ExitFrameConstants::kFixedFrameSizeFromFp;
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movsd(reg, Operand(ebp, offset - ((i + 1) * kDoubleSize)));
    }
  }

  if (pop_arguments) {
    // Get the return address from the stack and restore the frame pointer.
    mov(ecx, Operand(ebp, 1 * kPointerSize));
    mov(ebp, Operand(ebp, 0 * kPointerSize));

    // Pop the arguments and the receiver from the caller stack.
    lea(esp, Operand(esi, 1 * kPointerSize));

    // Push the return address to get ready to return.
    push(ecx);
  } else {
    // Otherwise just leave the exit frame.
    leave();
  }

  LeaveExitFrameEpilogue(true);
}
1232
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001233
// Clears the isolate's top-of-stack bookkeeping after an exit frame:
// optionally restores esi from the saved context, zeroes the context slot in
// debug builds, and clears c_entry_fp.
void MacroAssembler::LeaveExitFrameEpilogue(bool restore_context) {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  if (restore_context) {
    mov(esi, Operand::StaticVariable(context_address));
  }
#ifdef DEBUG
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
                                       isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}
1249
1250
// Tears down an API exit frame (no argument popping, no double restore);
// |restore_context| controls whether esi is reloaded from the saved context.
void MacroAssembler::LeaveApiExitFrame(bool restore_context) {
  mov(esp, ebp);
  pop(ebp);

  LeaveExitFrameEpilogue(restore_context);
}
1257
1258
// Pushes a new stack handler (a single next-pointer) and links it as the
// current handler in the isolate's handler chain.
void MacroAssembler::PushStackHandler() {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);

  // Link the current handler as the next handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  push(Operand::StaticVariable(handler_address));

  // Set this new handler as the current one.
  mov(Operand::StaticVariable(handler_address), esp);
}
1271
1272
// Unlinks the current stack handler: restores the previous handler from the
// top of the stack and drops the rest of the handler (if any beyond the
// next pointer).
void MacroAssembler::PopStackHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  pop(Operand::StaticVariable(handler_address));
  add(esp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}
1279
1280
// Security check for accessing a global proxy |holder_reg|: jumps to |miss|
// unless the current lexical native context is the holder's native context
// or both contexts carry the same security token. Clobbers |scratch1| and
// |scratch2|; preserves |holder_reg|.
void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss) {
  Label same_contexts;

  DCHECK(!holder_reg.is(scratch1));
  DCHECK(!holder_reg.is(scratch2));
  DCHECK(!scratch1.is(scratch2));

  // Load current lexical context from the active StandardFrame, which
  // may require crawling past STUB frames.
  // A smi in the context-or-frame-type slot marks a non-standard (stub)
  // frame, so keep walking up caller frame pointers until a real context
  // (non-smi) is found.
  Label load_context;
  Label has_context;
  mov(scratch2, ebp);
  bind(&load_context);
  mov(scratch1,
      MemOperand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset));
  JumpIfNotSmi(scratch1, &has_context);
  mov(scratch2, MemOperand(scratch2, CommonFrameConstants::kCallerFPOffset));
  jmp(&load_context);
  bind(&has_context);

  // When generating debug code, make sure the lexical context is set.
  if (emit_debug_code()) {
    cmp(scratch1, Immediate(0));
    Check(not_equal, kWeShouldNotHaveAnEmptyLexicalContext);
  }
  // Load the native context of the current context.
  mov(scratch1, ContextOperand(scratch1, Context::NATIVE_CONTEXT_INDEX));

  // Check the context is a native context.
  if (emit_debug_code()) {
    // Read the first word and compare to native_context_map.
    cmp(FieldOperand(scratch1, HeapObject::kMapOffset),
        isolate()->factory()->native_context_map());
    Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
  }

  // Check if both contexts are the same.
  cmp(scratch1, FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  j(equal, &same_contexts);

  // Compare security tokens, save holder_reg on the stack so we can use it
  // as a temporary register.
  //
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(scratch2,
      FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));

  // Check the context is a native context.
  if (emit_debug_code()) {
    cmp(scratch2, isolate()->factory()->null_value());
    Check(not_equal, kJSGlobalProxyContextShouldNotBeNull);

    // Read the first word and compare to native_context_map(),
    cmp(FieldOperand(scratch2, HeapObject::kMapOffset),
        isolate()->factory()->native_context_map());
    Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
  }

  // Compare the security-token slots of the two native contexts.
  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch1, FieldOperand(scratch1, token_offset));
  cmp(scratch1, FieldOperand(scratch2, token_offset));
  j(not_equal, miss);

  bind(&same_contexts);
}
1352
1353
// Compute the hash code from the untagged key. This must be kept in sync with
// ComputeIntegerHash in utils.h and KeyedLoadGenericStub in
// code-stub-hydrogen.cc
//
// Note: r0 will contain hash code
void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
  // Xor original key with a seed.
  // Under the serializer the seed cannot be baked in as an immediate, so it
  // is loaded indirectly from the roots array instead.
  if (serializer_enabled()) {
    ExternalReference roots_array_start =
        ExternalReference::roots_array_start(isolate());
    mov(scratch, Immediate(Heap::kHashSeedRootIndex));
    mov(scratch,
        Operand::StaticArray(scratch, times_pointer_size, roots_array_start));
    SmiUntag(scratch);
    xor_(r0, scratch);
  } else {
    int32_t seed = isolate()->heap()->HashSeed();
    xor_(r0, Immediate(seed));
  }

  // The following is an integer-mixing sequence (shift/xor/add/multiply).
  // hash = ~hash + (hash << 15);
  mov(scratch, r0);
  not_(r0);
  shl(scratch, 15);
  add(r0, scratch);
  // hash = hash ^ (hash >> 12);
  mov(scratch, r0);
  shr(scratch, 12);
  xor_(r0, scratch);
  // hash = hash + (hash << 2);
  lea(r0, Operand(r0, r0, times_4, 0));
  // hash = hash ^ (hash >> 4);
  mov(scratch, r0);
  shr(scratch, 4);
  xor_(r0, scratch);
  // hash = hash * 2057;
  imul(r0, r0, 2057);
  // hash = hash ^ (hash >> 16);
  mov(scratch, r0);
  shr(scratch, 16);
  xor_(r0, scratch);
  // Mask to 30 bits so the result always fits in a smi.
  and_(r0, 0x3fffffff);
}
1397
1398
1399
// Looks up smi |key| in the SeededNumberDictionary |elements| using open
// addressing with quadratic-style probe offsets. On success the value falls
// through into |result|; on failure (key absent, or the entry is not a plain
// data property) jumps to |miss|.
void MacroAssembler::LoadFromNumberDictionary(Label* miss,
                                              Register elements,
                                              Register key,
                                              Register r0,
                                              Register r1,
                                              Register r2,
                                              Register result) {
  // Register use:
  //
  // elements - holds the slow-case elements of the receiver and is unchanged.
  //
  // key - holds the smi key on entry and is unchanged.
  //
  // Scratch registers:
  //
  // r0 - holds the untagged key on entry and holds the hash once computed.
  //
  // r1 - used to hold the capacity mask of the dictionary
  //
  // r2 - used for the index into the dictionary.
  //
  // result - holds the result on exit if the load succeeds and we fall through.

  Label done;

  GetNumberHash(r0, r1);

  // Compute capacity mask.
  mov(r1, FieldOperand(elements, SeededNumberDictionary::kCapacityOffset));
  shr(r1, kSmiTagSize);  // convert smi to int
  dec(r1);  // Capacity is a power of two; capacity - 1 is the probe mask.

  // Generate an unrolled loop that performs a few probes before giving up.
  for (int i = 0; i < kNumberDictionaryProbes; i++) {
    // Use r2 for index calculations and keep the hash intact in r0.
    mov(r2, r0);
    // Compute the masked index: (hash + i + i * i) & mask.
    if (i > 0) {
      add(r2, Immediate(SeededNumberDictionary::GetProbeOffset(i)));
    }
    and_(r2, r1);

    // Scale the index by multiplying by the entry size.
    DCHECK(SeededNumberDictionary::kEntrySize == 3);
    lea(r2, Operand(r2, r2, times_2, 0));  // r2 = r2 * 3

    // Check if the key matches.
    cmp(key, FieldOperand(elements,
                          r2,
                          times_pointer_size,
                          SeededNumberDictionary::kElementsStartOffset));
    if (i != (kNumberDictionaryProbes - 1)) {
      j(equal, &done);
    } else {
      // Last probe: a mismatch means the key is not present.
      j(not_equal, miss);
    }
  }

  bind(&done);
  // Check that the value is a field property.
  const int kDetailsOffset =
      SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  DCHECK_EQ(DATA, 0);
  test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
       Immediate(PropertyDetails::TypeField::kMask << kSmiTagSize));
  j(not_zero, miss);

  // Get the value at the masked, scaled index.
  const int kValueOffset =
      SeededNumberDictionary::kElementsStartOffset + kPointerSize;
  mov(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
}
1472
1473
// Loads the current allocation top (for the space selected by |flags|) into
// |result|. If RESULT_CONTAINS_TOP is set, |result| already holds the top
// and is only verified in debug builds; otherwise |scratch| (when valid)
// caches the top address for a later UpdateAllocationTopHelper.
void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    DCHECK(scratch.is(no_reg));
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    cmp(result, Operand::StaticVariable(allocation_top));
    Check(equal, kUnexpectedAllocationTop);
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available.
  if (scratch.is(no_reg)) {
    mov(result, Operand::StaticVariable(allocation_top));
  } else {
    mov(scratch, Immediate(allocation_top));
    mov(result, Operand(scratch, 0));
  }
}
1500
1501
// Writes |result_end| back as the new allocation top for the space selected
// by |flags|. In debug builds, checks the new top is object-aligned first.
// If |scratch| is a valid register it is assumed to still hold the address of
// the top variable (as set up by LoadAllocationTopHelper).
void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch,
                                               AllocationFlags flags) {
  if (emit_debug_code()) {
    // The new top must be object-aligned.
    test(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, kUnalignedAllocationInNewSpace);
  }

  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Update new top. Use scratch if available.
  if (scratch.is(no_reg)) {
    mov(Operand::StaticVariable(allocation_top), result_end);
  } else {
    mov(Operand(scratch, 0), result_end);
  }
}
1520
1521
// Emits an inline allocation of a fixed-size object of |object_size| bytes.
// On success, |result| holds the tagged pointer to the new object; on failure
// control jumps to |gc_required|. |result_end| and |scratch| are clobbered
// when valid. With FLAG_inline_new disabled this always jumps to
// |gc_required| (trashing the registers in debug builds to expose reliance
// on them).
void MacroAssembler::Allocate(int object_size,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
  DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
  DCHECK((flags & ALLOCATION_FOLDED) == 0);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      if (result_end.is_valid()) {
        mov(result_end, Immediate(0x7191));
      }
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Align the next allocation. Storing the filler map without checking top is
  // safe in new-space because the limit of the heap is aligned there.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    if ((flags & PRETENURE) != 0) {
      // In old space the filler store below is not covered by the alignment
      // guarantee, so check the limit before writing it.
      cmp(result, Operand::StaticVariable(allocation_limit));
      j(above_equal, gc_required);
    }
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted.
  // If no result_end was supplied, compute the new top in result itself.
  Register top_reg = result_end.is_valid() ? result_end : result;

  if (!top_reg.is(result)) {
    mov(top_reg, result);
  }
  add(top_reg, Immediate(object_size));
  cmp(top_reg, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  if ((flags & ALLOCATION_FOLDING_DOMINATOR) == 0) {
    // The top pointer is not updated for allocation folding dominators.
    UpdateAllocationTopHelper(top_reg, scratch, flags);
  }

  if (top_reg.is(result)) {
    // result currently holds the new top; step back to the object start and
    // apply the heap-object tag in a single subtraction.
    sub(result, Immediate(object_size - kHeapObjectTag));
  } else {
    // Tag the result.
    DCHECK(kHeapObjectTag == 1);
    inc(result);
  }
}
1593
1594
// Emits an inline allocation of a variable-size object of
// |element_count| * |element_size| + |header_size| bytes. |element_count| may
// hold a smi (REGISTER_VALUE_IS_SMI) or a raw int32; in the smi case the
// scale factor is halved to compensate for the smi tag shift. On success,
// |result| holds the tagged pointer and |result_end| the untagged end of the
// object; on failure control jumps to |gc_required|.
void MacroAssembler::Allocate(int header_size,
                              ScaleFactor element_size,
                              Register element_count,
                              RegisterValueType element_count_type,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & SIZE_IN_WORDS) == 0);
  DCHECK((flags & ALLOCATION_FOLDING_DOMINATOR) == 0);
  DCHECK((flags & ALLOCATION_FOLDED) == 0);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // Register element_count is not modified by the function.
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Align the next allocation. Storing the filler map without checking top is
  // safe in new-space because the limit of the heap is aligned there.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    if ((flags & PRETENURE) != 0) {
      // Outside new space, check the limit before storing the filler.
      cmp(result, Operand::StaticVariable(allocation_limit));
      j(above_equal, gc_required);
    }
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted.
  // We assume that element_count*element_size + header_size does not
  // overflow.
  if (element_count_type == REGISTER_VALUE_IS_SMI) {
    // A smi is the value shifted left by kSmiTagSize, so scaling by one less
    // power of two yields the same byte count as scaling the raw value.
    STATIC_ASSERT(static_cast<ScaleFactor>(times_2 - 1) == times_1);
    STATIC_ASSERT(static_cast<ScaleFactor>(times_4 - 1) == times_2);
    STATIC_ASSERT(static_cast<ScaleFactor>(times_8 - 1) == times_4);
    DCHECK(element_size >= times_2);
    DCHECK(kSmiTagSize == 1);
    element_size = static_cast<ScaleFactor>(element_size - 1);
  } else {
    DCHECK(element_count_type == REGISTER_VALUE_IS_INT32);
  }

  lea(result_end, Operand(element_count, element_size, header_size));
  add(result_end, result);
  cmp(result_end, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  // Tag result.
  DCHECK(kHeapObjectTag == 1);
  inc(result);

  UpdateAllocationTopHelper(result_end, scratch, flags);
}
1670
1671
// Emits an inline allocation of an object whose byte size is held in the
// |object_size| register. On success, |result| holds the tagged pointer and
// |result_end| the untagged end of the object; on failure control jumps to
// |gc_required|. |object_size| itself is preserved unless it aliases
// |result_end|.
void MacroAssembler::Allocate(Register object_size,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
  DCHECK((flags & ALLOCATION_FOLDED) == 0);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // object_size is left unchanged by this function.
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Align the next allocation. Storing the filler map without checking top is
  // safe in new-space because the limit of the heap is aligned there.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    if ((flags & PRETENURE) != 0) {
      // Outside new space, check the limit before storing the filler.
      cmp(result, Operand::StaticVariable(allocation_limit));
      j(above_equal, gc_required);
    }
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted.
  if (!object_size.is(result_end)) {
    mov(result_end, object_size);
  }
  add(result_end, result);
  cmp(result_end, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  // Tag result.
  DCHECK(kHeapObjectTag == 1);
  inc(result);

  if ((flags & ALLOCATION_FOLDING_DOMINATOR) == 0) {
    // The top pointer is not updated for allocation folding dominators.
    UpdateAllocationTopHelper(result_end, scratch, flags);
  }
}
1735
// Emits an allocation of |object_size| bytes that cannot fail: no limit
// check is performed and there is no gc label, so the caller must already
// have guaranteed that the space is available (e.g. via a preceding
// ALLOCATION_FOLDING_DOMINATOR allocation). |result| receives the tagged
// pointer; |result_end| is clobbered with the new top.
void MacroAssembler::FastAllocate(int object_size, Register result,
                                  Register result_end, AllocationFlags flags) {
  DCHECK(!result.is(result_end));
  // Load address of new object into result.
  LoadAllocationTopHelper(result, no_reg, flags);

  // Align the next allocation; the filler store needs no limit check here.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  lea(result_end, Operand(result, object_size));
  UpdateAllocationTopHelper(result_end, no_reg, flags);

  // Tag the result.
  DCHECK(kHeapObjectTag == 1);
  inc(result);
}
1759
// Register-sized variant of FastAllocate: allocates the number of bytes held
// in |object_size| with no limit check and no failure path; the caller must
// have reserved the space beforehand. |result| receives the tagged pointer;
// |result_end| is clobbered with the new top.
void MacroAssembler::FastAllocate(Register object_size, Register result,
                                  Register result_end, AllocationFlags flags) {
  DCHECK(!result.is(result_end));
  // Load address of new object into result.
  LoadAllocationTopHelper(result, no_reg, flags);

  // Align the next allocation; the filler store needs no limit check here.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  lea(result_end, Operand(result, object_size, times_1, 0));
  UpdateAllocationTopHelper(result_end, no_reg, flags);

  // Tag the result.
  DCHECK(kHeapObjectTag == 1);
  inc(result);
}
1783
1784
Steve Block3ce2e202009-11-05 08:53:23 +00001785void MacroAssembler::AllocateHeapNumber(Register result,
1786 Register scratch1,
1787 Register scratch2,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001788 Label* gc_required,
1789 MutableMode mode) {
Steve Block3ce2e202009-11-05 08:53:23 +00001790 // Allocate heap number in new space.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001791 Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required,
Ben Murdochc5610432016-08-08 18:44:38 +01001792 NO_ALLOCATION_FLAGS);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001793
1794 Handle<Map> map = mode == MUTABLE
1795 ? isolate()->factory()->mutable_heap_number_map()
1796 : isolate()->factory()->heap_number_map();
Steve Block3ce2e202009-11-05 08:53:23 +00001797
1798 // Set the map.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001799 mov(FieldOperand(result, HeapObject::kMapOffset), Immediate(map));
Steve Block3ce2e202009-11-05 08:53:23 +00001800}
1801
1802
// Allocates a sequential two-byte string with |length| characters (raw int32
// in a register) in new space, then initializes its map, smi-tagged length,
// and empty hash field. Jumps to |gc_required| on failure; all three scratch
// registers are clobbered.
void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  DCHECK((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  DCHECK(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask, then round down to the
  // alignment boundary (i.e. round the byte count up to alignment).
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
  and_(scratch1, Immediate(~kObjectAlignmentMask));

  // Allocate two byte string in new space.
  Allocate(SeqTwoByteString::kHeaderSize, times_1, scratch1,
           REGISTER_VALUE_IS_INT32, result, scratch2, scratch3, gc_required,
           NO_ALLOCATION_FLAGS);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
1831
1832
// Allocates a sequential one-byte string with |length| characters (raw int32
// in a register) in new space, then initializes its map, smi-tagged length,
// and empty hash field. Jumps to |gc_required| on failure; all three scratch
// registers are clobbered.
void MacroAssembler::AllocateOneByteString(Register result, Register length,
                                           Register scratch1, Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  DCHECK((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, length);
  DCHECK(kCharSize == 1);
  // Round the byte count up to the object alignment.
  add(scratch1, Immediate(kObjectAlignmentMask));
  and_(scratch1, Immediate(~kObjectAlignmentMask));

  // Allocate one-byte string in new space.
  Allocate(SeqOneByteString::kHeaderSize, times_1, scratch1,
           REGISTER_VALUE_IS_INT32, result, scratch2, scratch3, gc_required,
           NO_ALLOCATION_FLAGS);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->one_byte_string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
1859
1860
// Allocates a sequential one-byte string with a compile-time-constant
// |length| (> 0) in new space, then initializes its map, length, and empty
// hash field. Jumps to |gc_required| on failure; both scratch registers are
// clobbered.
void MacroAssembler::AllocateOneByteString(Register result, int length,
                                           Register scratch1, Register scratch2,
                                           Label* gc_required) {
  DCHECK(length > 0);

  // Allocate one-byte string in new space; SizeFor already includes the
  // header and alignment rounding.
  Allocate(SeqOneByteString::SizeFor(length), result, scratch1, scratch2,
           gc_required, NO_ALLOCATION_FLAGS);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->one_byte_string_map()));
  mov(FieldOperand(result, String::kLengthOffset),
      Immediate(Smi::FromInt(length)));
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
1878
1879
// Allocates a two-byte ConsString in new space and installs its map; the
// remaining fields are left uninitialized for the caller to fill in. Jumps
// to |gc_required| on failure; both scratch registers are clobbered.
void MacroAssembler::AllocateTwoByteConsString(Register result,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  // Allocate cons string in new space.
  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
           NO_ALLOCATION_FLAGS);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->cons_string_map()));
}
1892
1893
// Allocates a one-byte ConsString in new space and installs its map; the
// remaining fields are left uninitialized for the caller to fill in. Jumps
// to |gc_required| on failure; both scratch registers are clobbered.
void MacroAssembler::AllocateOneByteConsString(Register result,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  // Allocate cons string in new space.
  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
           NO_ALLOCATION_FLAGS);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->cons_one_byte_string_map()));
}
1905
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08001906
// Allocates a two-byte SlicedString in new space and installs its map; the
// remaining fields are left uninitialized for the caller to fill in. Jumps
// to |gc_required| on failure; both scratch registers are clobbered.
void MacroAssembler::AllocateTwoByteSlicedString(Register result,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  // Allocate sliced string in new space.
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           NO_ALLOCATION_FLAGS);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->sliced_string_map()));
}
1919
1920
// Allocates a one-byte SlicedString in new space and installs its map; the
// remaining fields are left uninitialized for the caller to fill in. Jumps
// to |gc_required| on failure; both scratch registers are clobbered.
void MacroAssembler::AllocateOneByteSlicedString(Register result,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  // Allocate sliced string in new space.
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           NO_ALLOCATION_FLAGS);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->sliced_one_byte_string_map()));
}
1933
1934
// Allocates a JSValue wrapper in new space and fully initializes it: the map
// comes from |constructor|'s initial map, properties and elements are set to
// the empty fixed array, and the value field is set to |value|. Jumps to
// |gc_required| on failure. |constructor| and |scratch| are clobbered.
void MacroAssembler::AllocateJSValue(Register result, Register constructor,
                                     Register value, Register scratch,
                                     Label* gc_required) {
  DCHECK(!result.is(constructor));
  DCHECK(!result.is(scratch));
  DCHECK(!result.is(value));

  // Allocate JSValue in new space.
  Allocate(JSValue::kSize, result, scratch, no_reg, gc_required,
           NO_ALLOCATION_FLAGS);

  // Initialize the JSValue.
  LoadGlobalFunctionInitialMap(constructor, scratch);
  mov(FieldOperand(result, HeapObject::kMapOffset), scratch);
  LoadRoot(scratch, Heap::kEmptyFixedArrayRootIndex);
  mov(FieldOperand(result, JSObject::kPropertiesOffset), scratch);
  mov(FieldOperand(result, JSObject::kElementsOffset), scratch);
  mov(FieldOperand(result, JSValue::kValueOffset), value);
  // The stores above cover every field of a JSValue.
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
}
1955
1956
// Copy memory, byte-by-byte, from source to destination. Not optimized for
// long or aligned copies. The contents of scratch and length are destroyed.
// Source and destination are incremented by length.
// NOTE(review): on the 4-16 byte word-copy path below, |source| is not
// advanced at all, and on the rep_movs path it is advanced only by the
// word-aligned portion — callers apparently must not rely on |source| after
// the call; confirm against call sites.
// Many variants of movsb, loop unrolling, word moves, and indexed operands
// have been tried here already, and this is fastest.
// A simpler loop is faster on small copies, but 30% slower on large ones.
// The cld() instruction must have been emitted, to set the direction flag(),
// before calling this function.
void MacroAssembler::CopyBytes(Register source,
                               Register destination,
                               Register length,
                               Register scratch) {
  Label short_loop, len4, len8, len12, done, short_string;
  // rep_movs implicitly uses esi/edi/ecx, so the register choice is fixed.
  DCHECK(source.is(esi));
  DCHECK(destination.is(edi));
  DCHECK(length.is(ecx));
  cmp(length, Immediate(4));
  j(below, &short_string, Label::kNear);

  // Because source is 4-byte aligned in our uses of this function,
  // we keep source aligned for the rep_movs call by copying the odd bytes
  // at the end of the ranges.
  mov(scratch, Operand(source, length, times_1, -4));
  mov(Operand(destination, length, times_1, -4), scratch);

  // Dispatch small sizes (4..16 bytes) to unrolled word copies.
  cmp(length, Immediate(8));
  j(below_equal, &len4, Label::kNear);
  cmp(length, Immediate(12));
  j(below_equal, &len8, Label::kNear);
  cmp(length, Immediate(16));
  j(below_equal, &len12, Label::kNear);

  // Long copy: move whole dwords with rep_movs, then fix up destination for
  // the 0-3 trailing bytes already copied above.
  mov(scratch, ecx);
  shr(ecx, 2);
  rep_movs();
  and_(scratch, Immediate(0x3));
  add(destination, scratch);
  jmp(&done, Label::kNear);

  // Unrolled copies fall through from len12 to len8 to len4.
  bind(&len12);
  mov(scratch, Operand(source, 8));
  mov(Operand(destination, 8), scratch);
  bind(&len8);
  mov(scratch, Operand(source, 4));
  mov(Operand(destination, 4), scratch);
  bind(&len4);
  mov(scratch, Operand(source, 0));
  mov(Operand(destination, 0), scratch);
  add(destination, length);
  jmp(&done, Label::kNear);

  // Fewer than 4 bytes: simple byte loop (handles length == 0 too).
  bind(&short_string);
  test(length, length);
  j(zero, &done, Label::kNear);

  bind(&short_loop);
  mov_b(scratch, Operand(source, 0));
  mov_b(Operand(destination, 0), scratch);
  inc(source);
  inc(destination);
  dec(length);
  j(not_zero, &short_loop);

  bind(&done);
}
2022
Steve Blockd0582a62009-12-15 09:54:21 +00002023
// Stores |filler| into every pointer-sized slot in
// [current_address, end_address). |current_address| is advanced to
// |end_address|; the loop test is at the bottom so an empty range stores
// nothing.
void MacroAssembler::InitializeFieldsWithFiller(Register current_address,
                                                Register end_address,
                                                Register filler) {
  Label loop, entry;
  jmp(&entry, Label::kNear);
  bind(&loop);
  mov(Operand(current_address, 0), filler);
  add(current_address, Immediate(kPointerSize));
  bind(&entry);
  cmp(current_address, end_address);
  j(below, &loop, Label::kNear);
}
2036
2037
2038void MacroAssembler::BooleanBitTest(Register object,
2039 int field_offset,
2040 int bit_index) {
2041 bit_index += kSmiTagSize + kSmiShiftSize;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002042 DCHECK(base::bits::IsPowerOfTwo32(kBitsPerByte));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002043 int byte_index = bit_index / kBitsPerByte;
2044 int byte_bit_index = bit_index & (kBitsPerByte - 1);
2045 test_b(FieldOperand(object, field_offset + byte_index),
Ben Murdochda12d292016-06-02 14:46:10 +01002046 Immediate(1 << byte_bit_index));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002047}
2048
2049
2050
// Jumps to |then_label| if the multiplication that produced |result| yielded
// negative zero: |result| is zero and the operand |op| is negative (so the
// mathematical product was -0).
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  test(result, result);
  j(not_zero, &ok, Label::kNear);
  test(op, op);
  j(sign, then_label, Label::kNear);
  bind(&ok);
}
2061
2062
// Jumps to |then_label| if |result| is zero and either |op1| or |op2| is
// negative (the or of the two operands has its sign bit set), i.e. the
// product was negative zero. |scratch| is clobbered.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  test(result, result);
  j(not_zero, &ok, Label::kNear);
  mov(scratch, op1);
  or_(scratch, op2);
  j(sign, then_label, Label::kNear);
  bind(&ok);
}
2076
2077
// Loads the constructor of |map| into |result|, following the back-pointer
// chain: while the constructor-or-back-pointer field holds another map, keep
// dereferencing; stop at the first non-map value (the constructor, possibly a
// smi). |temp| is clobbered by the type check.
void MacroAssembler::GetMapConstructor(Register result, Register map,
                                       Register temp) {
  Label done, loop;
  mov(result, FieldOperand(map, Map::kConstructorOrBackPointerOffset));
  bind(&loop);
  JumpIfSmi(result, &done, Label::kNear);
  CmpObjectType(result, MAP_TYPE, temp);
  j(not_equal, &done, Label::kNear);
  mov(result, FieldOperand(result, Map::kConstructorOrBackPointerOffset));
  jmp(&loop);
  bind(&done);
}
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002090
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002091
// Loads the prototype of JSFunction |function| into |result|. Jumps to
// |miss| if the prototype-or-initial-map slot holds the hole (prototype not
// yet materialized). If the slot holds the initial map, the prototype is
// read from it; otherwise the slot value itself is the prototype. |scratch|
// is clobbered by the type check.
void MacroAssembler::TryGetFunctionPrototype(Register function, Register result,
                                             Register scratch, Label* miss) {
  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(result, Immediate(isolate()->factory()->the_hole_value()));
  j(equal, miss);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done, Label::kNear);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));

  // All done.
  bind(&done);
}
2115
2116
// Emits a call to |stub|'s code object, tagging the call site with |ast_id|
// for type feedback.
void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) {
  DCHECK(AllowThisStubCall(stub));  // Calls are not allowed in some stubs.
  call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
}
2121
2122
// Emits a tail call (jump) to |stub|'s code object.
void MacroAssembler::TailCallStub(CodeStub* stub) {
  jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
}
2126
2127
Steve Blocka7e24c12009-10-30 11:49:00 +00002128void MacroAssembler::StubReturn(int argc) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002129 DCHECK(argc >= 1 && generating_stub());
Steve Blocka7e24c12009-10-30 11:49:00 +00002130 ret((argc - 1) * kPointerSize);
2131}
2132
2133
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002134bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002135 return has_frame_ || !stub->SometimesSetsUpAFrame();
Steve Blocka7e24c12009-10-30 11:49:00 +00002136}
2137
2138
// Extracts the cached array index from a string hash-field value in |hash|
// and leaves it, smi-tagged, in |index|. |hash| and |index| may alias.
void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it does not conflict.
  DCHECK(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  if (!index.is(hash)) {
    mov(index, hash);
  }
  DecodeFieldToSmi<String::ArrayIndexValueBits>(index);
}
2150
2151
// Calls the C++ runtime function |f| with |num_arguments| stack arguments
// through the CEntryStub. eax carries the argument count and ebx the entry
// point, as expected by CEntryStub.
void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments,
                                 SaveFPRegsMode save_doubles) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  CHECK(f->nargs < 0 || f->nargs == num_arguments);

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Move(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f, isolate())));
  CEntryStub ces(isolate(), 1, save_doubles);
  CallStub(&ces);
}
2169
2170
// Calls an arbitrary C entry point |ref| with |num_arguments| stack
// arguments, using the same eax/ebx calling convention as CallRuntime.
void MacroAssembler::CallExternalReference(ExternalReference ref,
                                           int num_arguments) {
  mov(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ref));

  CEntryStub stub(isolate(), 1);
  CallStub(&stub);
}
2179
2180
// Tail-calls the runtime function identified by |fid|. For fixed-arity
// functions the argument count is loaded into eax here; variable-arity
// functions must have set eax already.
void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid) {
  // ----------- S t a t e -------------
  //  -- esp[0]                 : return address
  //  -- esp[8]                 : argument num_arguments - 1
  //  ...
  //  -- esp[8 * num_arguments] : argument 0 (receiver)
  //
  //  For runtime functions with variable arguments:
  //  -- eax                    : number of  arguments
  // -----------------------------------

  const Runtime::Function* function = Runtime::FunctionForId(fid);
  DCHECK_EQ(1, function->result_size);
  if (function->nargs >= 0) {
    // TODO(1236192): Most runtime routines don't need the number of
    // arguments passed in because it is constant. At some point we
    // should remove this need and make the runtime routine entry code
    // smarter.
    mov(eax, Immediate(function->nargs));
  }
  JumpToExternalReference(ExternalReference(fid, isolate()));
}
2203
2204
// Jumps (tail-calls) to the C entry point |ext| via the CEntryStub; ebx
// carries the entry point for the stub.
void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces(isolate(), 1);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}
2211
// Removes the current frame and copies the callee's arguments over the
// caller's arguments in preparation for a tail call. On exit, esp points at
// the relocated return address and ebp holds the caller's frame pointer, so
// a plain jmp completes the tail call. |number_of_temp_values_after_return_address|
// accounts for scratch slots the caller pushed below the return address.
void MacroAssembler::PrepareForTailCall(
    const ParameterCount& callee_args_count, Register caller_args_count_reg,
    Register scratch0, Register scratch1, ReturnAddressState ra_state,
    int number_of_temp_values_after_return_address) {
#if DEBUG
  if (callee_args_count.is_reg()) {
    DCHECK(!AreAliased(callee_args_count.reg(), caller_args_count_reg, scratch0,
                       scratch1));
  } else {
    DCHECK(!AreAliased(caller_args_count_reg, scratch0, scratch1));
  }
  DCHECK(ra_state != ReturnAddressState::kNotOnStack ||
         number_of_temp_values_after_return_address == 0);
#endif

  // Calculate the destination address where we will put the return address
  // after we drop current frame.
  Register new_sp_reg = scratch0;
  if (callee_args_count.is_reg()) {
    sub(caller_args_count_reg, callee_args_count.reg());
    lea(new_sp_reg,
        Operand(ebp, caller_args_count_reg, times_pointer_size,
                StandardFrameConstants::kCallerPCOffset -
                    number_of_temp_values_after_return_address * kPointerSize));
  } else {
    lea(new_sp_reg, Operand(ebp, caller_args_count_reg, times_pointer_size,
                            StandardFrameConstants::kCallerPCOffset -
                                (callee_args_count.immediate() +
                                 number_of_temp_values_after_return_address) *
                                    kPointerSize));
  }

  if (FLAG_debug_code) {
    cmp(esp, new_sp_reg);
    Check(below, kStackAccessBelowStackPointer);
  }

  // Copy return address from caller's frame to current frame's return address
  // to avoid its trashing and let the following loop copy it to the right
  // place.
  Register tmp_reg = scratch1;
  if (ra_state == ReturnAddressState::kOnStack) {
    mov(tmp_reg, Operand(ebp, StandardFrameConstants::kCallerPCOffset));
    mov(Operand(esp, number_of_temp_values_after_return_address * kPointerSize),
        tmp_reg);
  } else {
    DCHECK(ReturnAddressState::kNotOnStack == ra_state);
    DCHECK_EQ(0, number_of_temp_values_after_return_address);
    Push(Operand(ebp, StandardFrameConstants::kCallerPCOffset));
  }

  // Restore caller's frame pointer now as it could be overwritten by
  // the copying loop.
  mov(ebp, Operand(ebp, StandardFrameConstants::kCallerFPOffset));

  // +2 here is to copy both receiver and return address.
  Register count_reg = caller_args_count_reg;
  if (callee_args_count.is_reg()) {
    lea(count_reg, Operand(callee_args_count.reg(),
                           2 + number_of_temp_values_after_return_address));
  } else {
    mov(count_reg, Immediate(callee_args_count.immediate() + 2 +
                             number_of_temp_values_after_return_address));
    // TODO(ishell): Unroll copying loop for small immediate values.
  }

  // Now copy callee arguments to the caller frame going backwards to avoid
  // callee arguments corruption (source and destination areas could overlap).
  Label loop, entry;
  jmp(&entry, Label::kNear);
  bind(&loop);
  dec(count_reg);
  mov(tmp_reg, Operand(esp, count_reg, times_pointer_size, 0));
  mov(Operand(new_sp_reg, count_reg, times_pointer_size, 0), tmp_reg);
  bind(&entry);
  cmp(count_reg, Immediate(0));
  j(not_equal, &loop, Label::kNear);

  // Leave current frame.
  mov(esp, new_sp_reg);
}
Steve Blocka7e24c12009-10-30 11:49:00 +00002293
// Compares expected and actual argument counts and, on a mismatch, routes
// the invocation through the ArgumentsAdaptorTrampoline. eax ends up holding
// the actual count and ebx the expected count, as the adaptor requires.
// When both counts are immediates and differ, *definitely_mismatches is set
// and (for CALL_FUNCTION) control returns to the caller via |done| only if
// the adaptor can fall through.
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Label* done,
                                    bool* definitely_mismatches,
                                    InvokeFlag flag,
                                    Label::Distance done_near,
                                    const CallWrapper& call_wrapper) {
  bool definitely_matches = false;
  *definitely_mismatches = false;
  Label invoke;
  if (expected.is_immediate()) {
    DCHECK(actual.is_immediate());
    mov(eax, actual.immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        *definitely_mismatches = true;
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      mov(eax, actual.immediate());
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      DCHECK(expected.reg().is(ebx));
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), actual.reg());
      j(equal, &invoke);
      DCHECK(actual.reg().is(eax));
      DCHECK(expected.reg().is(ebx));
    } else {
      Move(eax, actual.reg());
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET));
      call(adaptor, RelocInfo::CODE_TARGET);
      call_wrapper.AfterCall();
      if (!*definitely_mismatches) {
        jmp(done, done_near);
      }
    } else {
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
2359
2360
// If the debugger's last step action is at least StepIn, calls the runtime
// to flood |fun| with one-shot breakpoints so stepping enters it. All
// value-carrying registers (count registers, new.target, the function) are
// preserved across the runtime call by smi-tagging and pushing them.
void MacroAssembler::FloodFunctionIfStepping(Register fun, Register new_target,
                                             const ParameterCount& expected,
                                             const ParameterCount& actual) {
  Label skip_flooding;
  ExternalReference last_step_action =
      ExternalReference::debug_last_step_action_address(isolate());
  STATIC_ASSERT(StepFrame > StepIn);
  cmpb(Operand::StaticVariable(last_step_action), Immediate(StepIn));
  j(less, &skip_flooding);
  {
    // Enter an internal frame only if we are not already inside one.
    FrameScope frame(this,
                     has_frame() ? StackFrame::NONE : StackFrame::INTERNAL);
    if (expected.is_reg()) {
      SmiTag(expected.reg());
      Push(expected.reg());
    }
    if (actual.is_reg()) {
      SmiTag(actual.reg());
      Push(actual.reg());
    }
    if (new_target.is_valid()) {
      Push(new_target);
    }
    // Pushed twice: once as the runtime argument, once to survive the call.
    Push(fun);
    Push(fun);
    CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
    Pop(fun);
    if (new_target.is_valid()) {
      Pop(new_target);
    }
    if (actual.is_reg()) {
      Pop(actual.reg());
      SmiUntag(actual.reg());
    }
    if (expected.is_reg()) {
      Pop(expected.reg());
      SmiUntag(expected.reg());
    }
  }
  bind(&skip_flooding);
}
2402
2403
// Invokes the code of |function| (expected in edi), with |new_target| in edx
// or cleared to undefined. Handles debugger step-in flooding and argument
// count adaptation before calling or jumping to the function's code entry.
void MacroAssembler::InvokeFunctionCode(Register function, Register new_target,
                                        const ParameterCount& expected,
                                        const ParameterCount& actual,
                                        InvokeFlag flag,
                                        const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());
  DCHECK(function.is(edi));
  DCHECK_IMPLIES(new_target.is_valid(), new_target.is(edx));

  if (call_wrapper.NeedsDebugStepCheck()) {
    FloodFunctionIfStepping(function, new_target, expected, actual);
  }

  // Clear the new.target register if not given.
  if (!new_target.is_valid()) {
    mov(edx, isolate()->factory()->undefined_value());
  }

  Label done;
  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, &done, &definitely_mismatches, flag,
                 Label::kNear, call_wrapper);
  if (!definitely_mismatches) {
    // We call indirectly through the code field in the function to
    // allow recompilation to take effect without changing any of the
    // call sites.
    Operand code = FieldOperand(function, JSFunction::kCodeEntryOffset);
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(code));
      call(code);
      call_wrapper.AfterCall();
    } else {
      DCHECK(flag == JUMP_FUNCTION);
      jmp(code);
    }
    bind(&done);
  }
}
2443
2444
// Invokes |fun| (in edi) with |new_target|, loading the expected argument
// count (into ebx) and the function's context (into esi) from the function
// object itself.
void MacroAssembler::InvokeFunction(Register fun,
                                    Register new_target,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  DCHECK(fun.is(edi));
  mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(ebx, SharedFunctionInfo::kFormalParameterCountOffset));
  SmiUntag(ebx);  // The formal parameter count is stored as a smi.

  ParameterCount expected(ebx);
  InvokeFunctionCode(edi, new_target, expected, actual, flag, call_wrapper);
}
2462
2463
// Invokes |fun| (in edi) with a caller-supplied expected argument count and
// no new.target. Loads the function's context into esi before invoking.
void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  DCHECK(fun.is(edi));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  InvokeFunctionCode(edi, no_reg, expected, actual, flag, call_wrapper);
}
2477
2478
// Invokes a statically known |function|: materializes it into edi and
// delegates to the register-based overload.
void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  LoadHeapObject(edi, function);
  InvokeFunction(edi, expected, actual, flag, call_wrapper);
}
2487
2488
// Loads into |dst| the context |context_chain_length| levels up from the
// current context (esi). With length 0, copies esi so stores into |dst|
// cannot clobber the live context register.
void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    mov(dst, Operand(esi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    for (int i = 1; i < context_chain_length; i++) {
      mov(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    }
  } else {
    // Slot is in the current function context. Move it into the
    // destination register in case we store into it (the write barrier
    // cannot be allowed to destroy the context in esi).
    mov(dst, esi);
  }

  // We should not have found a with context by walking the context chain
  // (i.e., the static scope chain and runtime context chain do not agree).
  // A variable occurring in such a scope should have slot type LOOKUP and
  // not CONTEXT.
  if (emit_debug_code()) {
    cmp(FieldOperand(dst, HeapObject::kMapOffset),
        isolate()->factory()->with_context_map());
    Check(not_equal, kVariableResolvedToWithContext);
  }
}
2513
2514
// Loads the global proxy object into |dst| via the native context.
void MacroAssembler::LoadGlobalProxy(Register dst) {
  mov(dst, NativeContextOperand());
  mov(dst, ContextOperand(dst, Context::GLOBAL_PROXY_INDEX));
}
2519
2520
// If |map_in_out| holds the cached array map for |expected_kind|, replaces
// it with the cached map for |transitioned_kind|; otherwise jumps to
// |no_map_match|. |scratch| is clobbered with the native context.
void MacroAssembler::LoadTransitionedArrayMapConditional(
    ElementsKind expected_kind,
    ElementsKind transitioned_kind,
    Register map_in_out,
    Register scratch,
    Label* no_map_match) {
  DCHECK(IsFastElementsKind(expected_kind));
  DCHECK(IsFastElementsKind(transitioned_kind));

  // Check that the function's map is the same as the expected cached map.
  mov(scratch, NativeContextOperand());
  cmp(map_in_out,
      ContextOperand(scratch, Context::ArrayMapIndex(expected_kind)));
  j(not_equal, no_map_match);

  // Use the transitioned cached map.
  mov(map_in_out,
      ContextOperand(scratch, Context::ArrayMapIndex(transitioned_kind)));
}
2540
2541
// Loads the global function at slot |index| of the native context into
// |function|.
void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the native context from the current context.
  mov(function, NativeContextOperand());
  // Load the function from the native context.
  mov(function, ContextOperand(function, index));
}
2548
2549
// Loads |function|'s initial map into |map|. In debug code, verifies that
// the loaded object really is a map (its map is the meta map).
void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map. The global functions all have initial maps.
  mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
    jmp(&ok);
    bind(&fail);
    Abort(kGlobalFunctionsMustHaveInitialMap);
    bind(&ok);
  }
}
2563
Steve Blockd0582a62009-12-15 09:54:21 +00002564
// Store the value in register src in the safepoint register stack
// slot for register dst.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
  mov(SafepointRegisterSlot(dst), src);
}
2570
2571
// Stores the immediate |src| into the safepoint stack slot for register
// |dst|.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Immediate src) {
  mov(SafepointRegisterSlot(dst), src);
}
2575
2576
// Loads into |dst| the value saved in the safepoint stack slot for register
// |src|.
void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  mov(dst, SafepointRegisterSlot(src));
}
2580
2581
// Returns the stack operand addressing the safepoint slot for |reg|,
// relative to esp.
Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return Operand(esp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}
2585
2586
// Maps a register code to its index in the pushed safepoint register area.
int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
  // The registers are pushed starting with the lowest encoding,
  // which means that lowest encodings are furthest away from
  // the stack pointer.
  DCHECK(reg_code >= 0 && reg_code < kNumSafepointRegisters);
  return kNumSafepointRegisters - reg_code - 1;
}
2594
2595
// Loads |object| into |result|. New-space objects may move during GC, so
// they are loaded indirectly through a cell instead of being embedded as a
// raw address.
void MacroAssembler::LoadHeapObject(Register result,
                                    Handle<HeapObject> object) {
  AllowDeferredHandleDereference embedding_raw_address;
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<Cell> cell = isolate()->factory()->NewCell(object);
    mov(result, Operand::ForCell(cell));
  } else {
    mov(result, object);
  }
}
2606
2607
// Compares |reg| with |object|, indirecting through a cell for new-space
// objects (which may move during GC).
void MacroAssembler::CmpHeapObject(Register reg, Handle<HeapObject> object) {
  AllowDeferredHandleDereference using_raw_address;
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<Cell> cell = isolate()->factory()->NewCell(object);
    cmp(reg, Operand::ForCell(cell));
  } else {
    cmp(reg, object);
  }
}
2617
2618
// Pushes |object| onto the stack, indirecting through a cell for new-space
// objects (which may move during GC).
void MacroAssembler::PushHeapObject(Handle<HeapObject> object) {
  AllowDeferredHandleDereference using_raw_address;
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<Cell> cell = isolate()->factory()->NewCell(object);
    push(Operand::ForCell(cell));
  } else {
    Push(object);
  }
}
2628
2629
// Compares |value| against the value held by weak |cell|; |scratch| is
// clobbered with the cell itself.
void MacroAssembler::CmpWeakValue(Register value, Handle<WeakCell> cell,
                                  Register scratch) {
  mov(scratch, cell);
  cmp(value, FieldOperand(scratch, WeakCell::kValueOffset));
}
2635
2636
// Loads the value held by weak |cell| into |value|.
void MacroAssembler::GetWeakValue(Register value, Handle<WeakCell> cell) {
  mov(value, cell);
  mov(value, FieldOperand(value, WeakCell::kValueOffset));
}
2641
2642
// Loads the value held by weak |cell| into |value| and jumps to |miss| if
// the cell has been cleared (a cleared cell holds a smi).
void MacroAssembler::LoadWeakValue(Register value, Handle<WeakCell> cell,
                                   Label* miss) {
  GetWeakValue(value, cell);
  JumpIfSmi(value, miss);
}
2648
2649
// Plain return, dropping no arguments.
void MacroAssembler::Ret() {
  ret(0);
}
2653
2654
Steve Block1e0659c2011-05-24 12:43:12 +01002655void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
2656 if (is_uint16(bytes_dropped)) {
2657 ret(bytes_dropped);
2658 } else {
2659 pop(scratch);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002660 add(esp, Immediate(bytes_dropped));
Steve Block1e0659c2011-05-24 12:43:12 +01002661 push(scratch);
2662 ret(0);
2663 }
2664}
2665
2666
Leon Clarkee46be812010-01-19 14:06:41 +00002667void MacroAssembler::Drop(int stack_elements) {
2668 if (stack_elements > 0) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002669 add(esp, Immediate(stack_elements * kPointerSize));
Leon Clarkee46be812010-01-19 14:06:41 +00002670 }
2671}
2672
2673
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002674void MacroAssembler::Move(Register dst, Register src) {
2675 if (!dst.is(src)) {
2676 mov(dst, src);
2677 }
2678}
2679
2680
// Loads immediate |x| into |dst|, using xor for a reloc-free zero.
void MacroAssembler::Move(Register dst, const Immediate& x) {
  if (x.is_zero() && RelocInfo::IsNone(x.rmode_)) {
    xor_(dst, dst);  // Shorter than mov of 32-bit immediate 0.
  } else {
    mov(dst, x);
  }
}
2688
2689
// Stores immediate |x| into memory operand |dst|.
void MacroAssembler::Move(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}
2693
2694
// Loads the 32-bit constant |src| into XMM register |dst| without a memory
// constant. Zero uses pxor; a value whose set bits form one contiguous run
// is synthesized by shifting an all-ones register; everything else goes
// through eax (saved and restored around the transfer).
void MacroAssembler::Move(XMMRegister dst, uint32_t src) {
  if (src == 0) {
    pxor(dst, dst);
  } else {
    unsigned cnt = base::bits::CountPopulation32(src);
    unsigned nlz = base::bits::CountLeadingZeros32(src);
    unsigned ntz = base::bits::CountTrailingZeros32(src);
    // nlz + cnt + ntz == 32 means the set bits are one contiguous run.
    if (nlz + cnt + ntz == 32) {
      pcmpeqd(dst, dst);  // dst = all ones.
      if (ntz == 0) {
        psrld(dst, 32 - cnt);
      } else {
        pslld(dst, 32 - cnt);
        if (nlz != 0) psrld(dst, nlz);
      }
    } else {
      push(eax);
      mov(eax, Immediate(src));
      movd(dst, Operand(eax));
      pop(eax);
    }
  }
}
2718
2719
// Loads the 64-bit constant |src| into XMM register |dst| without a memory
// constant where possible: pxor for zero, an all-ones shift sequence for a
// single contiguous run of set bits, a shifted 32-bit move when the low half
// is zero, pinsrd when SSE4.1 is available, and otherwise a push/push/movsd
// through the stack.
void MacroAssembler::Move(XMMRegister dst, uint64_t src) {
  if (src == 0) {
    pxor(dst, dst);
  } else {
    uint32_t lower = static_cast<uint32_t>(src);
    uint32_t upper = static_cast<uint32_t>(src >> 32);
    unsigned cnt = base::bits::CountPopulation64(src);
    unsigned nlz = base::bits::CountLeadingZeros64(src);
    unsigned ntz = base::bits::CountTrailingZeros64(src);
    // nlz + cnt + ntz == 64 means the set bits are one contiguous run.
    if (nlz + cnt + ntz == 64) {
      pcmpeqd(dst, dst);  // dst = all ones.
      if (ntz == 0) {
        psrlq(dst, 64 - cnt);
      } else {
        psllq(dst, 64 - cnt);
        if (nlz != 0) psrlq(dst, nlz);
      }
    } else if (lower == 0) {
      Move(dst, upper);
      psllq(dst, 32);
    } else if (CpuFeatures::IsSupported(SSE4_1)) {
      CpuFeatureScope scope(this, SSE4_1);
      push(eax);
      Move(eax, Immediate(lower));
      movd(dst, Operand(eax));
      Move(eax, Immediate(upper));
      pinsrd(dst, Operand(eax), 1);
      pop(eax);
    } else {
      push(Immediate(upper));
      push(Immediate(lower));
      movsd(dst, Operand(esp, 0));
      add(esp, Immediate(kDoubleSize));
    }
  }
}
2756
2757
// Extracts 32-bit lane |imm8| (0 or 1) of |src| into |dst|. Uses pextrd when
// SSE4.1 is available; the SSE2 fallback for lane 1 clobbers xmm0.
void MacroAssembler::Pextrd(Register dst, XMMRegister src, int8_t imm8) {
  if (imm8 == 0) {
    movd(dst, src);  // Lane 0 needs no shuffle.
    return;
  }
  DCHECK_EQ(1, imm8);
  if (CpuFeatures::IsSupported(SSE4_1)) {
    CpuFeatureScope sse_scope(this, SSE4_1);
    pextrd(dst, src, imm8);
    return;
  }
  pshufd(xmm0, src, 1);  // Move lane 1 into lane 0 of xmm0.
  movd(dst, xmm0);
}
2772
2773
// Inserts the 32-bit value at |src| into lane |imm8| (0 or 1) of |dst|.
// Uses pinsrd when SSE4.1 is available; the SSE2 fallback clobbers xmm0.
void MacroAssembler::Pinsrd(XMMRegister dst, const Operand& src, int8_t imm8) {
  DCHECK(imm8 == 0 || imm8 == 1);
  if (CpuFeatures::IsSupported(SSE4_1)) {
    CpuFeatureScope sse_scope(this, SSE4_1);
    pinsrd(dst, src, imm8);
    return;
  }
  movd(xmm0, src);
  if (imm8 == 1) {
    punpckldq(dst, xmm0);  // Interleave: dst lane 0 stays, new value -> lane 1.
  } else {
    DCHECK_EQ(0, imm8);
    psrlq(dst, 32);         // Move old lane 1 down to lane 0.
    punpckldq(xmm0, dst);   // Interleave: new value -> lane 0, old -> lane 1.
    movaps(dst, xmm0);
  }
}
2791
2792
2793void MacroAssembler::Lzcnt(Register dst, const Operand& src) {
2794 if (CpuFeatures::IsSupported(LZCNT)) {
2795 CpuFeatureScope scope(this, LZCNT);
2796 lzcnt(dst, src);
2797 return;
2798 }
2799 Label not_zero_src;
2800 bsr(dst, src);
2801 j(not_zero, &not_zero_src, Label::kNear);
2802 Move(dst, Immediate(63)); // 63^31 == 32
2803 bind(&not_zero_src);
2804 xor_(dst, Immediate(31)); // for x in [0..31], 31^x == 31-x.
2805}
2806
2807
2808void MacroAssembler::Tzcnt(Register dst, const Operand& src) {
2809 if (CpuFeatures::IsSupported(BMI1)) {
2810 CpuFeatureScope scope(this, BMI1);
2811 tzcnt(dst, src);
2812 return;
2813 }
2814 Label not_zero_src;
2815 bsf(dst, src);
2816 j(not_zero, &not_zero_src, Label::kNear);
2817 Move(dst, Immediate(32)); // The result of tzcnt is 32 if src = 0.
2818 bind(&not_zero_src);
2819}
2820
2821
// Emits a population count of |src| into |dst|. There is no software
// fallback: callers must have checked for POPCNT support.
void MacroAssembler::Popcnt(Register dst, const Operand& src) {
  if (CpuFeatures::IsSupported(POPCNT)) {
    CpuFeatureScope scope(this, POPCNT);
    popcnt(dst, src);
    return;
  }
  UNREACHABLE();
}
2830
2831
// Sets |counter| to |value| if native code counters are enabled.
void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
  }
}
2837
2838
2839void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002840 DCHECK(value > 0);
Steve Blocka7e24c12009-10-30 11:49:00 +00002841 if (FLAG_native_code_counters && counter->Enabled()) {
2842 Operand operand = Operand::StaticVariable(ExternalReference(counter));
2843 if (value == 1) {
2844 inc(operand);
2845 } else {
2846 add(operand, Immediate(value));
2847 }
2848 }
2849}
2850
2851
2852void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002853 DCHECK(value > 0);
Steve Blocka7e24c12009-10-30 11:49:00 +00002854 if (FLAG_native_code_counters && counter->Enabled()) {
2855 Operand operand = Operand::StaticVariable(ExternalReference(counter));
2856 if (value == 1) {
2857 dec(operand);
2858 } else {
2859 sub(operand, Immediate(value));
2860 }
2861 }
2862}
2863
2864
// Conditionally increments |counter| by |value| when condition |cc| holds.
// The EFLAGS register is saved/restored (pushfd/popfd) so the caller's
// condition codes survive the counter update.
void MacroAssembler::IncrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    IncrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}
2878
2879
// Conditionally decrements |counter| by |value| when condition |cc| holds.
// The EFLAGS register is saved/restored (pushfd/popfd) so the caller's
// condition codes survive the counter update.
void MacroAssembler::DecrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    DecrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}
2893
2894
// Emits a Check for condition |cc| only in debug-code builds.
void MacroAssembler::Assert(Condition cc, BailoutReason reason) {
  if (emit_debug_code()) Check(cc, reason);
}
2898
2899
// Debug-code check that |elements| is a fast elements backing store: a
// FixedArray, FixedDoubleArray, or copy-on-write FixedArray. Aborts
// otherwise.
void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    Factory* factory = isolate()->factory();
    Label ok;
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_double_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_cow_array_map()));
    j(equal, &ok);
    Abort(kJSObjectWithFastElementsMapHasSlowElements);
    bind(&ok);
  }
}
2917
2918
// Emits code that aborts with |reason| unless condition |cc| holds. Unlike
// Assert, this check is emitted in release code as well.
void MacroAssembler::Check(Condition cc, BailoutReason reason) {
  Label L;
  j(cc, &L);  // Condition holds: skip the abort.
  Abort(reason);
  // will not return here
  bind(&L);
}
2926
2927
// Emits a check that esp is aligned to the platform's activation frame
// alignment, trapping with int3 if it is not. Emits nothing when the
// required alignment is no stricter than one pointer.
void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = base::OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    Label alignment_as_expected;
    // Any low bit set in esp means the stack is misaligned.
    test(esp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}
2941
2942
// Emits code that aborts execution with the given bailout reason by calling
// Runtime::kAbort with the reason as a smi argument. In debug builds the
// reason text is recorded in the disassembly, and --trap-on-abort replaces
// the runtime call with a plain int3 trap.
void MacroAssembler::Abort(BailoutReason reason) {
#ifdef DEBUG
  const char* msg = GetBailoutReason(reason);
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }

  if (FLAG_trap_on_abort) {
    int3();
    return;
  }
#endif

  // Pass the reason to the runtime as a smi pushed on the stack.
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(reason))));
  // Disable stub call restrictions to always allow calls to abort.
  if (!has_frame_) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    CallRuntime(Runtime::kAbort);
  } else {
    CallRuntime(Runtime::kAbort);
  }
  // will not return here
  int3();
}
2970
2971
// Loads the descriptor array of |map| into |descriptors|.
void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  mov(descriptors, FieldOperand(map, Map::kDescriptorsOffset));
}
2976
2977
// Extracts the number-of-own-descriptors bit field from |map|'s bit field 3
// into |dst| (as an untagged integer).
void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
  mov(dst, FieldOperand(map, Map::kBitField3Offset));
  DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
}
2982
2983
// Loads the getter or setter function (selected by |accessor|) of the
// AccessorPair stored at |accessor_index| in |holder|'s descriptor array
// into |dst|. |dst| is used as its own scratch through the chain of loads.
void MacroAssembler::LoadAccessor(Register dst, Register holder,
                                  int accessor_index,
                                  AccessorComponent accessor) {
  mov(dst, FieldOperand(holder, HeapObject::kMapOffset));
  LoadInstanceDescriptors(dst, dst);
  mov(dst, FieldOperand(dst, DescriptorArray::GetValueOffset(accessor_index)));
  int offset = accessor == ACCESSOR_GETTER ? AccessorPair::kGetterOffset
                                           : AccessorPair::kSetterOffset;
  mov(dst, FieldOperand(dst, offset));
}
2994
2995
// Materializes the double value 2^|power| in |dst| by constructing its
// IEEE-754 bit pattern directly: the biased exponent is placed in |scratch|,
// moved into the XMM register, and shifted up past the mantissa bits.
// Clobbers |scratch|.
void MacroAssembler::LoadPowerOf2(XMMRegister dst,
                                  Register scratch,
                                  int power) {
  // The biased exponent must fit in the exponent field.
  DCHECK(is_uintn(power + HeapNumber::kExponentBias,
                  HeapNumber::kExponentBits));
  mov(scratch, Immediate(power + HeapNumber::kExponentBias));
  movd(dst, scratch);
  psllq(dst, HeapNumber::kMantissaBits);
}
3005
3006
// Jumps to |failure| unless |instance_type| denotes a sequential one-byte
// string. Masks off everything but the string/representation/encoding bits
// and compares against the expected tag combination. Clobbers |scratch|
// (which may alias |instance_type|).
void MacroAssembler::JumpIfInstanceTypeIsNotSequentialOneByte(
    Register instance_type, Register scratch, Label* failure) {
  if (!scratch.is(instance_type)) {
    mov(scratch, instance_type);
  }
  and_(scratch,
       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
  cmp(scratch, kStringTag | kSeqStringTag | kOneByteStringTag);
  j(not_equal, failure);
}
3017
3018
// Jumps to |failure| unless both |object1| and |object2| are flat
// (sequential) one-byte strings. Clobbers |scratch1| and |scratch2|.
void MacroAssembler::JumpIfNotBothSequentialOneByteStrings(Register object1,
                                                           Register object2,
                                                           Register scratch1,
                                                           Register scratch2,
                                                           Label* failure) {
  // Check that both objects are not smis. ANDing the two values preserves a
  // zero tag bit only if at least one of them is a smi.
  STATIC_ASSERT(kSmiTag == 0);
  mov(scratch1, object1);
  and_(scratch1, object2);
  JumpIfSmi(scratch1, failure);

  // Load instance type for both strings.
  mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
  mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
  movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat one-byte strings.
  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatOneByteStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;
  // Interleave bits from both instance types and compare them in one check.
  // The shift by 3 requires the mask and its shifted copy to be disjoint.
  DCHECK_EQ(0, kFlatOneByteStringMask & (kFlatOneByteStringMask << 3));
  and_(scratch1, kFlatOneByteStringMask);
  and_(scratch2, kFlatOneByteStringMask);
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmp(scratch1, kFlatOneByteStringTag | (kFlatOneByteStringTag << 3));
  j(not_equal, failure);
}
3049
3050
// Jumps to |not_unique_name| unless the instance type in |operand| denotes a
// unique name: either an internalized string or a symbol.
void MacroAssembler::JumpIfNotUniqueNameInstanceType(Operand operand,
                                                     Label* not_unique_name,
                                                     Label::Distance distance) {
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  Label succeed;
  // Internalized string: both the not-a-string and not-internalized bits
  // must be clear.
  test(operand, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  j(zero, &succeed);
  // Otherwise only a symbol qualifies.
  cmpb(operand, Immediate(SYMBOL_TYPE));
  j(not_equal, not_unique_name, distance);

  bind(&succeed);
}
3063
3064
// Emits debug checks for a sequential-string character store: |string| must
// be a non-smi whose representation/encoding matches |encoding_mask|, and
// the untagged |index| must be within [0, length). |value| is used as a
// scratch register but is preserved via push/pop; |index| is preserved via
// SmiTag/SmiUntag.
void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
                                               Register index,
                                               Register value,
                                               uint32_t encoding_mask) {
  Label is_object;
  JumpIfNotSmi(string, &is_object, Label::kNear);
  Abort(kNonObject);
  bind(&is_object);

  // Borrow |value| to hold the instance type; restored below.
  push(value);
  mov(value, FieldOperand(string, HeapObject::kMapOffset));
  movzx_b(value, FieldOperand(value, Map::kInstanceTypeOffset));

  and_(value, Immediate(kStringRepresentationMask | kStringEncodingMask));
  cmp(value, Immediate(encoding_mask));
  pop(value);
  Check(equal, kUnexpectedStringType);

  // The index is assumed to be untagged coming in, tag it to compare with the
  // string length without using a temp register, it is restored at the end of
  // this function.
  SmiTag(index);
  Check(no_overflow, kIndexIsTooLarge);

  cmp(index, FieldOperand(string, String::kLengthOffset));
  Check(less, kIndexIsTooLarge);

  cmp(index, Immediate(Smi::FromInt(0)));
  Check(greater_equal, kIndexIsNegative);

  // Restore the index
  SmiUntag(index);
}
3098
3099
// Reserves stack space for |num_arguments| C-call argument slots and, when
// the ABI requires it, aligns esp to the activation frame alignment. The
// pre-alignment esp is saved in the slot past the arguments so CallCFunction
// can restore it. Clobbers |scratch|.
void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
  int frame_alignment = base::OS::ActivationFrameAlignment();
  if (frame_alignment != 0) {
    // Make stack end at alignment and make room for num_arguments words
    // and the original value of esp.
    mov(scratch, esp);
    sub(esp, Immediate((num_arguments + 1) * kPointerSize));
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    and_(esp, -frame_alignment);
    mov(Operand(esp, num_arguments * kPointerSize), scratch);
  } else {
    sub(esp, Immediate(num_arguments * kPointerSize));
  }
}
3114
3115
// Calls the C function at the given external reference with |num_arguments|
// already set up by PrepareCallCFunction. Materializes the target in eax.
void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Trashing eax is ok as it will be the return value.
  mov(eax, Immediate(function));
  CallCFunction(eax, num_arguments);
}
3122
3123
// Calls the C function whose address is in |function|, then pops the
// argument area set up by PrepareCallCFunction: when the ABI aligns the
// frame, esp is restored from the saved slot; otherwise the argument slots
// are simply popped.
void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  DCHECK(has_frame());
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(function);
  if (base::OS::ActivationFrameAlignment() != 0) {
    // Restore the esp value saved by PrepareCallCFunction.
    mov(esp, Operand(esp, num_arguments * kPointerSize));
  } else {
    add(esp, Immediate(num_arguments * kPointerSize));
  }
}
3139
3140
#ifdef DEBUG
// Returns true if any two of the valid registers among the arguments denote
// the same machine register. Invalid (unused) register arguments are
// ignored. Debug-only helper used in DCHECKs.
bool AreAliased(Register reg1,
                Register reg2,
                Register reg3,
                Register reg4,
                Register reg5,
                Register reg6,
                Register reg7,
                Register reg8) {
  const Register candidates[] = {reg1, reg2, reg3, reg4,
                                 reg5, reg6, reg7, reg8};
  int n_of_valid_regs = 0;
  RegList regs = 0;
  for (const Register& reg : candidates) {
    if (!reg.is_valid()) continue;
    n_of_valid_regs++;
    regs |= reg.bit();
  }
  // If two valid arguments alias, the register bit set has fewer members
  // than the number of valid arguments.
  return n_of_valid_regs != NumRegs(regs);
}
#endif
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003168
3169
// Creates a macro assembler that writes directly over |size| bytes of
// already-generated code at |address|.
CodePatcher::CodePatcher(Isolate* isolate, byte* address, int size)
    : address_(address),
      size_(size),
      masm_(isolate, address, size + Assembler::kGap, CodeObjectRequired::kNo) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap on order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
3179
3180
// Flushes the instruction cache for the patched region and verifies that
// exactly |size_| bytes were emitted.
CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  Assembler::FlushICache(masm_.isolate(), address_, size_);

  // Check that the code was patched as expected.
  DCHECK(masm_.pc_ == address_ + size_);
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
3189
3190
// Tests the flags word of the memory page containing |object| against |mask|
// and jumps to |condition_met| when the test result matches |cc| (zero or
// not_zero). Clobbers |scratch| (which may alias |object|).
void MacroAssembler::CheckPageFlag(
    Register object,
    Register scratch,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  DCHECK(cc == zero || cc == not_zero);
  // Compute the page start by masking off the in-page offset bits.
  if (scratch.is(object)) {
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    mov(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  // Use a single-byte test when the mask fits in one byte.
  if (mask < (1 << kBitsPerByte)) {
    test_b(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
  } else {
    test(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}
3212
3213
// Like CheckPageFlag, but for a statically known |map|: the flags word of
// the page containing the map is addressed directly through an external
// reference, so no scratch register is needed. Relies on maps never being
// moved by the compactor, and is unusable when serializing.
void MacroAssembler::CheckPageFlagForMap(
    Handle<Map> map,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  DCHECK(cc == zero || cc == not_zero);
  Page* page = Page::FromAddress(map->address());
  DCHECK(!serializer_enabled());  // Serializer cannot match page_flags.
  ExternalReference reference(ExternalReference::page_flags(page));
  // The inlined static address check of the page's flags relies
  // on maps never being compacted.
  DCHECK(!isolate()->heap()->mark_compact_collector()->
         IsOnEvacuationCandidate(*map));
  // Use a single-byte test when the mask fits in one byte.
  if (mask < (1 << kBitsPerByte)) {
    test_b(Operand::StaticVariable(reference), Immediate(mask));
  } else {
    test(Operand::StaticVariable(reference), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}
3235
3236
// Jumps to |on_black| if |object| is marked black (bit pattern "11") in the
// mark bitmap. Clobbers both scratch registers (and ecx via HasColor).
void MacroAssembler::JumpIfBlack(Register object,
                                 Register scratch0,
                                 Register scratch1,
                                 Label* on_black,
                                 Label::Distance on_black_near) {
  HasColor(object, scratch0, scratch1, on_black, on_black_near, 1,
           1);  // kBlackBitPattern.
  DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
}
3246
3247
// Jumps to |has_color| if |object|'s two mark bits equal (first_bit,
// second_bit). The two bits may straddle a bitmap cell boundary, in which
// case the second bit is read from the first byte of the next cell.
// Clobbers |bitmap_scratch|, |mask_scratch| and ecx (via GetMarkBits).
void MacroAssembler::HasColor(Register object,
                              Register bitmap_scratch,
                              Register mask_scratch,
                              Label* has_color,
                              Label::Distance has_color_distance,
                              int first_bit,
                              int second_bit) {
  DCHECK(!AreAliased(object, bitmap_scratch, mask_scratch, ecx));

  GetMarkBits(object, bitmap_scratch, mask_scratch);

  Label other_color, word_boundary;
  // Test the first mark bit.
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(first_bit == 1 ? zero : not_zero, &other_color, Label::kNear);
  add(mask_scratch, mask_scratch);  // Shift left 1 by adding.
  j(zero, &word_boundary, Label::kNear);  // Mask overflowed out of the cell.
  // Test the second mark bit within the same cell.
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  jmp(&other_color, Label::kNear);

  bind(&word_boundary);
  // Second bit is the lowest bit of the next bitmap cell.
  test_b(Operand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize),
         Immediate(1));

  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  bind(&other_color);
}
3275
3276
// Computes the mark-bitmap location for the object at |addr_reg|:
// |bitmap_reg| receives the address of the bitmap cell (page start plus
// cell offset) and |mask_reg| a single-bit mask selecting the object's mark
// bit within that cell. Clobbers ecx.
void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  DCHECK(!AreAliased(addr_reg, mask_reg, bitmap_reg, ecx));
  // Page start.
  mov(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
  and_(bitmap_reg, addr_reg);
  // Cell index within the page, scaled to a byte offset.
  mov(ecx, addr_reg);
  int shift =
      Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
  shr(ecx, shift);
  and_(ecx,
       (Page::kPageAlignmentMask >> shift) & ~(Bitmap::kBytesPerCell - 1));

  add(bitmap_reg, ecx);
  // Bit index within the cell; ecx feeds the variable shift below.
  mov(ecx, addr_reg);
  shr(ecx, kPointerSizeLog2);
  and_(ecx, (1 << Bitmap::kBitsPerCellLog2) - 1);
  mov(mask_reg, Immediate(1));
  shl_cl(mask_reg);
}
3297
3298
// Jumps to |value_is_white| if |value| is marked white ("00") in the mark
// bitmap. Black ("11") and grey ("10") both have the first bit set and
// white does not, so a single bit test suffices. Clobbers both scratch
// registers and ecx (via GetMarkBits).
void MacroAssembler::JumpIfWhite(Register value, Register bitmap_scratch,
                                 Register mask_scratch, Label* value_is_white,
                                 Label::Distance distance) {
  DCHECK(!AreAliased(value, bitmap_scratch, mask_scratch, ecx));
  GetMarkBits(value, bitmap_scratch, mask_scratch);

  // If the value is black or grey we don't need to do anything.
  DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0);
  DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
  DCHECK(strcmp(Marking::kGreyBitPattern, "10") == 0);
  DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

  // Since both black and grey have a 1 in the first position and white does
  // not have a 1 there we only need to check one bit.
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(zero, value_is_white, Label::kNear);
}
3316
3317
// Loads the enum-cache length field of |map| into |dst| as a smi.
void MacroAssembler::EnumLength(Register dst, Register map) {
  // The field sits at bit 0, so masking alone extracts it.
  STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
  mov(dst, FieldOperand(map, Map::kBitField3Offset));
  and_(dst, Immediate(Map::EnumLengthBits::kMask));
  SmiTag(dst);
}
3324
3325
// Walks the prototype chain of the object in eax and jumps to |call_runtime|
// unless every map has a valid (and, past the receiver, empty) enum cache
// and every object has no elements. Clobbers ebx, ecx and edx.
void MacroAssembler::CheckEnumCache(Label* call_runtime) {
  Label next, start;
  mov(ecx, eax);

  // Check if the enum length field is properly initialized, indicating that
  // there is an enum cache.
  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));

  EnumLength(edx, ebx);
  cmp(edx, Immediate(Smi::FromInt(kInvalidEnumCacheSentinel)));
  j(equal, call_runtime);

  jmp(&start);

  bind(&next);
  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));

  // For all objects but the receiver, check that the cache is empty.
  EnumLength(edx, ebx);
  cmp(edx, Immediate(Smi::FromInt(0)));
  j(not_equal, call_runtime);

  bind(&start);

  // Check that there are no elements. Register ecx contains the current JS
  // object we've reached through the prototype chain.
  Label no_elements;
  mov(ecx, FieldOperand(ecx, JSObject::kElementsOffset));
  cmp(ecx, isolate()->factory()->empty_fixed_array());
  j(equal, &no_elements);

  // Second chance, the object may be using the empty slow element dictionary.
  cmp(ecx, isolate()->factory()->empty_slow_element_dictionary());
  j(not_equal, call_runtime);

  bind(&no_elements);
  // Advance to the prototype; stop when we reach null.
  mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
  cmp(ecx, isolate()->factory()->null_value());
  j(not_equal, &next);
}
3366
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003367
// Checks whether the JSArray in |receiver_reg| is directly followed in
// memory by an AllocationMemento. On exit the caller must branch on the
// equal condition: equal means a memento was found. Jumps to
// |no_memento_found| when the object cannot be followed by a memento (old
// space, page boundary, or above the allocation top). Clobbers
// |scratch_reg|.
void MacroAssembler::TestJSArrayForAllocationMemento(
    Register receiver_reg,
    Register scratch_reg,
    Label* no_memento_found) {
  Label map_check;
  Label top_check;
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());
  const int kMementoMapOffset = JSArray::kSize - kHeapObjectTag;
  const int kMementoEndOffset = kMementoMapOffset + AllocationMemento::kSize;

  // Bail out if the object is not in new space.
  JumpIfNotInNewSpace(receiver_reg, scratch_reg, no_memento_found);
  // If the object is in new space, we need to check whether it is on the same
  // page as the current top.
  lea(scratch_reg, Operand(receiver_reg, kMementoEndOffset));
  xor_(scratch_reg, Operand::StaticVariable(new_space_allocation_top));
  test(scratch_reg, Immediate(~Page::kPageAlignmentMask));
  j(zero, &top_check);
  // The object is on a different page than allocation top. Bail out if the
  // object sits on the page boundary as no memento can follow and we cannot
  // touch the memory following it.
  lea(scratch_reg, Operand(receiver_reg, kMementoEndOffset));
  xor_(scratch_reg, receiver_reg);
  test(scratch_reg, Immediate(~Page::kPageAlignmentMask));
  j(not_zero, no_memento_found);
  // Continue with the actual map check.
  jmp(&map_check);
  // If top is on the same page as the current object, we need to check whether
  // we are below top.
  bind(&top_check);
  lea(scratch_reg, Operand(receiver_reg, kMementoEndOffset));
  cmp(scratch_reg, Operand::StaticVariable(new_space_allocation_top));
  j(greater, no_memento_found);
  // Memento map check.
  bind(&map_check);
  mov(scratch_reg, Operand(receiver_reg, kMementoMapOffset));
  cmp(scratch_reg, Immediate(isolate()->factory()->allocation_memento_map()));
}
3407
3408
// Walks |object|'s prototype chain and jumps to |found| if any prototype is
// a special receiver (instance type below JS_OBJECT_TYPE) or has dictionary
// elements. Falls through when the chain ends at null. Clobbers |scratch0|
// and |scratch1|.
void MacroAssembler::JumpIfDictionaryInPrototypeChain(
    Register object,
    Register scratch0,
    Register scratch1,
    Label* found) {
  DCHECK(!scratch1.is(scratch0));
  Factory* factory = isolate()->factory();
  Register current = scratch0;
  Label loop_again, end;

  // Start at the first prototype; a null prototype means nothing to check.
  mov(current, object);
  mov(current, FieldOperand(current, HeapObject::kMapOffset));
  mov(current, FieldOperand(current, Map::kPrototypeOffset));
  cmp(current, Immediate(factory->null_value()));
  j(equal, &end);

  // Loop based on the map going up the prototype chain.
  bind(&loop_again);
  mov(current, FieldOperand(current, HeapObject::kMapOffset));
  // Proxies and values sort below JS_OBJECT_TYPE, so a single unsigned
  // compare catches both.
  STATIC_ASSERT(JS_PROXY_TYPE < JS_OBJECT_TYPE);
  STATIC_ASSERT(JS_VALUE_TYPE < JS_OBJECT_TYPE);
  CmpInstanceType(current, JS_OBJECT_TYPE);
  j(below, found);
  mov(scratch1, FieldOperand(current, Map::kBitField2Offset));
  DecodeField<Map::ElementsKindBits>(scratch1);
  cmp(scratch1, Immediate(DICTIONARY_ELEMENTS));
  j(equal, found);
  mov(current, FieldOperand(current, Map::kPrototypeOffset));
  cmp(current, Immediate(factory->null_value()));
  j(not_equal, &loop_again);

  bind(&end);
}
3443
3444
// Emits code computing the truncated signed division of |dividend| by the
// compile-time constant |divisor| without a div instruction, using the
// multiply-by-magic-number technique (Granlund/Montgomery). The quotient is
// left in edx; eax is clobbered. |dividend| must not be eax or edx.
void MacroAssembler::TruncatingDiv(Register dividend, int32_t divisor) {
  DCHECK(!dividend.is(eax));
  DCHECK(!dividend.is(edx));
  base::MagicNumbersForDivision<uint32_t> mag =
      base::SignedDivisionByConstant(static_cast<uint32_t>(divisor));
  // High half of dividend * magic lands in edx.
  mov(eax, Immediate(mag.multiplier));
  imul(dividend);
  // Correction terms when the magic multiplier's sign bit is set.
  bool neg = (mag.multiplier & (static_cast<uint32_t>(1) << 31)) != 0;
  if (divisor > 0 && neg) add(edx, dividend);
  if (divisor < 0 && !neg && mag.multiplier > 0) sub(edx, dividend);
  if (mag.shift > 0) sar(edx, mag.shift);
  // Add the sign bit of the dividend to round toward zero.
  mov(eax, dividend);
  shr(eax, 31);
  add(edx, eax);
}
3460
3461
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003462} // namespace internal
3463} // namespace v8
Leon Clarkef7060e22010-06-03 12:02:55 +01003464
3465#endif // V8_TARGET_ARCH_IA32