blob: 12daec8285c79fea84149f25660fb1f593978118 [file] [log] [blame]
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001// Copyright 2012 the V8 project authors. All rights reserved.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
Steve Blocka7e24c12009-10-30 11:49:00 +00004
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005#if V8_TARGET_ARCH_IA32
Leon Clarkef7060e22010-06-03 12:02:55 +01006
Ben Murdochb8a8cc12014-11-26 15:28:44 +00007#include "src/base/bits.h"
8#include "src/base/division-by-constant.h"
9#include "src/bootstrapper.h"
10#include "src/codegen.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000011#include "src/debug/debug.h"
12#include "src/ia32/frames-ia32.h"
13#include "src/ia32/macro-assembler-ia32.h"
Emily Bernierd0a1eb72015-03-24 16:35:39 -040014#include "src/runtime/runtime.h"
Steve Blocka7e24c12009-10-30 11:49:00 +000015
16namespace v8 {
17namespace internal {
18
19// -------------------------------------------------------------------------
20// MacroAssembler implementation.
21
// Constructs a macro assembler emitting into |buffer| of |size| bytes.
// When a code object is required, code_object_ is seeded with the undefined
// value so it is a valid handle before the real code object is installed.
MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size,
                               CodeObjectRequired create_code_object)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      has_frame_(false) {
  if (create_code_object == CodeObjectRequired::kYes) {
    code_object_ =
        Handle<Object>::New(isolate()->heap()->undefined_value(), isolate());
  }
}
32
33
// Loads |src| into |dst| with the width and signedness implied by the
// representation |r|: sign-extending moves for signed 8/16-bit values,
// zero-extending moves for unsigned ones, and a plain 32-bit mov otherwise.
// Double representations are not handled here (see the DCHECK).
void MacroAssembler::Load(Register dst, const Operand& src, Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8()) {
    movsx_b(dst, src);
  } else if (r.IsUInteger8()) {
    movzx_b(dst, src);
  } else if (r.IsInteger16()) {
    movsx_w(dst, src);
  } else if (r.IsUInteger16()) {
    movzx_w(dst, src);
  } else {
    mov(dst, src);
  }
}
48
49
// Stores |src| to |dst| with the width implied by the representation |r|.
// For full pointer-width stores, debug builds additionally assert that the
// value is (or is not) a smi when the representation says so.
void MacroAssembler::Store(Register src, const Operand& dst, Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8() || r.IsUInteger8()) {
    mov_b(dst, src);
  } else if (r.IsInteger16() || r.IsUInteger16()) {
    mov_w(dst, src);
  } else {
    if (r.IsHeapObject()) {
      AssertNotSmi(src);
    } else if (r.IsSmi()) {
      AssertSmi(src);
    }
    mov(dst, src);
  }
}
65
66
// Loads the root value at |index| into |destination|. Roots that can be
// treated as constants are embedded directly as an immediate handle;
// otherwise the value is fetched from the isolate's roots array, reusing
// |destination| as the index scratch register.
void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  if (isolate()->heap()->RootCanBeTreatedAsConstant(index)) {
    mov(destination, isolate()->heap()->root_handle(index));
    return;
  }
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(destination, Immediate(index));
  mov(destination, Operand::StaticArray(destination,
                                        times_pointer_size,
                                        roots_array_start));
}
79
80
81void MacroAssembler::StoreRoot(Register source,
82 Register scratch,
83 Heap::RootListIndex index) {
84 DCHECK(Heap::RootCanBeWrittenAfterInitialization(index));
85 ExternalReference roots_array_start =
86 ExternalReference::roots_array_start(isolate());
87 mov(scratch, Immediate(index));
88 mov(Operand::StaticArray(scratch, times_pointer_size, roots_array_start),
89 source);
90}
91
92
// Compares |with| against the root at |index| via the roots array, setting
// the processor flags for a subsequent conditional jump. |scratch| is
// clobbered (it holds the array index).
void MacroAssembler::CompareRoot(Register with,
                                 Register scratch,
                                 Heap::RootListIndex index) {
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(scratch, Immediate(index));
  cmp(with, Operand::StaticArray(scratch,
                                 times_pointer_size,
                                 roots_array_start));
}
103
104
// Compares |with| against a constant root embedded as an immediate handle.
// Only valid for roots that never move (enforced by the DCHECK).
void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  cmp(with, isolate()->heap()->root_handle(index));
}
109
110
// Memory-operand variant: compares |with| against a constant root embedded
// as an immediate handle.
void MacroAssembler::CompareRoot(const Operand& with,
                                 Heap::RootListIndex index) {
  DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  cmp(with, isolate()->heap()->root_handle(index));
}
116
117
// Pushes a constant root onto the stack as an immediate handle.
void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  Push(isolate()->heap()->root_handle(index));
}
122
// Helper to spell a Register aggregate from its register-code constant.
#define REG(Name) \
  { Register::kCode_##Name }

// General-purpose registers saved/restored by PushCallerSaved/PopCallerSaved.
static const Register saved_regs[] = {REG(eax), REG(ecx), REG(edx)};

#undef REG

static const int kNumberOfSavedRegs = sizeof(saved_regs) / sizeof(Register);
131
// Pushes the caller-saved GP registers (eax/ecx/edx) except any listed as
// exclusions, and, if |fp_mode| requests it, spills XMM1..XMM(max-1) to a
// stack slab. XMM0 is deliberately not saved. Must be mirrored exactly by
// PopCallerSaved.
void MacroAssembler::PushCallerSaved(SaveFPRegsMode fp_mode,
                                     Register exclusion1, Register exclusion2,
                                     Register exclusion3) {
  // We don't allow a GC during a store buffer overflow so there is no need to
  // store the registers in any particular way, but we do have to store and
  // restore them.
  for (int i = 0; i < kNumberOfSavedRegs; i++) {
    Register reg = saved_regs[i];
    if (!reg.is(exclusion1) && !reg.is(exclusion2) && !reg.is(exclusion3)) {
      push(reg);
    }
  }
  if (fp_mode == kSaveFPRegs) {
    sub(esp, Immediate(kDoubleSize * (XMMRegister::kMaxNumRegisters - 1)));
    // Save all XMM registers except XMM0.
    for (int i = XMMRegister::kMaxNumRegisters - 1; i > 0; i--) {
      XMMRegister reg = XMMRegister::from_code(i);
      movsd(Operand(esp, (i - 1) * kDoubleSize), reg);
    }
  }
}
153
// Exact inverse of PushCallerSaved: restores the XMM slab first (if saved),
// then pops the GP registers in reverse push order, honoring the same
// exclusion set.
void MacroAssembler::PopCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1,
                                    Register exclusion2, Register exclusion3) {
  if (fp_mode == kSaveFPRegs) {
    // Restore all XMM registers except XMM0.
    for (int i = XMMRegister::kMaxNumRegisters - 1; i > 0; i--) {
      XMMRegister reg = XMMRegister::from_code(i);
      movsd(reg, Operand(esp, (i - 1) * kDoubleSize));
    }
    add(esp, Immediate(kDoubleSize * (XMMRegister::kMaxNumRegisters - 1)));
  }

  for (int i = kNumberOfSavedRegs - 1; i >= 0; i--) {
    Register reg = saved_regs[i];
    if (!reg.is(exclusion1) && !reg.is(exclusion2) && !reg.is(exclusion3)) {
      pop(reg);
    }
  }
}
172
// Tests |object|'s page flags for the from-space/to-space bits and branches
// to |condition_met| on |cc| — i.e. jumps when the object is (or, with an
// inverted condition, is not) in new space. |scratch| is clobbered by
// CheckPageFlag.
void MacroAssembler::InNewSpace(Register object, Register scratch, Condition cc,
                                Label* condition_met,
                                Label::Distance distance) {
  const int mask =
      (1 << MemoryChunk::IN_FROM_SPACE) | (1 << MemoryChunk::IN_TO_SPACE);
  CheckPageFlag(object, scratch, mask, cc, condition_met, distance);
}
Steve Block6ded16b2010-05-10 14:33:55 +0100180
Steve Blocka7e24c12009-10-30 11:49:00 +0000181
// Appends |addr| to the store buffer. On buffer overflow the
// StoreBufferOverflowStub is called; depending on |and_then| the generated
// code then either returns (kReturnAtEnd) or falls through
// (kFallThroughAtEnd). |scratch| is clobbered.
void MacroAssembler::RememberedSetHelper(
    Register object,  // Only used for debug checks.
    Register addr,
    Register scratch,
    SaveFPRegsMode save_fp,
    MacroAssembler::RememberedSetFinalAction and_then) {
  Label done;
  if (emit_debug_code()) {
    // The slot being recorded must be in new space; trap otherwise.
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
    int3();
    bind(&ok);
  }
  // Load store buffer top.
  ExternalReference store_buffer =
      ExternalReference::store_buffer_top(isolate());
  mov(scratch, Operand::StaticVariable(store_buffer));
  // Store pointer to buffer.
  mov(Operand(scratch, 0), addr);
  // Increment buffer top.
  add(scratch, Immediate(kPointerSize));
  // Write back new top of buffer.
  mov(Operand::StaticVariable(store_buffer), scratch);
  // Call stub on end of buffer.
  // Check for end of buffer.
  test(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit));
  if (and_then == kReturnAtEnd) {
    Label buffer_overflowed;
    j(not_equal, &buffer_overflowed, Label::kNear);
    ret(0);
    bind(&buffer_overflowed);
  } else {
    DCHECK(and_then == kFallThroughAtEnd);
    j(equal, &done, Label::kNear);
  }
  StoreBufferOverflowStub store_buffer_overflow(isolate(), save_fp);
  CallStub(&store_buffer_overflow);
  if (and_then == kReturnAtEnd) {
    ret(0);
  } else {
    DCHECK(and_then == kFallThroughAtEnd);
    bind(&done);
  }
}
226
227
// Clamps the double in |input_reg| to an integer in [0, 255] in |result_reg|:
// negative values map to 0, values above 255 map to 255, and NaN maps to 0
// (the ucomisd against zero takes the unordered/below branch for NaN).
// |scratch_reg| is zeroed and used as the 0.0 comparison operand.
void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
                                        XMMRegister scratch_reg,
                                        Register result_reg) {
  Label done;
  Label conv_failure;
  xorps(scratch_reg, scratch_reg);
  cvtsd2si(result_reg, input_reg);
  // Fast path: already in [0, 255].
  test(result_reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  // cvtsd2si yields 0x80000000 on overflow/NaN; "cmp 1" overflows only then.
  cmp(result_reg, Immediate(0x1));
  j(overflow, &conv_failure, Label::kNear);
  // Branch-free clamp: 0 if the conversion was negative, 255 if too large.
  mov(result_reg, Immediate(0));
  setcc(sign, result_reg);
  sub(result_reg, Immediate(1));
  and_(result_reg, Immediate(255));
  jmp(&done, Label::kNear);
  bind(&conv_failure);
  Move(result_reg, Immediate(0));
  ucomisd(input_reg, scratch_reg);
  j(below, &done, Label::kNear);
  Move(result_reg, Immediate(255));
  bind(&done);
}
251
252
// Clamps the signed integer in |reg| to [0, 255] in place, branch-free for
// the out-of-range case.
void MacroAssembler::ClampUint8(Register reg) {
  Label done;
  test(reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  setcc(negative, reg);  // 1 if negative, 0 if positive.
  dec_b(reg);  // 0 if negative, 255 if positive.
  bind(&done);
}
261
262
// Truncates a double in memory at |input_reg| + |offset| to an int32 by
// calling the DoubleToIStub, which handles all inputs (the trailing 'true'
// presumably selects truncating mode — declared in the stub's header).
// Note: callers in this file also invoke this with two arguments, so the
// header declaration must supply a default for |offset| (not visible here).
void MacroAssembler::SlowTruncateToI(Register result_reg,
                                     Register input_reg,
                                     int offset) {
  DoubleToIStub stub(isolate(), input_reg, result_reg, offset, true);
  call(stub.GetCode(), RelocInfo::CODE_TARGET);
}
269
270
// Truncates the double in |input_reg| to an int32 in |result_reg|.
// Fast path: cvttsd2si. That instruction returns 0x80000000 on overflow/NaN,
// which the "cmp 1" overflow check detects; the slow path then spills the
// double to the stack and calls the stub, which handles every input.
void MacroAssembler::TruncateDoubleToI(Register result_reg,
                                       XMMRegister input_reg) {
  Label done;
  cvttsd2si(result_reg, Operand(input_reg));
  cmp(result_reg, 0x1);
  j(no_overflow, &done, Label::kNear);

  sub(esp, Immediate(kDoubleSize));
  movsd(MemOperand(esp, 0), input_reg);
  SlowTruncateToI(result_reg, esp, 0);
  add(esp, Immediate(kDoubleSize));
  bind(&done);
}
284
285
// Converts the double in |input_reg| to an int32 in |result_reg|, jumping to
// |lost_precision| if the value is not exactly representable as an int32, to
// |is_nan| for NaN, and (when FAIL_ON_MINUS_ZERO) to |minus_zero| for -0.0.
// The round-trip comparison (convert, convert back, ucomisd) drives the
// lost-precision and NaN exits; |scratch| holds the re-converted value.
void MacroAssembler::DoubleToI(Register result_reg, XMMRegister input_reg,
                               XMMRegister scratch,
                               MinusZeroMode minus_zero_mode,
                               Label* lost_precision, Label* is_nan,
                               Label* minus_zero, Label::Distance dst) {
  DCHECK(!input_reg.is(scratch));
  cvttsd2si(result_reg, Operand(input_reg));
  Cvtsi2sd(scratch, Operand(result_reg));
  ucomisd(scratch, input_reg);
  j(not_equal, lost_precision, dst);
  j(parity_even, is_nan, dst);
  if (minus_zero_mode == FAIL_ON_MINUS_ZERO) {
    Label done;
    // The integer converted back is equal to the original. We
    // only have to test if we got -0 as an input.
    test(result_reg, Operand(result_reg));
    j(not_zero, &done, Label::kNear);
    movmskpd(result_reg, input_reg);
    // Bit 0 contains the sign of the double in input_reg.
    // If input was positive, we are ok and return 0, otherwise
    // jump to minus_zero.
    and_(result_reg, 1);
    j(not_zero, minus_zero, dst);
    bind(&done);
  }
}
312
313
// Truncates the double stored in the HeapNumber |input_reg| to an int32 in
// |result_reg|. With SSE3 it uses x87 fisttp_d (a true 64-bit truncation)
// after screening out exponents >= 63; without SSE3 it uses cvttsd2si and
// falls back to the DoubleToIStub for overflow/NaN. xmm0 is clobbered on the
// non-SSE3 path.
void MacroAssembler::TruncateHeapNumberToI(Register result_reg,
                                           Register input_reg) {
  Label done, slow_case;

  if (CpuFeatures::IsSupported(SSE3)) {
    CpuFeatureScope scope(this, SSE3);
    Label convert;
    // Use more powerful conversion when sse3 is available.
    // Load x87 register with heap number.
    fld_d(FieldOperand(input_reg, HeapNumber::kValueOffset));
    // Get exponent alone and check for too-big exponent.
    mov(result_reg, FieldOperand(input_reg, HeapNumber::kExponentOffset));
    and_(result_reg, HeapNumber::kExponentMask);
    const uint32_t kTooBigExponent =
        (HeapNumber::kExponentBias + 63) << HeapNumber::kExponentShift;
    cmp(Operand(result_reg), Immediate(kTooBigExponent));
    j(greater_equal, &slow_case, Label::kNear);

    // Reserve space for 64 bit answer.
    sub(Operand(esp), Immediate(kDoubleSize));
    // Do conversion, which cannot fail because we checked the exponent.
    fisttp_d(Operand(esp, 0));
    mov(result_reg, Operand(esp, 0));  // Low word of answer is the result.
    add(Operand(esp), Immediate(kDoubleSize));
    jmp(&done, Label::kNear);

    // Slow case.
    bind(&slow_case);
    if (input_reg.is(result_reg)) {
      // Input is clobbered. Restore number from fpu stack.
      sub(Operand(esp), Immediate(kDoubleSize));
      fstp_d(Operand(esp, 0));
      SlowTruncateToI(result_reg, esp, 0);
      add(esp, Immediate(kDoubleSize));
    } else {
      // Drop the x87 copy; the stub reloads from the heap number itself.
      fstp(0);
      SlowTruncateToI(result_reg, input_reg);
    }
  } else {
    movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
    cvttsd2si(result_reg, Operand(xmm0));
    cmp(result_reg, 0x1);
    j(no_overflow, &done, Label::kNear);
    // Check if the input was 0x80000000 (kMinInt).
    // If not, then we got an overflow and we deoptimize.
    ExternalReference min_int = ExternalReference::address_of_min_int();
    ucomisd(xmm0, Operand::StaticVariable(min_int));
    j(not_equal, &slow_case, Label::kNear);
    j(parity_even, &slow_case, Label::kNear);  // NaN.
    jmp(&done, Label::kNear);

    // Slow case.
    bind(&slow_case);
    if (input_reg.is(result_reg)) {
      // Input is clobbered. Restore number from double scratch.
      sub(esp, Immediate(kDoubleSize));
      movsd(MemOperand(esp, 0), xmm0);
      SlowTruncateToI(result_reg, esp, 0);
      add(esp, Immediate(kDoubleSize));
    } else {
      SlowTruncateToI(result_reg, input_reg);
    }
  }
  bind(&done);
}
379
380
// Converts the 32-bit value at |src|, interpreted as *unsigned*, to a double
// in |dst|. cvtsi2sd treats the input as signed, so if the sign bit was set
// the result is off by 2^32; the bias load corrects that. Note the cmp is
// done *before* the conversion: SSE moves/converts do not touch EFLAGS, so
// the sign flag survives until the j(not_sign).
void MacroAssembler::LoadUint32(XMMRegister dst, const Operand& src) {
  Label done;
  cmp(src, Immediate(0));
  ExternalReference uint32_bias = ExternalReference::address_of_uint32_bias();
  Cvtsi2sd(dst, src);
  j(not_sign, &done, Label::kNear);
  // Presumably the bias is 2^32 as a double — declared with the reference.
  addsd(dst, Operand::StaticVariable(uint32_bias));
  bind(&done);
}
390
391
// Write barrier for a store of |value| into the FixedArray |object| at the
// smi index |index|. Computes the slot address into |index| (clobbering it)
// and delegates to RecordWrite. In debug-code builds, |value| and |index|
// are zapped afterwards to flush out callers that rely on them surviving.
void MacroAssembler::RecordWriteArray(
    Register object,
    Register value,
    Register index,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    DCHECK_EQ(0, kSmiTag);
    test(value, Immediate(kSmiTagMask));
    j(zero, &done);
  }

  // Array access: calculate the destination address in the same manner as
  // KeyedStoreIC::GenerateGeneric. Multiply a smi by 2 to get an offset
  // into an array of words.
  Register dst = index;
  lea(dst, Operand(object, index, times_half_pointer_size,
                   FixedArray::kHeaderSize - kHeapObjectTag));

  RecordWrite(object, dst, value, save_fp, remembered_set_action,
              OMIT_SMI_CHECK, pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(index, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}
430
431
// Write barrier for a store of |value| into the field at |offset| of
// |object|. Computes the (untagged) slot address into |dst| and delegates to
// RecordWrite. In debug-code builds, |value| and |dst| are zapped afterwards.
void MacroAssembler::RecordWriteField(
    Register object,
    int offset,
    Register value,
    Register dst,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done, Label::kNear);
  }

  // Although the object register is tagged, the offset is relative to the start
  // of the object, so the offset must be a multiple of kPointerSize.
  DCHECK(IsAligned(offset, kPointerSize));

  lea(dst, FieldOperand(object, offset));
  if (emit_debug_code()) {
    // The computed slot address must be pointer-aligned.
    Label ok;
    test_b(dst, (1 << kPointerSizeLog2) - 1);
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  RecordWrite(object, dst, value, save_fp, remembered_set_action,
              OMIT_SMI_CHECK, pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(dst, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}
475
476
// Write barrier specialized for storing the map |map| into |object|'s map
// slot. Emits nothing beyond the debug checks when incremental marking is
// disabled. |scratch1| and |scratch2| are clobbered (they carry the slot
// address and the value for the stub).
void MacroAssembler::RecordWriteForMap(
    Register object,
    Handle<Map> map,
    Register scratch1,
    Register scratch2,
    SaveFPRegsMode save_fp) {
  Label done;

  Register address = scratch1;
  Register value = scratch2;
  if (emit_debug_code()) {
    // The map slot address must be pointer-aligned.
    Label ok;
    lea(address, FieldOperand(object, HeapObject::kMapOffset));
    test_b(address, (1 << kPointerSizeLog2) - 1);
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  DCHECK(!object.is(value));
  DCHECK(!object.is(address));
  DCHECK(!value.is(address));
  AssertNotSmi(object);

  if (!FLAG_incremental_marking) {
    return;
  }

  // Compute the address.
  lea(address, FieldOperand(object, HeapObject::kMapOffset));

  // A single check of the map's pages interesting flag suffices, since it is
  // only set during incremental collection, and then it's also guaranteed that
  // the from object's page's interesting flag is also set. This optimization
  // relies on the fact that maps can never be in new space.
  DCHECK(!isolate()->heap()->InNewSpace(*map));
  CheckPageFlagForMap(map,
                      MemoryChunk::kPointersToHereAreInterestingMask,
                      zero,
                      &done,
                      Label::kNear);

  RecordWriteStub stub(isolate(), object, value, address, OMIT_REMEMBERED_SET,
                       save_fp);
  CallStub(&stub);

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(scratch1, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(scratch2, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}
537
538
// The general write barrier: records the store of |value| into the slot at
// |address| inside |object|. Skips all work when both the remembered set
// update is omitted and incremental marking is off. |value| is reused as a
// scratch for the page-flag checks, so it does not survive this call in
// debug-code builds (it is zapped, along with |address|).
void MacroAssembler::RecordWrite(
    Register object,
    Register address,
    Register value,
    SaveFPRegsMode fp_mode,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  DCHECK(!object.is(value));
  DCHECK(!object.is(address));
  DCHECK(!value.is(address));
  AssertNotSmi(object);

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    // The slot must actually contain the value being recorded.
    Label ok;
    cmp(value, Operand(address, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    // Skip barrier if writing a smi.
    JumpIfSmi(value, &done, Label::kNear);
  }

  if (pointers_to_here_check_for_value != kPointersToHereAreAlwaysInteresting) {
    CheckPageFlag(value,
                  value,  // Used as scratch.
                  MemoryChunk::kPointersToHereAreInterestingMask,
                  zero,
                  &done,
                  Label::kNear);
  }
  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

  RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
                       fp_mode);
  CallStub(&stub);

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(address, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}
606
// Write barrier for the code-entry field of |js_function|. Calls out to the
// C++ incremental-marking record-write function rather than the stub;
// |scratch| is clobbered. All GP caller-saved registers (minus the two
// inputs) are preserved around the C call.
void MacroAssembler::RecordWriteCodeEntryField(Register js_function,
                                               Register code_entry,
                                               Register scratch) {
  const int offset = JSFunction::kCodeEntryOffset;

  // Since a code entry (value) is always in old space, we don't need to update
  // remembered set. If incremental marking is off, there is nothing for us to
  // do.
  if (!FLAG_incremental_marking) return;

  DCHECK(!js_function.is(code_entry));
  DCHECK(!js_function.is(scratch));
  DCHECK(!code_entry.is(scratch));
  AssertNotSmi(js_function);

  if (emit_debug_code()) {
    // The field must already hold the value being recorded.
    Label ok;
    lea(scratch, FieldOperand(js_function, offset));
    cmp(code_entry, Operand(scratch, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  CheckPageFlag(code_entry, scratch,
                MemoryChunk::kPointersToHereAreInterestingMask, zero, &done,
                Label::kNear);
  CheckPageFlag(js_function, scratch,
                MemoryChunk::kPointersFromHereAreInterestingMask, zero, &done,
                Label::kNear);

  // Save input registers.
  push(js_function);
  push(code_entry);

  const Register dst = scratch;
  lea(dst, FieldOperand(js_function, offset));

  // Save caller-saved registers.
  PushCallerSaved(kDontSaveFPRegs, js_function, code_entry);

  int argument_count = 3;
  PrepareCallCFunction(argument_count, code_entry);
  mov(Operand(esp, 0 * kPointerSize), js_function);
  mov(Operand(esp, 1 * kPointerSize), dst);  // Slot.
  mov(Operand(esp, 2 * kPointerSize),
      Immediate(ExternalReference::isolate_address(isolate())));

  {
    AllowExternalCallThatCantCauseGC scope(this);
    CallCFunction(
        ExternalReference::incremental_marking_record_write_code_entry_function(
            isolate()),
        argument_count);
  }

  // Restore caller-saved registers.
  PopCallerSaved(kDontSaveFPRegs, js_function, code_entry);

  // Restore input registers.
  pop(code_entry);
  pop(js_function);

  bind(&done);
}
Steve Block8defd9f2010-07-08 12:39:36 +0100676
// Emits a call into the runtime (via CEntryStub) to handle a debugger
// statement: eax carries the argument count (0), ebx the runtime entry.
void MacroAssembler::DebugBreak() {
  Move(eax, Immediate(0));
  mov(ebx, Immediate(ExternalReference(Runtime::kHandleDebuggerStatement,
                                       isolate())));
  CEntryStub ces(isolate(), 1);
  call(ces.GetCode(), RelocInfo::DEBUGGER_STATEMENT);
}
Steve Blocka7e24c12009-10-30 11:49:00 +0000684
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100685
// int32 -> double conversion. The xorps clears |dst| first to break the
// false dependency on its previous contents: cvtsi2sd only writes the low
// quadword and merges the rest (see the Intel SDM).
void MacroAssembler::Cvtsi2sd(XMMRegister dst, const Operand& src) {
  xorps(dst, dst);
  cvtsi2sd(dst, src);
}
690
691
// Converts the *unsigned* 32-bit integer in |src| to a float in |dst|.
// Values with the MSB clear convert directly. Otherwise the value is halved
// (keeping the dropped bit OR'ed back in so rounding stays correct),
// converted, and doubled. Note |src| and |tmp| are clobbered on that path.
void MacroAssembler::Cvtui2ss(XMMRegister dst, Register src, Register tmp) {
  Label msb_set_src;
  Label jmp_return;
  test(src, src);
  j(sign, &msb_set_src, Label::kNear);
  cvtsi2ss(dst, src);
  jmp(&jmp_return, Label::kNear);
  bind(&msb_set_src);
  mov(tmp, src);
  shr(src, 1);
  // Recover the least significant bit to avoid rounding errors.
  and_(tmp, Immediate(1));
  or_(src, tmp);
  cvtsi2ss(dst, src);
  addss(dst, dst);
  bind(&jmp_return);
}
709
710
Steve Block053d10c2011-06-13 19:13:29 +0100711bool MacroAssembler::IsUnsafeImmediate(const Immediate& x) {
712 static const int kMaxImmediateBits = 17;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000713 if (!RelocInfo::IsNone(x.rmode_)) return false;
Steve Block053d10c2011-06-13 19:13:29 +0100714 return !is_intn(x.x_, kMaxImmediateBits);
715}
716
717
// Like Move, but large plain immediates are emitted XOR'ed with the jit
// cookie and un-XOR'ed at runtime, so the raw constant never appears in the
// code stream (presumably a JIT-spraying mitigation — see IsUnsafeImmediate).
void MacroAssembler::SafeMove(Register dst, const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    Move(dst, Immediate(x.x_ ^ jit_cookie()));
    xor_(dst, jit_cookie());
  } else {
    Move(dst, x);
  }
}
726
727
// Push-variant of SafeMove: unsafe immediates are pushed scrambled and then
// fixed up in place on the stack with a second XOR.
void MacroAssembler::SafePush(const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    push(Immediate(x.x_ ^ jit_cookie()));
    xor_(Operand(esp, 0), Immediate(jit_cookie()));
  } else {
    push(x);
  }
}
736
737
// Loads |heap_object|'s map into |map| (clobbering it) and compares the
// map's instance type against |type|, setting flags for a conditional jump.
void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}
744
745
// Compares the instance-type byte of the map in |map| against |type|.
void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       static_cast<int8_t>(type));
}
750
751
// Jumps to |fail| unless the map in |map| has a fast elements kind (smi,
// object, or their holey variants). Relies on the elements-kind ordering
// pinned by the STATIC_ASSERTs.
void MacroAssembler::CheckFastElements(Register map,
                                       Label* fail,
                                       Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleyElementValue);
  j(above, fail, distance);
}
763
764
// Jumps to |fail| unless the map's elements kind is FAST_ELEMENTS or
// FAST_HOLEY_ELEMENTS — i.e. smi-only kinds fail low, non-fast kinds fail
// high. Relies on the ordering pinned by the STATIC_ASSERTs.
void MacroAssembler::CheckFastObjectElements(Register map,
                                             Label* fail,
                                             Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleySmiElementValue);
  j(below_equal, fail, distance);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleyElementValue);
  j(above, fail, distance);
}
779
780
// Jumps to |fail| unless the map's elements kind is FAST_SMI_ELEMENTS or
// FAST_HOLEY_SMI_ELEMENTS.
void MacroAssembler::CheckFastSmiElements(Register map,
                                          Label* fail,
                                          Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleySmiElementValue);
  j(above, fail, distance);
}
790
791
// Stores the number in |maybe_number| (a smi or a HeapNumber; anything else
// jumps to |fail|) into the FixedDoubleArray |elements| at index |key|.
// |key| appears to be a smi: the times_4 scale on a smi-tagged index yields
// a kDoubleSize stride — TODO confirm against callers. |scratch1| and
// |scratch2| are clobbered.
void MacroAssembler::StoreNumberToDoubleElements(
    Register maybe_number,
    Register elements,
    Register key,
    Register scratch1,
    XMMRegister scratch2,
    Label* fail,
    int elements_offset) {
  Label smi_value, done;
  JumpIfSmi(maybe_number, &smi_value, Label::kNear);

  CheckMap(maybe_number,
           isolate()->factory()->heap_number_map(),
           fail,
           DONT_DO_SMI_CHECK);

  // Double value, turn potential sNaN into qNaN.
  Move(scratch2, 1.0);
  mulsd(scratch2, FieldOperand(maybe_number, HeapNumber::kValueOffset));
  jmp(&done, Label::kNear);

  bind(&smi_value);
  // Value is a smi. Convert to a double and store.
  // Preserve original value.
  mov(scratch1, maybe_number);
  SmiUntag(scratch1);
  Cvtsi2sd(scratch2, scratch1);
  bind(&done);
  movsd(FieldOperand(elements, key, times_4,
                     FixedDoubleArray::kHeaderSize - elements_offset),
        scratch2);
}
824
825
// Compares |obj|'s map against the handle |map|, setting flags for a
// subsequent conditional jump.
void MacroAssembler::CompareMap(Register obj, Handle<Map> map) {
  cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
}
829
830
// Branches to |fail| unless |obj|'s map is exactly |map|. When
// |smi_check_type| is DO_SMI_CHECK, a smi |obj| also goes to |fail| (a smi
// has no map word to read).
void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }

  CompareMap(obj, map);
  j(not_equal, fail);
}
842
843
// Jumps to the code object |success| if |obj|'s map matches the map held by
// the weak |cell|; otherwise falls through. A smi |obj| (when checked) or a
// map mismatch falls through via the local |fail| label. Clobbers |scratch1|
// (holds the map) and |scratch2| (used by CmpWeakValue).
void MacroAssembler::DispatchWeakMap(Register obj, Register scratch1,
                                     Register scratch2, Handle<WeakCell> cell,
                                     Handle<Code> success,
                                     SmiCheckType smi_check_type) {
  Label fail;
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, &fail);
  }
  mov(scratch1, FieldOperand(obj, HeapObject::kMapOffset));
  CmpWeakValue(scratch1, cell, scratch2);
  j(equal, success);

  bind(&fail);
}
858
859
// Loads |heap_object|'s map into |map| and its instance type into
// |instance_type|, then tests the string bit. Returns the condition (zero)
// that holds when the object is a string, for use in a caller's branch.
// |heap_object| must not be a smi.
Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  // A zero test against kIsNotStringMask only works because the string tag
  // itself is zero.
  STATIC_ASSERT(kNotStringTag != 0);
  test(instance_type, Immediate(kIsNotStringMask));
  return zero;
}
869
870
// Loads |heap_object|'s map into |map| and its instance type into
// |instance_type|, then compares against LAST_NAME_TYPE. Returns the
// condition (below_equal) that holds when the object is a Name (string or
// symbol). |heap_object| must not be a smi.
Condition MacroAssembler::IsObjectNameType(Register heap_object,
                                           Register map,
                                           Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  cmpb(instance_type, static_cast<uint8_t>(LAST_NAME_TYPE));
  return below_equal;
}
879
880
// Compares the two values on top of the x87 FPU stack and sets EFLAGS.
// fucomip compares st(0) with st(1) and pops once; the fstp(0) pops the
// remaining operand so the FPU stack is left empty of both inputs.
void MacroAssembler::FCmp() {
  fucomip();
  fstp(0);
}
885
886
// Debug-mode check that |object| is a number (smi or heap number); aborts
// with kOperandNotANumber otherwise. Emits nothing in release code.
void MacroAssembler::AssertNumber(Register object) {
  if (emit_debug_code()) {
    Label ok;
    JumpIfSmi(object, &ok);  // A smi is a number; skip the map check.
    cmp(FieldOperand(object, HeapObject::kMapOffset),
        isolate()->factory()->heap_number_map());
    Check(equal, kOperandNotANumber);
    bind(&ok);
  }
}
897
898
// Debug-mode check that |object| is a smi; aborts with kOperandIsNotASmi
// otherwise. Emits nothing in release code.
void MacroAssembler::AssertSmi(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(equal, kOperandIsNotASmi);
  }
}
905
906
// Debug-mode check that |object| is a string (non-smi with instance type
// below FIRST_NONSTRING_TYPE); aborts otherwise. |object| is preserved via
// push/pop around the map load. Emits nothing in release code.
void MacroAssembler::AssertString(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAString);
    push(object);
    mov(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, FIRST_NONSTRING_TYPE);
    pop(object);
    Check(below, kOperandIsNotAString);
  }
}
918
919
// Debug-mode check that |object| is a Name (non-smi with instance type at or
// below LAST_NAME_TYPE); aborts otherwise. |object| is preserved via
// push/pop around the map load. Emits nothing in release code.
void MacroAssembler::AssertName(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAName);
    push(object);
    mov(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, LAST_NAME_TYPE);
    pop(object);
    Check(below_equal, kOperandIsNotAName);
  }
}
931
932
// Debug-mode check that |object| is a JSFunction; aborts otherwise. |object|
// is preserved via Push/Pop because CmpObjectType clobbers its map register.
// Emits nothing in release code.
void MacroAssembler::AssertFunction(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAFunction);
    Push(object);
    CmpObjectType(object, JS_FUNCTION_TYPE, object);
    Pop(object);
    Check(equal, kOperandIsNotAFunction);
  }
}
943
944
// Debug-mode check that |object| is a JSBoundFunction; aborts otherwise.
// |object| is preserved via Push/Pop. Emits nothing in release code.
void MacroAssembler::AssertBoundFunction(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotABoundFunction);
    Push(object);
    CmpObjectType(object, JS_BOUND_FUNCTION_TYPE, object);
    Pop(object);
    Check(equal, kOperandIsNotABoundFunction);
  }
}
955
956
// Debug-mode check that |object| is a JSReceiver (instance type at or above
// FIRST_JS_RECEIVER_TYPE); aborts otherwise. The STATIC_ASSERT guarantees
// that a single lower-bound compare covers the whole receiver range.
// |object| is preserved via Push/Pop. Emits nothing in release code.
void MacroAssembler::AssertReceiver(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAReceiver);
    Push(object);
    STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
    CmpObjectType(object, FIRST_JS_RECEIVER_TYPE, object);
    Pop(object);
    Check(above_equal, kOperandIsNotAReceiver);
  }
}
968
969
// Debug-mode check that |object| is either the undefined sentinel or an
// AllocationSite (identified by its map word, read at offset 0); aborts
// otherwise. Emits nothing in release code.
void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) {
  if (emit_debug_code()) {
    Label done_checking;
    AssertNotSmi(object);
    cmp(object, isolate()->factory()->undefined_value());
    j(equal, &done_checking);
    // FieldOperand(object, 0) is the map slot (kMapOffset == 0 for the
    // header word being compared here).
    cmp(FieldOperand(object, 0),
        Immediate(isolate()->factory()->allocation_site_map()));
    Assert(equal, kExpectedUndefinedOrCell);
    bind(&done_checking);
  }
}
982
983
// Debug-mode check that |object| is NOT a smi; aborts with kOperandIsASmi
// otherwise. Emits nothing in release code.
void MacroAssembler::AssertNotSmi(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmi);
  }
}
990
991
// Emits the standard stub-frame prologue: saved ebp, new frame pointer,
// context, and a STUB frame-type marker (instead of a JS function slot).
void MacroAssembler::StubPrologue() {
  push(ebp);  // Caller's frame pointer.
  mov(ebp, esp);
  push(esi);  // Callee's context.
  push(Immediate(Smi::FromInt(StackFrame::STUB)));
}
998
999
// Emits the JS function prologue. When |code_pre_aging| is set, emits the
// pre-aged sequence (a call to the MarkCodeAsExecutedOnce builtin padded
// with nops) instead of the young frame-setup sequence; both variants are
// forced to the same byte length so the code-aging machinery can patch one
// into the other in place.
void MacroAssembler::Prologue(bool code_pre_aging) {
  PredictableCodeSizeScope predictible_code_size_scope(this,
                                                       kNoCodeAgeSequenceLength);
  if (code_pre_aging) {
    // Pre-age the code.
    call(isolate()->builtins()->MarkCodeAsExecutedOnce(),
         RelocInfo::CODE_AGE_SEQUENCE);
    Nop(kNoCodeAgeSequenceLength - Assembler::kCallInstructionLength);
  } else {
    push(ebp);  // Caller's frame pointer.
    mov(ebp, esp);
    push(esi);  // Callee's context.
    push(edi);  // Callee's JS function.
  }
}
1015
1016
// Loads the current function's type feedback vector into |vector| by
// chasing: frame function slot -> SharedFunctionInfo -> feedback vector.
void MacroAssembler::EmitLoadTypeFeedbackVector(Register vector) {
  mov(vector, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  mov(vector, FieldOperand(vector, JSFunction::kSharedFunctionInfoOffset));
  mov(vector, FieldOperand(vector, SharedFunctionInfo::kFeedbackVectorOffset));
}
1022
1023
// Overload kept for cross-platform API parity; ia32 has no out-of-line
// constant pool, so this variant must never be called.
void MacroAssembler::EnterFrame(StackFrame::Type type,
                                bool load_constant_pool_pointer_reg) {
  // Out-of-line constant pool not implemented on ia32.
  UNREACHABLE();
}
1029
1030
// Builds an internal frame of the given |type|: saved ebp, context, a
// frame-type marker, and this code object. In debug code, verifies that the
// code-object slot was patched away from the undefined placeholder.
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, esp);
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(CodeObject()));
  if (emit_debug_code()) {
    cmp(Operand(esp, 0), Immediate(isolate()->factory()->undefined_value()));
    Check(not_equal, kCodeObjectNotProperlyPatched);
  }
}
1042
1043
// Tears down a frame created by EnterFrame. In debug code, first verifies
// that the marker slot on the frame matches the expected |type|.
void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (emit_debug_code()) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, kStackFrameTypesMustMatch);
  }
  leave();
}
1052
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001053
// First half of exit-frame construction: links the frame, reserves the
// entry-SP slot (patched later by EnterExitFrameEpilogue), pushes the code
// object, and publishes fp/context/C-function into the isolate's top-of-
// stack external references. Expects the C function address in ebx on entry.
void MacroAssembler::EnterExitFramePrologue() {
  // Set up the frame structure on the stack.
  DCHECK(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  DCHECK(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  DCHECK(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(ebp);
  mov(ebp, esp);

  // Reserve room for entry stack pointer and push the code object.
  DCHECK(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  push(Immediate(CodeObject()));  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress, isolate());
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  ExternalReference c_function_address(Isolate::kCFunctionAddress, isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);
  mov(Operand::StaticVariable(c_function_address), ebx);
}
Steve Blocka7e24c12009-10-30 11:49:00 +00001075
Steve Blocka7e24c12009-10-30 11:49:00 +00001076
// Second half of exit-frame construction: reserves stack space for |argc|
// outgoing arguments (plus, when |save_doubles| is set, a save area for all
// XMM registers), aligns esp to the OS frame alignment, and patches the
// saved-entry-sp slot reserved by EnterExitFramePrologue.
void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) {
  // Optionally save all XMM registers.
  if (save_doubles) {
    int space = XMMRegister::kMaxNumRegisters * kDoubleSize +
                argc * kPointerSize;
    sub(esp, Immediate(space));
    // The save area starts below the code-object slot pushed by the
    // prologue, hence the -2 words base offset from ebp.
    const int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movsd(Operand(ebp, offset - ((i + 1) * kDoubleSize)), reg);
    }
  } else {
    sub(esp, Immediate(argc * kPointerSize));
  }

  // Get the required frame alignment for the OS.
  const int kFrameAlignment = base::OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    DCHECK(base::bits::IsPowerOfTwo32(kFrameAlignment));
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}
1102
1103
// Builds a full exit frame for a call into C. Expects the JS argument count
// in eax on entry; materializes argc in edi and a pointer to the last JS
// argument in esi (callee-saved across the C call), then reserves the
// outgoing-argument area.
void MacroAssembler::EnterExitFrame(int argc, bool save_doubles) {
  EnterExitFramePrologue();

  // Set up argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, eax);
  lea(esi, Operand(ebp, eax, times_4, offset));

  // Reserve space for argc, argv and isolate.
  EnterExitFrameEpilogue(argc, save_doubles);
}
1115
1116
// Builds an exit frame for an API callback: like EnterExitFrame but without
// argc/argv setup and without saving the XMM registers.
void MacroAssembler::EnterApiExitFrame(int argc) {
  EnterExitFramePrologue();
  EnterExitFrameEpilogue(argc, false);
}
1121
1122
// Tears down an exit frame built by EnterExitFrame. |save_doubles| must
// match the value passed on entry so the XMM save area is restored. When
// |pop_arguments| is set, also drops the JS arguments and receiver from the
// caller's stack (using esi, which EnterExitFrame left pointing at the last
// argument) and re-pushes the return address; otherwise just unwinds ebp.
void MacroAssembler::LeaveExitFrame(bool save_doubles, bool pop_arguments) {
  // Optionally restore all XMM registers.
  if (save_doubles) {
    const int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movsd(reg, Operand(ebp, offset - ((i + 1) * kDoubleSize)));
    }
  }

  if (pop_arguments) {
    // Get the return address from the stack and restore the frame pointer.
    mov(ecx, Operand(ebp, 1 * kPointerSize));
    mov(ebp, Operand(ebp, 0 * kPointerSize));

    // Pop the arguments and the receiver from the caller stack.
    lea(esp, Operand(esi, 1 * kPointerSize));

    // Push the return address to get ready to return.
    push(ecx);
  } else {
    // Otherwise just leave the exit frame.
    leave();
  }

  LeaveExitFrameEpilogue(true);
}
1150
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001151
// Shared exit-frame teardown tail: optionally restores esi from the saved
// context in the isolate, clears the saved context slot in debug builds,
// and clears the isolate's C-entry frame pointer.
void MacroAssembler::LeaveExitFrameEpilogue(bool restore_context) {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  if (restore_context) {
    mov(esi, Operand::StaticVariable(context_address));
  }
#ifdef DEBUG
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
                                       isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}
1167
1168
// Tears down a frame built by EnterApiExitFrame: unwinds esp/ebp and runs
// the shared epilogue, optionally restoring the context into esi.
void MacroAssembler::LeaveApiExitFrame(bool restore_context) {
  mov(esp, ebp);
  pop(ebp);

  LeaveExitFrameEpilogue(restore_context);
}
1175
1176
// Pushes a new stack handler (a single-word link to the previous handler)
// and makes it the isolate's current handler.
void MacroAssembler::PushStackHandler() {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);

  // Link the current handler as the next handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  push(Operand::StaticVariable(handler_address));

  // Set this new handler as the current one.
  mov(Operand::StaticVariable(handler_address), esp);
}
1189
1190
// Unlinks the top stack handler: restores the previous handler from the top
// of stack and drops the remainder of the handler (if any beyond the link
// word) from the stack.
void MacroAssembler::PopStackHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  pop(Operand::StaticVariable(handler_address));
  add(esp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}
1197
1198
// Security check for access through a global proxy |holder_reg|: branches to
// |miss| unless the current lexical context and the holder's native context
// either are the same or carry the same security token. Clobbers |scratch1|
// (current native context, then its token) and |scratch2| (holder's native
// context).
void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss) {
  Label same_contexts;

  DCHECK(!holder_reg.is(scratch1));
  DCHECK(!holder_reg.is(scratch2));
  DCHECK(!scratch1.is(scratch2));

  // Load current lexical context from the stack frame.
  mov(scratch1, Operand(ebp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (emit_debug_code()) {
    cmp(scratch1, Immediate(0));
    Check(not_equal, kWeShouldNotHaveAnEmptyLexicalContext);
  }
  // Load the native context of the current context.
  mov(scratch1, ContextOperand(scratch1, Context::NATIVE_CONTEXT_INDEX));

  // Check the context is a native context.
  if (emit_debug_code()) {
    // Read the first word and compare to native_context_map.
    cmp(FieldOperand(scratch1, HeapObject::kMapOffset),
        isolate()->factory()->native_context_map());
    Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
  }

  // Check if both contexts are the same.
  cmp(scratch1, FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  j(equal, &same_contexts);

  // The contexts differ: compare security tokens instead.
  //
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(scratch2,
      FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));

  // Check the context is a native context.
  if (emit_debug_code()) {
    cmp(scratch2, isolate()->factory()->null_value());
    Check(not_equal, kJSGlobalProxyContextShouldNotBeNull);

    // Read the first word and compare to native_context_map(),
    cmp(FieldOperand(scratch2, HeapObject::kMapOffset),
        isolate()->factory()->native_context_map());
    Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
  }

  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch1, FieldOperand(scratch1, token_offset));
  cmp(scratch1, FieldOperand(scratch2, token_offset));
  j(not_equal, miss);

  bind(&same_contexts);
}
1260
1261
// Compute the hash code from the untagged key. This must be kept in sync with
// ComputeIntegerHash in utils.h and KeyedLoadGenericStub in
// code-stub-hydrogen.cc
//
// Note: r0 holds the untagged key on entry and the hash code on exit;
// |scratch| is clobbered.
void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
  // Xor original key with a seed.
  if (serializer_enabled()) {
    // While the serializer is active the seed cannot be baked in as an
    // immediate, so load it from the roots array at runtime.
    ExternalReference roots_array_start =
        ExternalReference::roots_array_start(isolate());
    mov(scratch, Immediate(Heap::kHashSeedRootIndex));
    mov(scratch,
        Operand::StaticArray(scratch, times_pointer_size, roots_array_start));
    SmiUntag(scratch);
    xor_(r0, scratch);
  } else {
    int32_t seed = isolate()->heap()->HashSeed();
    xor_(r0, Immediate(seed));
  }

  // hash = ~hash + (hash << 15);
  mov(scratch, r0);
  not_(r0);
  shl(scratch, 15);
  add(r0, scratch);
  // hash = hash ^ (hash >> 12);
  mov(scratch, r0);
  shr(scratch, 12);
  xor_(r0, scratch);
  // hash = hash + (hash << 2);
  lea(r0, Operand(r0, r0, times_4, 0));
  // hash = hash ^ (hash >> 4);
  mov(scratch, r0);
  shr(scratch, 4);
  xor_(r0, scratch);
  // hash = hash * 2057;
  imul(r0, r0, 2057);
  // hash = hash ^ (hash >> 16);
  mov(scratch, r0);
  shr(scratch, 16);
  xor_(r0, scratch);
  // Clamp to 30 bits so the result always fits in a smi.
  and_(r0, 0x3fffffff);
}
1305
1306
1307
// Looks up |key| in the SeededNumberDictionary |elements| with an unrolled
// quadratic probe sequence; on success falls through with the value in
// |result|, otherwise jumps to |miss| (also for non-DATA properties).
void MacroAssembler::LoadFromNumberDictionary(Label* miss,
                                              Register elements,
                                              Register key,
                                              Register r0,
                                              Register r1,
                                              Register r2,
                                              Register result) {
  // Register use:
  //
  // elements - holds the slow-case elements of the receiver and is unchanged.
  //
  // key - holds the smi key on entry and is unchanged.
  //
  // Scratch registers:
  //
  // r0 - holds the untagged key on entry and holds the hash once computed.
  //
  // r1 - used to hold the capacity mask of the dictionary
  //
  // r2 - used for the index into the dictionary.
  //
  // result - holds the result on exit if the load succeeds and we fall through.

  Label done;

  GetNumberHash(r0, r1);

  // Compute capacity mask.
  mov(r1, FieldOperand(elements, SeededNumberDictionary::kCapacityOffset));
  shr(r1, kSmiTagSize);  // convert smi to int
  dec(r1);

  // Generate an unrolled loop that performs a few probes before giving up.
  for (int i = 0; i < kNumberDictionaryProbes; i++) {
    // Use r2 for index calculations and keep the hash intact in r0.
    mov(r2, r0);
    // Compute the masked index: (hash + i + i * i) & mask.
    if (i > 0) {
      add(r2, Immediate(SeededNumberDictionary::GetProbeOffset(i)));
    }
    and_(r2, r1);

    // Scale the index by multiplying by the entry size.
    DCHECK(SeededNumberDictionary::kEntrySize == 3);
    lea(r2, Operand(r2, r2, times_2, 0));  // r2 = r2 * 3

    // Check if the key matches.
    cmp(key, FieldOperand(elements,
                          r2,
                          times_pointer_size,
                          SeededNumberDictionary::kElementsStartOffset));
    if (i != (kNumberDictionaryProbes - 1)) {
      j(equal, &done);
    } else {
      // Last probe: a mismatch means the key is not present.
      j(not_equal, miss);
    }
  }

  bind(&done);
  // Check that the value is a field property.
  const int kDetailsOffset =
      SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  DCHECK_EQ(DATA, 0);
  test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
       Immediate(PropertyDetails::TypeField::kMask << kSmiTagSize));
  j(not_zero, miss);

  // Get the value at the masked, scaled index.
  const int kValueOffset =
      SeededNumberDictionary::kElementsStartOffset + kPointerSize;
  mov(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
}
1380
1381
// Loads the current allocation top (for the space selected by |flags|) into
// |result|. If RESULT_CONTAINS_TOP is set the caller already did this and
// only a debug-mode consistency check is emitted. When |scratch| is
// provided, the top address is kept in it so callers can reuse it for the
// matching UpdateAllocationTopHelper store.
void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    DCHECK(scratch.is(no_reg));
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    cmp(result, Operand::StaticVariable(allocation_top));
    Check(equal, kUnexpectedAllocationTop);
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available.
  if (scratch.is(no_reg)) {
    mov(result, Operand::StaticVariable(allocation_top));
  } else {
    mov(scratch, Immediate(allocation_top));
    mov(result, Operand(scratch, 0));
  }
}
1408
1409
// Stores |result_end| as the new allocation top for the space selected by
// |flags|. If |scratch| is valid it must already hold the top address (as
// left by LoadAllocationTopHelper). Debug code verifies object alignment.
void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch,
                                               AllocationFlags flags) {
  if (emit_debug_code()) {
    test(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, kUnalignedAllocationInNewSpace);
  }

  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Update new top. Use scratch if available.
  if (scratch.is(no_reg)) {
    mov(Operand::StaticVariable(allocation_top), result_end);
  } else {
    mov(Operand(scratch, 0), result_end);
  }
}
1428
1429
// Allocates |object_size| bytes by bumping the allocation top of the space
// selected by |flags|; jumps to |gc_required| when the space is exhausted.
// On success |result| holds the new object (tagged if TAG_OBJECT is set).
// |result_end| and |scratch| may be no_reg; when valid they are clobbered.
// When FLAG_inline_new is off, always jumps to |gc_required| (trashing the
// registers in debug code to surface accidental reliance on them).
void MacroAssembler::Allocate(int object_size,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
  DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      if (result_end.is_valid()) {
        mov(result_end, Immediate(0x7191));
      }
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Align the next allocation. Storing the filler map without checking top is
  // safe in new-space because the limit of the heap is aligned there.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    if ((flags & PRETENURE) != 0) {
      // Old space is not guaranteed to have an aligned limit, so the filler
      // word might not fit; check against the limit first.
      cmp(result, Operand::StaticVariable(allocation_limit));
      j(above_equal, gc_required);
    }
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted.
  Register top_reg = result_end.is_valid() ? result_end : result;
  if (!top_reg.is(result)) {
    mov(top_reg, result);
  }
  add(top_reg, Immediate(object_size));
  j(carry, gc_required);  // Address arithmetic overflowed.
  cmp(top_reg, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(top_reg, scratch, flags);

  // Tag result if requested.
  bool tag_result = (flags & TAG_OBJECT) != 0;
  if (top_reg.is(result)) {
    // result currently holds the new top; subtract back to the object start
    // (folding in the tag when requested).
    if (tag_result) {
      sub(result, Immediate(object_size - kHeapObjectTag));
    } else {
      sub(result, Immediate(object_size));
    }
  } else if (tag_result) {
    DCHECK(kHeapObjectTag == 1);
    inc(result);
  }
}
1503
1504
// Allocates an object of size header_size + element_count << element_size in
// new (or pretenured, per |flags|) space. On success, |result| holds the
// (optionally tagged) object address and |result_end| the address just past
// it; jumps to |gc_required| on failure. |element_count| itself is preserved.
void MacroAssembler::Allocate(int header_size,
                              ScaleFactor element_size,
                              Register element_count,
                              RegisterValueType element_count_type,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & SIZE_IN_WORDS) == 0);
  if (!FLAG_inline_new) {
    // Inline allocation disabled: always take the slow (GC/runtime) path.
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // Register element_count is not modified by the function.
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Align the next allocation. Storing the filler map without checking top is
  // safe in new-space because the limit of the heap is aligned there.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    if ((flags & PRETENURE) != 0) {
      // In old space the limit is not guaranteed aligned, so check before
      // writing the filler word.
      cmp(result, Operand::StaticVariable(allocation_limit));
      j(above_equal, gc_required);
    }
    // Fill the alignment gap with a one-pointer filler object.
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted.
  // We assume that element_count*element_size + header_size does not
  // overflow.
  if (element_count_type == REGISTER_VALUE_IS_SMI) {
    // A smi is the value shifted left by one, so scaling by element_size - 1
    // on the smi gives element_count << element_size on the untagged value.
    STATIC_ASSERT(static_cast<ScaleFactor>(times_2 - 1) == times_1);
    STATIC_ASSERT(static_cast<ScaleFactor>(times_4 - 1) == times_2);
    STATIC_ASSERT(static_cast<ScaleFactor>(times_8 - 1) == times_4);
    DCHECK(element_size >= times_2);
    DCHECK(kSmiTagSize == 1);
    element_size = static_cast<ScaleFactor>(element_size - 1);
  } else {
    DCHECK(element_count_type == REGISTER_VALUE_IS_INT32);
  }
  lea(result_end, Operand(element_count, element_size, header_size));
  add(result_end, result);
  j(carry, gc_required);
  cmp(result_end, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  if ((flags & TAG_OBJECT) != 0) {
    // Tag the result as a heap object by setting the low bit.
    DCHECK(kHeapObjectTag == 1);
    inc(result);
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch, flags);
}
1580
1581
// Allocates an object whose byte size is held in register |object_size|.
// On success, |result| holds the (optionally tagged) object address and
// |result_end| the address just past it; jumps to |gc_required| on failure.
// |object_size| is preserved unless it aliases |result_end|.
void MacroAssembler::Allocate(Register object_size,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
  if (!FLAG_inline_new) {
    // Inline allocation disabled: always take the slow (GC/runtime) path.
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // object_size is left unchanged by this function.
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Align the next allocation. Storing the filler map without checking top is
  // safe in new-space because the limit of the heap is aligned there.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    if ((flags & PRETENURE) != 0) {
      // In old space the limit is not guaranteed aligned, so check before
      // writing the filler word.
      cmp(result, Operand::StaticVariable(allocation_limit));
      j(above_equal, gc_required);
    }
    // Fill the alignment gap with a one-pointer filler object.
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted.
  if (!object_size.is(result_end)) {
    mov(result_end, object_size);
  }
  add(result_end, result);
  j(carry, gc_required);
  cmp(result_end, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    DCHECK(kHeapObjectTag == 1);
    inc(result);
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch, flags);
}
1645
1646
// Allocates a HeapNumber (or a mutable one, per |mode|) in new space and
// leaves the tagged pointer in |result|. Jumps to |gc_required| on failure.
// Only the map is initialized; the value field is left uninitialized.
void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required,
                                        MutableMode mode) {
  // Allocate heap number in new space.
  Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  // Pick the map matching the requested mutability.
  Handle<Map> map = mode == MUTABLE
      ? isolate()->factory()->mutable_heap_number_map()
      : isolate()->factory()->heap_number_map();

  // Set the map.
  mov(FieldOperand(result, HeapObject::kMapOffset), Immediate(map));
}
1663
1664
// Allocates a sequential two-byte string of |length| characters in new space.
// Leaves the tagged string in |result| and initializes map, length (as smi)
// and hash field; character storage is uninitialized. Jumps to |gc_required|
// on failure. All three scratch registers are clobbered.
void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  DCHECK((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  DCHECK(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
  and_(scratch1, Immediate(~kObjectAlignmentMask));

  // Allocate two byte string in new space.
  Allocate(SeqTwoByteString::kHeaderSize,
           times_1,
           scratch1,
           REGISTER_VALUE_IS_INT32,
           result,
           scratch2,
           scratch3,
           gc_required,
           TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
1699
1700
// Allocates a sequential one-byte string of |length| (register) characters in
// new space. Leaves the tagged string in |result| and initializes map, length
// (as smi) and hash field; character storage is uninitialized. Jumps to
// |gc_required| on failure. All three scratch registers are clobbered.
void MacroAssembler::AllocateOneByteString(Register result, Register length,
                                           Register scratch1, Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  DCHECK((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, length);
  DCHECK(kCharSize == 1);
  // Round the byte count up to the object alignment boundary.
  add(scratch1, Immediate(kObjectAlignmentMask));
  and_(scratch1, Immediate(~kObjectAlignmentMask));

  // Allocate one-byte string in new space.
  Allocate(SeqOneByteString::kHeaderSize,
           times_1,
           scratch1,
           REGISTER_VALUE_IS_INT32,
           result,
           scratch2,
           scratch3,
           gc_required,
           TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->one_byte_string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
1733
1734
// Allocates a sequential one-byte string whose length is known at compile
// time. Leaves the tagged string in |result| and initializes map, length and
// hash field; character storage is uninitialized. Jumps to |gc_required| on
// failure.
void MacroAssembler::AllocateOneByteString(Register result, int length,
                                           Register scratch1, Register scratch2,
                                           Label* gc_required) {
  DCHECK(length > 0);

  // Allocate one-byte string in new space.
  Allocate(SeqOneByteString::SizeFor(length), result, scratch1, scratch2,
           gc_required, TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->one_byte_string_map()));
  mov(FieldOperand(result, String::kLengthOffset),
      Immediate(Smi::FromInt(length)));
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
1752
1753
// Allocates a two-byte cons string cell in new space and sets only its map;
// first/second/length/hash fields are left uninitialized for the caller to
// fill in. Jumps to |gc_required| on failure.
void MacroAssembler::AllocateTwoByteConsString(Register result,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  // Allocate the fixed-size ConsString cell in new space.
  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->cons_string_map()));
}
1766
1767
// Allocates a one-byte cons string cell in new space and sets only its map;
// first/second/length/hash fields are left uninitialized for the caller to
// fill in. Jumps to |gc_required| on failure.
void MacroAssembler::AllocateOneByteConsString(Register result,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  // Allocate the fixed-size ConsString cell in new space.
  Allocate(ConsString::kSize,
           result,
           scratch1,
           scratch2,
           gc_required,
           TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->cons_one_byte_string_map()));
}
1783
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08001784
// Allocates a two-byte sliced string cell in new space and sets only its map;
// parent/offset/length/hash fields are left uninitialized for the caller to
// fill in. Jumps to |gc_required| on failure.
void MacroAssembler::AllocateTwoByteSlicedString(Register result,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  // Allocate the fixed-size SlicedString cell in new space.
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->sliced_string_map()));
}
1797
1798
// Allocates a one-byte sliced string cell in new space and sets only its map;
// parent/offset/length/hash fields are left uninitialized for the caller to
// fill in. Jumps to |gc_required| on failure.
void MacroAssembler::AllocateOneByteSlicedString(Register result,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  // Allocate the fixed-size SlicedString cell in new space.
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->sliced_one_byte_string_map()));
}
1811
1812
// Allocates and fully initializes a JSValue wrapper object in new space:
// map (the initial map of |constructor|), empty properties and elements, and
// the wrapped |value|. Leaves the tagged object in |result|; jumps to
// |gc_required| on failure. |constructor| and |scratch| are clobbered.
void MacroAssembler::AllocateJSValue(Register result, Register constructor,
                                     Register value, Register scratch,
                                     Label* gc_required) {
  DCHECK(!result.is(constructor));
  DCHECK(!result.is(scratch));
  DCHECK(!result.is(value));

  // Allocate JSValue in new space.
  Allocate(JSValue::kSize, result, scratch, no_reg, gc_required, TAG_OBJECT);

  // Initialize the JSValue.
  LoadGlobalFunctionInitialMap(constructor, scratch);
  mov(FieldOperand(result, HeapObject::kMapOffset), scratch);
  LoadRoot(scratch, Heap::kEmptyFixedArrayRootIndex);
  mov(FieldOperand(result, JSObject::kPropertiesOffset), scratch);
  mov(FieldOperand(result, JSObject::kElementsOffset), scratch);
  mov(FieldOperand(result, JSValue::kValueOffset), value);
  // The four stores above must cover the whole object.
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
}
1832
1833
// Copy memory, byte-by-byte, from source to destination. Not optimized for
// long or aligned copies. The contents of scratch and length are destroyed.
// Source and destination are incremented by length.
// Many variants of movsb, loop unrolling, word moves, and indexed operands
// have been tried here already, and this is fastest.
// A simpler loop is faster on small copies, but 30% slower on large ones.
// The cld() instruction must have been emitted, to set the direction flag(),
// before calling this function.
void MacroAssembler::CopyBytes(Register source,
                               Register destination,
                               Register length,
                               Register scratch) {
  Label short_loop, len4, len8, len12, done, short_string;
  // rep_movs below requires the fixed esi/edi/ecx register assignment.
  DCHECK(source.is(esi));
  DCHECK(destination.is(edi));
  DCHECK(length.is(ecx));
  cmp(length, Immediate(4));
  j(below, &short_string, Label::kNear);

  // Because source is 4-byte aligned in our uses of this function,
  // we keep source aligned for the rep_movs call by copying the odd bytes
  // at the end of the ranges.
  mov(scratch, Operand(source, length, times_1, -4));
  mov(Operand(destination, length, times_1, -4), scratch);

  // Dispatch small sizes (<= 16 bytes) to unrolled word copies below.
  cmp(length, Immediate(8));
  j(below_equal, &len4, Label::kNear);
  cmp(length, Immediate(12));
  j(below_equal, &len8, Label::kNear);
  cmp(length, Immediate(16));
  j(below_equal, &len12, Label::kNear);

  // Bulk copy: move length/4 dwords with rep movs, then advance destination
  // past the 0-3 remainder bytes already copied above.
  mov(scratch, ecx);
  shr(ecx, 2);
  rep_movs();
  and_(scratch, Immediate(0x3));
  add(destination, scratch);
  jmp(&done, Label::kNear);

  // Unrolled dword copies; entries fall through to copy smaller prefixes.
  bind(&len12);
  mov(scratch, Operand(source, 8));
  mov(Operand(destination, 8), scratch);
  bind(&len8);
  mov(scratch, Operand(source, 4));
  mov(Operand(destination, 4), scratch);
  bind(&len4);
  mov(scratch, Operand(source, 0));
  mov(Operand(destination, 0), scratch);
  add(destination, length);
  jmp(&done, Label::kNear);

  // Fewer than 4 bytes: plain byte loop (skipped entirely for length 0).
  bind(&short_string);
  test(length, length);
  j(zero, &done, Label::kNear);

  bind(&short_loop);
  mov_b(scratch, Operand(source, 0));
  mov_b(Operand(destination, 0), scratch);
  inc(source);
  inc(destination);
  dec(length);
  j(not_zero, &short_loop);

  bind(&done);
}
1899
Steve Blockd0582a62009-12-15 09:54:21 +00001900
// Stores |filler| into every pointer-sized slot in
// [current_address, end_address). |current_address| is advanced to
// |end_address|; emits nothing harmful if the range is empty.
void MacroAssembler::InitializeFieldsWithFiller(Register current_address,
                                                Register end_address,
                                                Register filler) {
  Label loop, entry;
  // Jump to the loop condition first so an empty range writes nothing.
  jmp(&entry, Label::kNear);
  bind(&loop);
  mov(Operand(current_address, 0), filler);
  add(current_address, Immediate(kPointerSize));
  bind(&entry);
  cmp(current_address, end_address);
  j(below, &loop, Label::kNear);
}
1913
1914
1915void MacroAssembler::BooleanBitTest(Register object,
1916 int field_offset,
1917 int bit_index) {
1918 bit_index += kSmiTagSize + kSmiShiftSize;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001919 DCHECK(base::bits::IsPowerOfTwo32(kBitsPerByte));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001920 int byte_index = bit_index / kBitsPerByte;
1921 int byte_bit_index = bit_index & (kBitsPerByte - 1);
1922 test_b(FieldOperand(object, field_offset + byte_index),
1923 static_cast<byte>(1 << byte_bit_index));
1924}
1925
1926
1927
// Jumps to |then_label| if |result| is zero while |op| is negative — i.e. the
// operation would have produced a floating-point -0 that cannot be
// represented as a smi. Falls through otherwise.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  // Non-zero results can never be negative zero.
  test(result, result);
  j(not_zero, &ok, Label::kNear);
  // Result is zero: it is -0 iff the operand was negative.
  test(op, op);
  j(sign, then_label, Label::kNear);
  bind(&ok);
}
1938
1939
// Jumps to |then_label| if |result| is zero while either |op1| or |op2| is
// negative — the two-operand variant of the negative-zero check (e.g. for a
// multiply whose result is 0). |scratch| is clobbered. Falls through
// otherwise.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  // Non-zero results can never be negative zero.
  test(result, result);
  j(not_zero, &ok, Label::kNear);
  // Result is zero: it is -0 iff either operand has its sign bit set.
  mov(scratch, op1);
  or_(scratch, op2);
  j(sign, then_label, Label::kNear);
  bind(&ok);
}
1953
1954
// Loads the constructor of |map| into |result|. The constructor-or-back-
// pointer field may point through a chain of maps (back pointers); follow it
// until a non-map (smi or non-MAP_TYPE heap object) is found. |temp| is
// clobbered by the type check.
void MacroAssembler::GetMapConstructor(Register result, Register map,
                                       Register temp) {
  Label done, loop;
  mov(result, FieldOperand(map, Map::kConstructorOrBackPointerOffset));
  bind(&loop);
  // A smi or any non-map object terminates the back-pointer chain.
  JumpIfSmi(result, &done, Label::kNear);
  CmpObjectType(result, MAP_TYPE, temp);
  j(not_equal, &done, Label::kNear);
  mov(result, FieldOperand(result, Map::kConstructorOrBackPointerOffset));
  jmp(&loop);
  bind(&done);
}
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001967
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001968
// Loads the prototype of JSFunction |function| into |result|. Jumps to |miss|
// if the prototype slot holds the hole (so the caller can allocate one in the
// runtime). |scratch| is clobbered by the type check.
void MacroAssembler::TryGetFunctionPrototype(Register function, Register result,
                                             Register scratch, Label* miss) {
  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(result, Immediate(isolate()->factory()->the_hole_value()));
  j(equal, miss);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done, Label::kNear);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));

  // All done.
  bind(&done);
}
1992
1993
// Emits a call to the code object generated for |stub|, recording |ast_id|
// with the relocation info for type feedback.
void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) {
  DCHECK(AllowThisStubCall(stub));  // Calls are not allowed in some stubs.
  call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
}
1998
1999
// Emits a tail call (jump) to the code object generated for |stub|.
void MacroAssembler::TailCallStub(CodeStub* stub) {
  jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
}
2003
2004
// Emits the return from a stub, popping |argc| - 1 arguments in addition to
// the return address (the receiver/first argument is accounted for by the
// caller's convention). Only valid while generating a stub.
void MacroAssembler::StubReturn(int argc) {
  DCHECK(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}
2009
2010
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002011bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002012 return has_frame_ || !stub->SometimesSetsUpAFrame();
Steve Blocka7e24c12009-10-30 11:49:00 +00002013}
2014
2015
// Extracts the array index cached in string hash field |hash| and leaves it
// as a smi in |index|. |hash| and |index| may be the same register.
void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it does not conflict.
  DCHECK(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  if (!index.is(hash)) {
    mov(index, hash);
  }
  DecodeFieldToSmi<String::ArrayIndexValueBits>(index);
}
2027
2028
// Calls the C++ runtime function |f| with |num_arguments| arguments already
// on the stack, via the CEntry stub. |save_doubles| controls whether the stub
// preserves FP registers. Clobbers eax (argument count) and ebx (entry).
void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments,
                                 SaveFPRegsMode save_doubles) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  CHECK(f->nargs < 0 || f->nargs == num_arguments);

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Move(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f, isolate())));
  CEntryStub ces(isolate(), 1, save_doubles);
  CallStub(&ces);
}
2046
2047
// Calls the external (C++) function identified by |ref| with |num_arguments|
// arguments already on the stack, via the CEntry stub. Clobbers eax and ebx.
void MacroAssembler::CallExternalReference(ExternalReference ref,
                                           int num_arguments) {
  mov(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ref));

  CEntryStub stub(isolate(), 1);
  CallStub(&stub);
}
2056
2057
// Tail-calls the runtime function identified by |fid|, leaving the arguments
// on the stack for the CEntry stub. For fixed-arity functions the argument
// count is loaded into eax here; variable-arity functions must have eax set
// by the caller.
void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid) {
  // ----------- S t a t e -------------
  //  -- esp[0] : return address
  //  -- esp[8] : argument num_arguments - 1
  //  ...
  //  -- esp[8 * num_arguments] : argument 0 (receiver)
  //
  //  For runtime functions with variable arguments:
  //  -- eax : number of arguments
  // -----------------------------------

  const Runtime::Function* function = Runtime::FunctionForId(fid);
  DCHECK_EQ(1, function->result_size);
  if (function->nargs >= 0) {
    // TODO(1236192): Most runtime routines don't need the number of
    // arguments passed in because it is constant. At some point we
    // should remove this need and make the runtime routine entry code
    // smarter.
    mov(eax, Immediate(function->nargs));
  }
  JumpToExternalReference(ExternalReference(fid, isolate()));
}
2080
2081
// Tail-calls the CEntry stub with |ext| as the C function entry point in ebx.
// Clobbers ebx.
void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces(isolate(), 1);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}
2088
2089
// Emits the argument-count check that precedes a function invocation.
// Compares |expected| and |actual| argument counts; on mismatch, calls (or
// tail-jumps to, per |flag|) the ArgumentsAdaptorTrampoline. On the
// fall-through/|done| path, eax holds the actual count and ebx the expected
// count (per the ia32 calling convention asserted below). Sets
// *definitely_mismatches when the mismatch is known at compile time, in which
// case no jump to |done| is emitted after the adaptor call.
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Label* done,
                                    bool* definitely_mismatches,
                                    InvokeFlag flag,
                                    Label::Distance done_near,
                                    const CallWrapper& call_wrapper) {
  bool definitely_matches = false;
  *definitely_mismatches = false;
  Label invoke;
  if (expected.is_immediate()) {
    DCHECK(actual.is_immediate());
    // Pass the actual argument count in eax.
    mov(eax, actual.immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        *definitely_mismatches = true;
        // Pass the expected argument count to the adaptor in ebx.
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      mov(eax, actual.immediate());
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      DCHECK(expected.reg().is(ebx));
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), actual.reg());
      j(equal, &invoke);
      DCHECK(actual.reg().is(eax));
      DCHECK(expected.reg().is(ebx));
    } else {
      // Expected and actual share a register; counts trivially match.
      Move(eax, actual.reg());
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET));
      call(adaptor, RelocInfo::CODE_TARGET);
      call_wrapper.AfterCall();
      if (!*definitely_mismatches) {
        // The adaptor returns here; skip the direct invocation path.
        jmp(done, done_near);
      }
    } else {
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
2155
2156
// If the debugger's step-in flag is set, calls the runtime to flood the
// target function |fun| with one-shot breakpoints so that stepping enters
// it. Registers the caller needs afterwards (|new_target| and any
// register-based parameter counts) are preserved across the runtime call;
// register counts are Smi-tagged while saved on the stack.
void MacroAssembler::FloodFunctionIfStepping(Register fun, Register new_target,
                                             const ParameterCount& expected,
                                             const ParameterCount& actual) {
  Label skip_flooding;
  ExternalReference step_in_enabled =
      ExternalReference::debug_step_in_enabled_address(isolate());
  cmpb(Operand::StaticVariable(step_in_enabled), 0);
  j(equal, &skip_flooding);
  {
    // Enter an internal frame only if we are not already inside one.
    FrameScope frame(this,
                     has_frame() ? StackFrame::NONE : StackFrame::INTERNAL);
    if (expected.is_reg()) {
      SmiTag(expected.reg());
      Push(expected.reg());
    }
    if (actual.is_reg()) {
      SmiTag(actual.reg());
      Push(actual.reg());
    }
    if (new_target.is_valid()) {
      Push(new_target);
    }
    // Pushed twice: once as the runtime call argument (consumed by the
    // call) and once to be popped back into |fun| below.
    Push(fun);
    Push(fun);
    CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
    Pop(fun);
    if (new_target.is_valid()) {
      Pop(new_target);
    }
    // Restore in reverse push order.
    if (actual.is_reg()) {
      Pop(actual.reg());
      SmiUntag(actual.reg());
    }
    if (expected.is_reg()) {
      Pop(expected.reg());
      SmiUntag(expected.reg());
    }
  }
  bind(&skip_flooding);
}
2197
2198
// Invokes the code entry of |function| (must be edi) with the given
// argument counts, either as a call or a tail jump depending on |flag|.
// |new_target| must be edx when valid; otherwise edx is loaded with
// undefined. Argument-count mismatches are routed to the arguments
// adaptor trampoline by InvokePrologue.
void MacroAssembler::InvokeFunctionCode(Register function, Register new_target,
                                        const ParameterCount& expected,
                                        const ParameterCount& actual,
                                        InvokeFlag flag,
                                        const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());
  DCHECK(function.is(edi));
  DCHECK_IMPLIES(new_target.is_valid(), new_target.is(edx));

  if (call_wrapper.NeedsDebugStepCheck()) {
    FloodFunctionIfStepping(function, new_target, expected, actual);
  }

  // Clear the new.target register if not given.
  if (!new_target.is_valid()) {
    mov(edx, isolate()->factory()->undefined_value());
  }

  Label done;
  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, &done, &definitely_mismatches, flag,
                 Label::kNear, call_wrapper);
  // If the counts definitely mismatch, InvokePrologue has already jumped
  // to the adaptor and nothing more needs to be emitted.
  if (!definitely_mismatches) {
    // We call indirectly through the code field in the function to
    // allow recompilation to take effect without changing any of the
    // call sites.
    Operand code = FieldOperand(function, JSFunction::kCodeEntryOffset);
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(code));
      call(code);
      call_wrapper.AfterCall();
    } else {
      DCHECK(flag == JUMP_FUNCTION);
      jmp(code);
    }
    bind(&done);
  }
}
2238
2239
// Invokes the JSFunction in edi, reading the expected argument count from
// its SharedFunctionInfo into ebx and loading its context into esi.
void MacroAssembler::InvokeFunction(Register fun,
                                    Register new_target,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  DCHECK(fun.is(edi));
  mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(ebx, SharedFunctionInfo::kFormalParameterCountOffset));
  // The formal parameter count field is stored as a Smi.
  SmiUntag(ebx);

  ParameterCount expected(ebx);
  InvokeFunctionCode(edi, new_target, expected, actual, flag, call_wrapper);
}
2257
2258
// Invokes the JSFunction in edi with a caller-supplied expected argument
// count; only the function's context is loaded (into esi) and no
// new.target register is passed.
void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  DCHECK(fun.is(edi));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  InvokeFunctionCode(edi, no_reg, expected, actual, flag, call_wrapper);
}
2272
2273
// Materializes |function| into edi (indirectly via a cell if it lives in
// new space) and invokes it through the register-based overload above.
void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  LoadHeapObject(edi, function);
  InvokeFunction(edi, expected, actual, flag, call_wrapper);
}
2282
2283
// Loads into |dst| the context |context_chain_length| levels up from the
// current context (esi). A length of zero copies esi into |dst| so that a
// subsequent store cannot clobber the context register itself.
void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    mov(dst, Operand(esi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    for (int i = 1; i < context_chain_length; i++) {
      mov(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    }
  } else {
    // Slot is in the current function context. Move it into the
    // destination register in case we store into it (the write barrier
    // cannot be allowed to destroy the context in esi).
    mov(dst, esi);
  }

  // We should not have found a with context by walking the context chain
  // (i.e., the static scope chain and runtime context chain do not agree).
  // A variable occurring in such a scope should have slot type LOOKUP and
  // not CONTEXT.
  if (emit_debug_code()) {
    cmp(FieldOperand(dst, HeapObject::kMapOffset),
        isolate()->factory()->with_context_map());
    Check(not_equal, kVariableResolvedToWithContext);
  }
}
2308
2309
// Loads the global proxy object of the current native context into |dst|.
void MacroAssembler::LoadGlobalProxy(Register dst) {
  mov(dst, NativeContextOperand());
  mov(dst, ContextOperand(dst, Context::GLOBAL_PROXY_INDEX));
}
2314
2315
// If |map_in_out| equals the native context's cached array map for
// |expected_kind|, replaces it with the cached map for |transitioned_kind|;
// otherwise jumps to |no_map_match|. |scratch| is clobbered with the
// native context.
void MacroAssembler::LoadTransitionedArrayMapConditional(
    ElementsKind expected_kind,
    ElementsKind transitioned_kind,
    Register map_in_out,
    Register scratch,
    Label* no_map_match) {
  DCHECK(IsFastElementsKind(expected_kind));
  DCHECK(IsFastElementsKind(transitioned_kind));

  // Check that the function's map is the same as the expected cached map.
  mov(scratch, NativeContextOperand());
  cmp(map_in_out,
      ContextOperand(scratch, Context::ArrayMapIndex(expected_kind)));
  j(not_equal, no_map_match);

  // Use the transitioned cached map.
  mov(map_in_out,
      ContextOperand(scratch, Context::ArrayMapIndex(transitioned_kind)));
}
2335
2336
// Loads the global function stored at slot |index| of the native context
// into |function|.
void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the native context from the current context.
  mov(function, NativeContextOperand());
  // Load the function from the native context.
  mov(function, ContextOperand(function, index));
}
2343
2344
// Loads the initial map of the global |function| into |map|. Debug code
// verifies the loaded value really is a map and aborts otherwise.
void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                 Register map) {
  // Load the initial map. The global functions all have initial maps.
  mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
    jmp(&ok);
    bind(&fail);
    Abort(kGlobalFunctionsMustHaveInitialMap);
    bind(&ok);
  }
}
2358
Steve Blockd0582a62009-12-15 09:54:21 +00002359
// Store the value in register src in the safepoint register stack
// slot for register dst, so that the GC can find it at a safepoint.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
  mov(SafepointRegisterSlot(dst), src);
}
2365
2366
// Store the immediate src in the safepoint register stack slot for dst.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Immediate src) {
  mov(SafepointRegisterSlot(dst), src);
}
2370
2371
// Load into dst the value saved in the safepoint stack slot for register src.
void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  mov(dst, SafepointRegisterSlot(src));
}
2375
2376
// Returns the esp-relative operand addressing the safepoint save slot
// for |reg|.
Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return Operand(esp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}
2380
2381
// Maps a register code to its slot index in the safepoint register save
// area on the stack.
int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
  // The registers are pushed starting with the lowest encoding,
  // which means that lowest encodings are furthest away from
  // the stack pointer.
  DCHECK(reg_code >= 0 && reg_code < kNumSafepointRegisters);
  return kNumSafepointRegisters - reg_code - 1;
}
2389
2390
// Loads |object| into |result|. New-space objects may move during GC, so
// they are referenced indirectly through a cell; other objects are
// embedded directly in the instruction stream.
void MacroAssembler::LoadHeapObject(Register result,
                                    Handle<HeapObject> object) {
  AllowDeferredHandleDereference embedding_raw_address;
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<Cell> cell = isolate()->factory()->NewCell(object);
    mov(result, Operand::ForCell(cell));
  } else {
    mov(result, object);
  }
}
2401
2402
// Compares |reg| against |object|, indirecting through a cell for
// new-space objects (which may move during GC).
void MacroAssembler::CmpHeapObject(Register reg, Handle<HeapObject> object) {
  AllowDeferredHandleDereference using_raw_address;
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<Cell> cell = isolate()->factory()->NewCell(object);
    cmp(reg, Operand::ForCell(cell));
  } else {
    cmp(reg, object);
  }
}
2412
2413
// Pushes |object| on the stack, indirecting through a cell for new-space
// objects (which may move during GC).
void MacroAssembler::PushHeapObject(Handle<HeapObject> object) {
  AllowDeferredHandleDereference using_raw_address;
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<Cell> cell = isolate()->factory()->NewCell(object);
    push(Operand::ForCell(cell));
  } else {
    Push(object);
  }
}
2423
2424
// Compares |value| against the value held by the weak |cell|. |scratch|
// is clobbered (it receives the cell itself).
void MacroAssembler::CmpWeakValue(Register value, Handle<WeakCell> cell,
                                  Register scratch) {
  mov(scratch, cell);
  cmp(value, FieldOperand(scratch, WeakCell::kValueOffset));
}
2430
2431
// Loads the value held by the weak |cell| into |value|.
void MacroAssembler::GetWeakValue(Register value, Handle<WeakCell> cell) {
  mov(value, cell);
  mov(value, FieldOperand(value, WeakCell::kValueOffset));
}
2436
2437
// Loads the weak cell's value into |value| and jumps to |miss| if it is a
// Smi (which indicates the cell no longer holds a live heap object).
void MacroAssembler::LoadWeakValue(Register value, Handle<WeakCell> cell,
                                   Label* miss) {
  GetWeakValue(value, cell);
  JumpIfSmi(value, miss);
}
2443
2444
// Returns from the current function without popping any arguments.
void MacroAssembler::Ret() {
  ret(0);
}
2448
2449
Steve Block1e0659c2011-05-24 12:43:12 +01002450void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
2451 if (is_uint16(bytes_dropped)) {
2452 ret(bytes_dropped);
2453 } else {
2454 pop(scratch);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002455 add(esp, Immediate(bytes_dropped));
Steve Block1e0659c2011-05-24 12:43:12 +01002456 push(scratch);
2457 ret(0);
2458 }
2459}
2460
2461
Leon Clarkee46be812010-01-19 14:06:41 +00002462void MacroAssembler::Drop(int stack_elements) {
2463 if (stack_elements > 0) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002464 add(esp, Immediate(stack_elements * kPointerSize));
Leon Clarkee46be812010-01-19 14:06:41 +00002465 }
2466}
2467
2468
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002469void MacroAssembler::Move(Register dst, Register src) {
2470 if (!dst.is(src)) {
2471 mov(dst, src);
2472 }
2473}
2474
2475
// Loads the immediate |x| into |dst|, using xor for zero since it has a
// shorter encoding than a mov with a 32-bit zero immediate.
void MacroAssembler::Move(Register dst, const Immediate& x) {
  if (x.is_zero()) {
    xor_(dst, dst);  // Shorter than mov of 32-bit immediate 0.
  } else {
    mov(dst, x);
  }
}
2483
2484
// Stores the immediate |x| to the memory operand |dst|.
void MacroAssembler::Move(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}
2488
2489
// Materializes the 32-bit constant |src| in XMM register |dst| without a
// constant-pool load where possible: zero via pxor, a single contiguous
// run of set bits via pcmpeqd (all-ones) plus shifts, and otherwise by
// routing the value through eax (saved and restored around the transfer).
void MacroAssembler::Move(XMMRegister dst, uint32_t src) {
  if (src == 0) {
    pxor(dst, dst);
  } else {
    unsigned cnt = base::bits::CountPopulation32(src);
    unsigned nlz = base::bits::CountLeadingZeros32(src);
    unsigned ntz = base::bits::CountTrailingZeros32(src);
    // nlz + cnt + ntz == 32 means all set bits are contiguous.
    if (nlz + cnt + ntz == 32) {
      pcmpeqd(dst, dst);
      if (ntz == 0) {
        psrld(dst, 32 - cnt);
      } else {
        pslld(dst, 32 - cnt);
        if (nlz != 0) psrld(dst, nlz);
      }
    } else {
      // General case: move the constant via eax.
      push(eax);
      mov(eax, Immediate(src));
      movd(dst, Operand(eax));
      pop(eax);
    }
  }
}
2513
2514
// Materializes the 64-bit constant |src| in XMM register |dst|: zero via
// pxor, a contiguous run of set bits via pcmpeqd plus 64-bit shifts, an
// upper-word-only value by building the upper word and shifting it up,
// otherwise through eax with pinsrd when SSE4.1 is available, or as a
// last resort via two pushes and a stack load.
void MacroAssembler::Move(XMMRegister dst, uint64_t src) {
  if (src == 0) {
    pxor(dst, dst);
  } else {
    uint32_t lower = static_cast<uint32_t>(src);
    uint32_t upper = static_cast<uint32_t>(src >> 32);
    unsigned cnt = base::bits::CountPopulation64(src);
    unsigned nlz = base::bits::CountLeadingZeros64(src);
    unsigned ntz = base::bits::CountTrailingZeros64(src);
    // nlz + cnt + ntz == 64 means all set bits are contiguous.
    if (nlz + cnt + ntz == 64) {
      pcmpeqd(dst, dst);
      if (ntz == 0) {
        psrlq(dst, 64 - cnt);
      } else {
        psllq(dst, 64 - cnt);
        if (nlz != 0) psrlq(dst, nlz);
      }
    } else if (lower == 0) {
      Move(dst, upper);
      psllq(dst, 32);
    } else if (CpuFeatures::IsSupported(SSE4_1)) {
      CpuFeatureScope scope(this, SSE4_1);
      push(eax);
      Move(eax, Immediate(lower));
      movd(dst, Operand(eax));
      Move(eax, Immediate(upper));
      pinsrd(dst, Operand(eax), 1);
      pop(eax);
    } else {
      // Assemble the double on the stack and load it.
      push(Immediate(upper));
      push(Immediate(lower));
      movsd(dst, Operand(esp, 0));
      add(esp, Immediate(kDoubleSize));
    }
  }
}
2551
2552
// Extracts 32-bit lane |imm8| (0 or 1) of |src| into |dst|. Lane 0 is a
// plain movd; lane 1 uses the SSE4.1 pextrd instruction when available,
// otherwise it is shuffled into xmm0 first (xmm0 is clobbered).
void MacroAssembler::Pextrd(Register dst, XMMRegister src, int8_t imm8) {
  if (imm8 == 0) {
    movd(dst, src);
    return;
  }
  DCHECK_EQ(1, imm8);
  if (CpuFeatures::IsSupported(SSE4_1)) {
    CpuFeatureScope sse_scope(this, SSE4_1);
    pextrd(dst, src, imm8);
    return;
  }
  pshufd(xmm0, src, 1);
  movd(dst, xmm0);
}
2567
2568
// Inserts the 32-bit value at |src| into lane |imm8| (0 or 1) of |dst|.
// Uses the SSE4.1 pinsrd instruction when available; otherwise the value
// is merged in via xmm0 (clobbered) with punpckldq shuffles.
void MacroAssembler::Pinsrd(XMMRegister dst, const Operand& src, int8_t imm8) {
  DCHECK(imm8 == 0 || imm8 == 1);
  if (CpuFeatures::IsSupported(SSE4_1)) {
    CpuFeatureScope sse_scope(this, SSE4_1);
    pinsrd(dst, src, imm8);
    return;
  }
  movd(xmm0, src);
  if (imm8 == 1) {
    // New value becomes the upper lane; old lane 0 is preserved.
    punpckldq(dst, xmm0);
  } else {
    DCHECK_EQ(0, imm8);
    // Shift the old upper lane down, then interleave under the new value.
    psrlq(dst, 32);
    punpckldq(xmm0, dst);
    movaps(dst, xmm0);
  }
}
2586
2587
// Counts the leading zero bits of |src| into |dst|. Uses lzcnt when the
// CPU supports it; otherwise emulates it with bsr, mapping the undefined
// src == 0 case to the expected result of 32.
void MacroAssembler::Lzcnt(Register dst, const Operand& src) {
  if (CpuFeatures::IsSupported(LZCNT)) {
    CpuFeatureScope scope(this, LZCNT);
    lzcnt(dst, src);
    return;
  }
  Label not_zero_src;
  bsr(dst, src);
  j(not_zero, &not_zero_src, Label::kNear);
  Move(dst, Immediate(63));  // 63^31 == 32
  bind(&not_zero_src);
  xor_(dst, Immediate(31));  // for x in [0..31], 31^x == 31-x.
}
2601
2602
// Counts the trailing zero bits of |src| into |dst|. Uses tzcnt when the
// BMI1 feature is available; otherwise emulates it with bsf, mapping the
// undefined src == 0 case to the expected result of 32.
void MacroAssembler::Tzcnt(Register dst, const Operand& src) {
  if (CpuFeatures::IsSupported(BMI1)) {
    CpuFeatureScope scope(this, BMI1);
    tzcnt(dst, src);
    return;
  }
  Label not_zero_src;
  bsf(dst, src);
  j(not_zero, &not_zero_src, Label::kNear);
  Move(dst, Immediate(32));  // The result of tzcnt is 32 if src = 0.
  bind(&not_zero_src);
}
2615
2616
// Population count of |src| into |dst|. Only supported when the POPCNT
// CPU feature is available; there is no fallback sequence here, so
// callers must guard on the feature.
void MacroAssembler::Popcnt(Register dst, const Operand& src) {
  if (CpuFeatures::IsSupported(POPCNT)) {
    CpuFeatureScope scope(this, POPCNT);
    popcnt(dst, src);
    return;
  }
  UNREACHABLE();
}
2625
2626
// Sets a stats counter to |value|. No code is emitted unless native code
// counters are enabled and this counter is active.
void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
  }
}
2632
2633
// Adds |value| (must be > 0) to a stats counter, using the shorter inc
// encoding for the common increment-by-one case.
void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      inc(operand);
    } else {
      add(operand, Immediate(value));
    }
  }
}
2645
2646
// Subtracts |value| (must be > 0) from a stats counter, using the shorter
// dec encoding for the common decrement-by-one case.
void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      dec(operand);
    } else {
      sub(operand, Immediate(value));
    }
  }
}
2658
2659
// Adds |value| (must be > 0) to a stats counter only when condition |cc|
// currently holds. The flags register is saved and restored with
// pushfd/popfd so the caller's condition codes survive the update.
void MacroAssembler::IncrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    IncrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}
2673
2674
// Subtracts |value| (must be > 0) from a stats counter only when
// condition |cc| currently holds; the flags register is preserved via
// pushfd/popfd.
void MacroAssembler::DecrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    DecrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}
2688
2689
// Debug-only variant of Check: verifies that condition |cc| holds and
// aborts with |reason| otherwise; emits nothing in release code.
void MacroAssembler::Assert(Condition cc, BailoutReason reason) {
  if (emit_debug_code()) Check(cc, reason);
}
2693
2694
// Debug-only check that |elements| has a fast elements backing-store map:
// FixedArray, FixedDoubleArray or copy-on-write FixedArray.
void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    Factory* factory = isolate()->factory();
    Label ok;
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_double_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_cow_array_map()));
    j(equal, &ok);
    Abort(kJSObjectWithFastElementsMapHasSlowElements);
    bind(&ok);
  }
}
2712
2713
// Verifies that condition |cc| holds and aborts with |reason| otherwise.
// Unlike Assert, this check is also emitted in release code.
void MacroAssembler::Check(Condition cc, BailoutReason reason) {
  Label L;
  j(cc, &L);
  Abort(reason);
  // will not return here
  bind(&L);
}
2721
2722
// Emits a runtime check that esp satisfies the platform's activation
// frame alignment (when that exceeds one word), trapping with int3 on
// misalignment.
void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = base::OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    Label alignment_as_expected;
    test(esp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}
2736
2737
// Aborts code execution with |reason|: in debug builds the reason text is
// recorded as an assembler comment (or, under --trap-on-abort, replaced
// by an immediate int3 trap), then the Smi-encoded reason is passed to
// Runtime::kAbort. Does not return; ends with int3.
void MacroAssembler::Abort(BailoutReason reason) {
#ifdef DEBUG
  const char* msg = GetBailoutReason(reason);
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }

  if (FLAG_trap_on_abort) {
    int3();
    return;
  }
#endif

  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(reason))));
  // Disable stub call restrictions to always allow calls to abort.
  if (!has_frame_) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    CallRuntime(Runtime::kAbort);
  } else {
    CallRuntime(Runtime::kAbort);
  }
  // will not return here
  int3();
}
2765
2766
// Loads the descriptor array of |map| into |descriptors|.
void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  mov(descriptors, FieldOperand(map, Map::kDescriptorsOffset));
}
2771
2772
// Extracts the number-of-own-descriptors bit field from |map|'s bit
// field 3 into |dst|.
void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
  mov(dst, FieldOperand(map, Map::kBitField3Offset));
  DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
}
2777
2778
// Loads into |dst| the getter or setter (selected by |accessor|) of the
// AccessorPair stored at |accessor_index| in |holder|'s descriptor array.
// |dst| is used as the scratch register for each intermediate load.
void MacroAssembler::LoadAccessor(Register dst, Register holder,
                                  int accessor_index,
                                  AccessorComponent accessor) {
  mov(dst, FieldOperand(holder, HeapObject::kMapOffset));
  LoadInstanceDescriptors(dst, dst);
  mov(dst, FieldOperand(dst, DescriptorArray::GetValueOffset(accessor_index)));
  int offset = accessor == ACCESSOR_GETTER ? AccessorPair::kGetterOffset
                                           : AccessorPair::kSetterOffset;
  mov(dst, FieldOperand(dst, offset));
}
2789
2790
// Loads the double 2^|power| into |dst| by constructing its IEEE-754 bit
// pattern directly: the biased exponent is placed in |scratch| (which is
// clobbered) and shifted into the exponent field.
void MacroAssembler::LoadPowerOf2(XMMRegister dst,
                                  Register scratch,
                                  int power) {
  DCHECK(is_uintn(power + HeapNumber::kExponentBias,
                  HeapNumber::kExponentBits));
  mov(scratch, Immediate(power + HeapNumber::kExponentBias));
  movd(dst, scratch);
  psllq(dst, HeapNumber::kMantissaBits);
}
2800
2801
// Jumps to |failure| unless |instance_type| describes a sequential
// one-byte string. |scratch| is clobbered and may alias |instance_type|.
void MacroAssembler::JumpIfInstanceTypeIsNotSequentialOneByte(
    Register instance_type, Register scratch, Label* failure) {
  if (!scratch.is(instance_type)) {
    mov(scratch, instance_type);
  }
  and_(scratch,
       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
  cmp(scratch, kStringTag | kSeqStringTag | kOneByteStringTag);
  j(not_equal, failure);
}
2812
2813
// Jumps to |failure| unless both |object1| and |object2| are non-Smi
// sequential one-byte strings. Both scratch registers are clobbered.
void MacroAssembler::JumpIfNotBothSequentialOneByteStrings(Register object1,
                                                           Register object2,
                                                           Register scratch1,
                                                           Register scratch2,
                                                           Label* failure) {
  // Check that both objects are not smis.
  STATIC_ASSERT(kSmiTag == 0);
  // ANDing the objects together preserves the tag bit only if both are
  // Smis; a single JumpIfSmi then rejects the "either is a Smi" case.
  mov(scratch1, object1);
  and_(scratch1, object2);
  JumpIfSmi(scratch1, failure);

  // Load instance type for both strings.
  mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
  mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
  movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat one-byte strings.
  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatOneByteStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;
  // Interleave bits from both instance types and compare them in one check.
  DCHECK_EQ(0, kFlatOneByteStringMask & (kFlatOneByteStringMask << 3));
  and_(scratch1, kFlatOneByteStringMask);
  and_(scratch2, kFlatOneByteStringMask);
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmp(scratch1, kFlatOneByteStringTag | (kFlatOneByteStringTag << 3));
  j(not_equal, failure);
}
2844
2845
// Jumps to |not_unique_name| unless the instance type at |operand| is a
// unique name: either an internalized string or a Symbol.
void MacroAssembler::JumpIfNotUniqueNameInstanceType(Operand operand,
                                                     Label* not_unique_name,
                                                     Label::Distance distance) {
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  Label succeed;
  // Zero after masking means: is a string and is internalized.
  test(operand, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  j(zero, &succeed);
  // Otherwise only a Symbol still qualifies as a unique name.
  cmpb(operand, static_cast<uint8_t>(SYMBOL_TYPE));
  j(not_equal, not_unique_name, distance);

  bind(&succeed);
}
2858
2859
// Emits checks guarding a character store into a sequential string:
// |string| must be a heap object whose representation and encoding match
// |encoding_mask|, and the (untagged) |index| must lie within
// [0, string length). |value| is saved and restored around its use as a
// scratch register; |index| is temporarily Smi-tagged and restored.
void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
                                               Register index,
                                               Register value,
                                               uint32_t encoding_mask) {
  Label is_object;
  JumpIfNotSmi(string, &is_object, Label::kNear);
  Abort(kNonObject);
  bind(&is_object);

  push(value);
  mov(value, FieldOperand(string, HeapObject::kMapOffset));
  movzx_b(value, FieldOperand(value, Map::kInstanceTypeOffset));

  and_(value, Immediate(kStringRepresentationMask | kStringEncodingMask));
  cmp(value, Immediate(encoding_mask));
  pop(value);
  Check(equal, kUnexpectedStringType);

  // The index is assumed to be untagged coming in, tag it to compare with the
  // string length without using a temp register, it is restored at the end of
  // this function.
  SmiTag(index);
  Check(no_overflow, kIndexIsTooLarge);

  cmp(index, FieldOperand(string, String::kLengthOffset));
  Check(less, kIndexIsTooLarge);

  cmp(index, Immediate(Smi::FromInt(0)));
  Check(greater_equal, kIndexIsNegative);

  // Restore the index
  SmiUntag(index);
}
2893
2894
// Reserves stack space for |num_arguments| C call arguments. When the OS
// requires frame alignment, esp is aligned down and its original value is
// saved in the slot just above the argument area so CallCFunction can
// restore it. |scratch| is clobbered.
void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
  int frame_alignment = base::OS::ActivationFrameAlignment();
  if (frame_alignment != 0) {
    // Make stack end at alignment and make room for num_arguments words
    // and the original value of esp.
    mov(scratch, esp);
    sub(esp, Immediate((num_arguments + 1) * kPointerSize));
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    and_(esp, -frame_alignment);
    mov(Operand(esp, num_arguments * kPointerSize), scratch);
  } else {
    sub(esp, Immediate(num_arguments * kPointerSize));
  }
}
2909
2910
// Calls the C function at |function|. PrepareCallCFunction must have been
// called first with the same |num_arguments|. The target address is loaded
// into eax; clobbering eax is safe because it is the return-value register.
void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Trashing eax is ok as it will be the return value.
  mov(eax, Immediate(function));
  CallCFunction(eax, num_arguments);
}
2917
2918
// Calls the C function whose address is in |function|, then removes the
// argument space set up by PrepareCallCFunction. When the platform mandates
// frame alignment, the pre-call esp saved by PrepareCallCFunction is
// reloaded; otherwise the argument slots are popped by adjusting esp.
// Requires an active frame (DCHECK below).
void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  DCHECK(has_frame());
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(function);
  if (base::OS::ActivationFrameAlignment() != 0) {
    // Restore the esp value saved above the arguments by
    // PrepareCallCFunction.
    mov(esp, Operand(esp, num_arguments * kPointerSize));
  } else {
    add(esp, Immediate(num_arguments * kPointerSize));
  }
}
2934
2935
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002936#ifdef DEBUG
2937bool AreAliased(Register reg1,
2938 Register reg2,
2939 Register reg3,
2940 Register reg4,
2941 Register reg5,
2942 Register reg6,
2943 Register reg7,
2944 Register reg8) {
2945 int n_of_valid_regs = reg1.is_valid() + reg2.is_valid() +
2946 reg3.is_valid() + reg4.is_valid() + reg5.is_valid() + reg6.is_valid() +
2947 reg7.is_valid() + reg8.is_valid();
2948
2949 RegList regs = 0;
2950 if (reg1.is_valid()) regs |= reg1.bit();
2951 if (reg2.is_valid()) regs |= reg2.bit();
2952 if (reg3.is_valid()) regs |= reg3.bit();
2953 if (reg4.is_valid()) regs |= reg4.bit();
2954 if (reg5.is_valid()) regs |= reg5.bit();
2955 if (reg6.is_valid()) regs |= reg6.bit();
2956 if (reg7.is_valid()) regs |= reg7.bit();
2957 if (reg8.is_valid()) regs |= reg8.bit();
2958 int n_of_non_aliasing_regs = NumRegs(regs);
2959
2960 return n_of_valid_regs != n_of_non_aliasing_regs;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002961}
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002962#endif
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002963
2964
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002965CodePatcher::CodePatcher(Isolate* isolate, byte* address, int size)
Ben Murdoch8b112d22011-06-08 16:22:53 +01002966 : address_(address),
2967 size_(size),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002968 masm_(isolate, address, size + Assembler::kGap, CodeObjectRequired::kNo) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002969 // Create a new macro assembler pointing to the address of the code to patch.
2970 // The size is adjusted with kGap on order for the assembler to generate size
2971 // bytes of instructions without failing with buffer size constraints.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002972 DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
Steve Blocka7e24c12009-10-30 11:49:00 +00002973}
2974
2975
// Finalizes the patch: flushes the instruction cache over the patched range
// so the CPU sees the new code, and verifies that exactly |size_| bytes of
// instructions were emitted.
CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  Assembler::FlushICache(masm_.isolate(), address_, size_);

  // Check that the code was patched as expected.
  DCHECK(masm_.pc_ == address_ + size_);
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
2984
2985
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002986void MacroAssembler::CheckPageFlag(
2987 Register object,
2988 Register scratch,
2989 int mask,
2990 Condition cc,
2991 Label* condition_met,
2992 Label::Distance condition_met_distance) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002993 DCHECK(cc == zero || cc == not_zero);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002994 if (scratch.is(object)) {
2995 and_(scratch, Immediate(~Page::kPageAlignmentMask));
2996 } else {
2997 mov(scratch, Immediate(~Page::kPageAlignmentMask));
2998 and_(scratch, object);
2999 }
3000 if (mask < (1 << kBitsPerByte)) {
3001 test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
3002 static_cast<uint8_t>(mask));
3003 } else {
3004 test(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
3005 }
3006 j(cc, condition_met, condition_met_distance);
3007}
3008
3009
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003010void MacroAssembler::CheckPageFlagForMap(
3011 Handle<Map> map,
3012 int mask,
3013 Condition cc,
3014 Label* condition_met,
3015 Label::Distance condition_met_distance) {
3016 DCHECK(cc == zero || cc == not_zero);
3017 Page* page = Page::FromAddress(map->address());
3018 DCHECK(!serializer_enabled()); // Serializer cannot match page_flags.
3019 ExternalReference reference(ExternalReference::page_flags(page));
3020 // The inlined static address check of the page's flags relies
3021 // on maps never being compacted.
3022 DCHECK(!isolate()->heap()->mark_compact_collector()->
3023 IsOnEvacuationCandidate(*map));
3024 if (mask < (1 << kBitsPerByte)) {
3025 test_b(Operand::StaticVariable(reference), static_cast<uint8_t>(mask));
3026 } else {
3027 test(Operand::StaticVariable(reference), Immediate(mask));
3028 }
3029 j(cc, condition_met, condition_met_distance);
3030}
3031
3032
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003033void MacroAssembler::JumpIfBlack(Register object,
3034 Register scratch0,
3035 Register scratch1,
3036 Label* on_black,
3037 Label::Distance on_black_near) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003038 HasColor(object, scratch0, scratch1, on_black, on_black_near, 1,
3039 1); // kBlackBitPattern.
3040 DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003041}
3042
3043
// Jumps to |has_color| when the pair of consecutive mark bits for |object|
// equals (first_bit, second_bit). |bitmap_scratch| and |mask_scratch| are
// loaded by GetMarkBits with the bitmap cell address and the single-bit mask
// of the first mark bit; ecx is clobbered by GetMarkBits. Handles the case
// where the bit pair straddles a bitmap cell boundary.
void MacroAssembler::HasColor(Register object,
                              Register bitmap_scratch,
                              Register mask_scratch,
                              Label* has_color,
                              Label::Distance has_color_distance,
                              int first_bit,
                              int second_bit) {
  DCHECK(!AreAliased(object, bitmap_scratch, mask_scratch, ecx));

  GetMarkBits(object, bitmap_scratch, mask_scratch);

  Label other_color, word_boundary;
  // Test the first mark bit; bail out if it doesn't match first_bit.
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(first_bit == 1 ? zero : not_zero, &other_color, Label::kNear);
  add(mask_scratch, mask_scratch); // Shift left 1 by adding.
  // The add overflowing to zero means the mask was the cell's top bit, so
  // the second mark bit lives in the next bitmap cell.
  j(zero, &word_boundary, Label::kNear);
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  jmp(&other_color, Label::kNear);

  bind(&word_boundary);
  // Second mark bit is bit 0 of the following bitmap cell.
  test_b(Operand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize), 1);

  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  bind(&other_color);
}
3070
3071
// For the heap address in |addr_reg|, computes the address of the marking
// bitmap cell into |bitmap_reg| and a single-bit mask selecting the object's
// first mark bit into |mask_reg|. Clobbers ecx (it holds the shift count
// for the final shl_cl). |addr_reg| is left unchanged.
void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  DCHECK(!AreAliased(addr_reg, mask_reg, bitmap_reg, ecx));
  // bitmap_reg = page start of addr_reg.
  mov(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
  and_(bitmap_reg, addr_reg);
  mov(ecx, addr_reg);
  // Byte offset of the bitmap cell within the page's bitmap: one mark bit
  // per pointer-sized word, kBitsPerCell bits per cell.
  int shift =
      Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
  shr(ecx, shift);
  and_(ecx,
       (Page::kPageAlignmentMask >> shift) & ~(Bitmap::kBytesPerCell - 1));

  add(bitmap_reg, ecx);
  // Bit index within the cell: (addr >> kPointerSizeLog2) mod kBitsPerCell.
  mov(ecx, addr_reg);
  shr(ecx, kPointerSizeLog2);
  and_(ecx, (1 << Bitmap::kBitsPerCellLog2) - 1);
  mov(mask_reg, Immediate(1));
  // mask_reg = 1 << (bit index in ecx).
  shl_cl(mask_reg);
}
3092
3093
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003094void MacroAssembler::JumpIfWhite(Register value, Register bitmap_scratch,
3095 Register mask_scratch, Label* value_is_white,
3096 Label::Distance distance) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003097 DCHECK(!AreAliased(value, bitmap_scratch, mask_scratch, ecx));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003098 GetMarkBits(value, bitmap_scratch, mask_scratch);
3099
3100 // If the value is black or grey we don't need to do anything.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003101 DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003102 DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
3103 DCHECK(strcmp(Marking::kGreyBitPattern, "10") == 0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003104 DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003105
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003106 // Since both black and grey have a 1 in the first position and white does
3107 // not have a 1 there we only need to check one bit.
3108 test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003109 j(zero, value_is_white, Label::kNear);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003110}
3111
3112
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003113void MacroAssembler::EnumLength(Register dst, Register map) {
3114 STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
3115 mov(dst, FieldOperand(map, Map::kBitField3Offset));
3116 and_(dst, Immediate(Map::EnumLengthBits::kMask));
3117 SmiTag(dst);
3118}
3119
3120
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003121void MacroAssembler::CheckEnumCache(Label* call_runtime) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003122 Label next, start;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003123 mov(ecx, eax);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003124
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003125 // Check if the enum length field is properly initialized, indicating that
3126 // there is an enum cache.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003127 mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003128
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003129 EnumLength(edx, ebx);
3130 cmp(edx, Immediate(Smi::FromInt(kInvalidEnumCacheSentinel)));
3131 j(equal, call_runtime);
3132
3133 jmp(&start);
3134
3135 bind(&next);
3136 mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003137
3138 // For all objects but the receiver, check that the cache is empty.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003139 EnumLength(edx, ebx);
3140 cmp(edx, Immediate(Smi::FromInt(0)));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003141 j(not_equal, call_runtime);
3142
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003143 bind(&start);
3144
3145 // Check that there are no elements. Register rcx contains the current JS
3146 // object we've reached through the prototype chain.
3147 Label no_elements;
3148 mov(ecx, FieldOperand(ecx, JSObject::kElementsOffset));
3149 cmp(ecx, isolate()->factory()->empty_fixed_array());
3150 j(equal, &no_elements);
3151
3152 // Second chance, the object may be using the empty slow element dictionary.
3153 cmp(ecx, isolate()->factory()->empty_slow_element_dictionary());
3154 j(not_equal, call_runtime);
3155
3156 bind(&no_elements);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003157 mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
3158 cmp(ecx, isolate()->factory()->null_value());
3159 j(not_equal, &next);
3160}
3161
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003162
// Tests whether an AllocationMemento immediately follows the JSArray in
// |receiver_reg|. Jumps to |no_memento_found| when the candidate memento
// address lies outside the currently allocated new-space region. Otherwise
// falls through with the flags set by the final cmp against the allocation
// memento map, so the caller is expected to branch on equal / not_equal.
// |scratch_reg| receives the address just past the candidate memento.
void MacroAssembler::TestJSArrayForAllocationMemento(
    Register receiver_reg,
    Register scratch_reg,
    Label* no_memento_found) {
  ExternalReference new_space_start =
      ExternalReference::new_space_start(isolate());
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Address just past where a trailing memento would end (untagged).
  lea(scratch_reg, Operand(receiver_reg,
      JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag));
  // The memento can only exist inside [new_space_start, allocation_top].
  cmp(scratch_reg, Immediate(new_space_start));
  j(less, no_memento_found);
  cmp(scratch_reg, Operand::StaticVariable(new_space_allocation_top));
  j(greater, no_memento_found);
  // Leaves the equality result in the flags for the caller.
  cmp(MemOperand(scratch_reg, -AllocationMemento::kSize),
      Immediate(isolate()->factory()->allocation_memento_map()));
}
3181
3182
// Walks the prototype chain of |object| (excluding |object| itself) and
// jumps to |found| if any prototype is a special-API object (instance type
// below JS_OBJECT_TYPE, e.g. proxies/values) or has dictionary-mode
// elements. Falls through when the chain ends at null. |scratch0| holds the
// current object/map while walking; |scratch1| holds the decoded elements
// kind; they must not alias.
void MacroAssembler::JumpIfDictionaryInPrototypeChain(
    Register object,
    Register scratch0,
    Register scratch1,
    Label* found) {
  DCHECK(!scratch1.is(scratch0));
  Factory* factory = isolate()->factory();
  Register current = scratch0;
  Label loop_again, end;

  // scratch contained elements pointer.
  mov(current, object);
  // Load the first prototype; a null prototype means an empty chain.
  mov(current, FieldOperand(current, HeapObject::kMapOffset));
  mov(current, FieldOperand(current, Map::kPrototypeOffset));
  cmp(current, Immediate(factory->null_value()));
  j(equal, &end);

  // Loop based on the map going up the prototype chain.
  bind(&loop_again);
  mov(current, FieldOperand(current, HeapObject::kMapOffset));
  // Any instance type below JS_OBJECT_TYPE (proxies, values, ...) is
  // treated as "found" — it may intercept property access.
  STATIC_ASSERT(JS_PROXY_TYPE < JS_OBJECT_TYPE);
  STATIC_ASSERT(JS_VALUE_TYPE < JS_OBJECT_TYPE);
  CmpInstanceType(current, JS_OBJECT_TYPE);
  j(below, found);
  // Check whether this prototype uses dictionary-mode elements.
  mov(scratch1, FieldOperand(current, Map::kBitField2Offset));
  DecodeField<Map::ElementsKindBits>(scratch1);
  cmp(scratch1, Immediate(DICTIONARY_ELEMENTS));
  j(equal, found);
  mov(current, FieldOperand(current, Map::kPrototypeOffset));
  cmp(current, Immediate(factory->null_value()));
  j(not_equal, &loop_again);

  bind(&end);
}
3217
3218
// Emits code computing the truncated signed division of |dividend| by the
// compile-time constant |divisor| using a magic-number multiplication
// (Granlund-Montgomery style, constants from SignedDivisionByConstant).
// The quotient is left in edx; eax is clobbered; |dividend| is preserved
// and must therefore be neither eax nor edx.
void MacroAssembler::TruncatingDiv(Register dividend, int32_t divisor) {
  DCHECK(!dividend.is(eax));
  DCHECK(!dividend.is(edx));
  base::MagicNumbersForDivision<uint32_t> mag =
      base::SignedDivisionByConstant(static_cast<uint32_t>(divisor));
  // edx:eax = dividend * multiplier; the high half (edx) approximates the
  // quotient before the correction steps below.
  mov(eax, Immediate(mag.multiplier));
  imul(dividend);
  // Correct for the multiplier's sign bit, per the magic-number scheme.
  bool neg = (mag.multiplier & (static_cast<uint32_t>(1) << 31)) != 0;
  if (divisor > 0 && neg) add(edx, dividend);
  if (divisor < 0 && !neg && mag.multiplier > 0) sub(edx, dividend);
  if (mag.shift > 0) sar(edx, mag.shift);
  // Add the dividend's sign bit to round the quotient toward zero.
  mov(eax, dividend);
  shr(eax, 31);
  add(edx, eax);
}
3234
3235
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003236} // namespace internal
3237} // namespace v8
Leon Clarkef7060e22010-06-03 12:02:55 +01003238
3239#endif // V8_TARGET_ARCH_IA32