// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_X87

#include "src/base/bits.h"
#include "src/base/division-by-constant.h"
#include "src/bootstrapper.h"
#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/runtime/runtime.h"
#include "src/x87/frames-x87.h"
#include "src/x87/macro-assembler-x87.h"

namespace v8 {
namespace internal {

// -------------------------------------------------------------------------
// MacroAssembler implementation.

MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size,
                               CodeObjectRequired create_code_object)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      has_frame_(false) {
  if (create_code_object == CodeObjectRequired::kYes) {
    code_object_ =
        Handle<Object>::New(isolate()->heap()->undefined_value(), isolate());
  }
}


void MacroAssembler::Load(Register dst, const Operand& src, Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8()) {
    movsx_b(dst, src);
  } else if (r.IsUInteger8()) {
    movzx_b(dst, src);
  } else if (r.IsInteger16()) {
    movsx_w(dst, src);
  } else if (r.IsUInteger16()) {
    movzx_w(dst, src);
  } else {
    mov(dst, src);
  }
}


void MacroAssembler::Store(Register src, const Operand& dst, Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8() || r.IsUInteger8()) {
    mov_b(dst, src);
  } else if (r.IsInteger16() || r.IsUInteger16()) {
    mov_w(dst, src);
  } else {
    if (r.IsHeapObject()) {
      AssertNotSmi(src);
    } else if (r.IsSmi()) {
      AssertSmi(src);
    }
    mov(dst, src);
  }
}


void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  if (isolate()->heap()->RootCanBeTreatedAsConstant(index)) {
    mov(destination, isolate()->heap()->root_handle(index));
    return;
  }
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
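  // The root is fetched by indexing into the external roots array: the
  // operand below addresses roots_array_start + index * kPointerSize, using
  // the index loaded into |destination| as the scaled array index.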
  mov(destination, Immediate(index));
  mov(destination, Operand::StaticArray(destination,
                                        times_pointer_size,
                                        roots_array_start));
}


void MacroAssembler::StoreRoot(Register source,
                               Register scratch,
                               Heap::RootListIndex index) {
  DCHECK(Heap::RootCanBeWrittenAfterInitialization(index));
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(scratch, Immediate(index));
  mov(Operand::StaticArray(scratch, times_pointer_size, roots_array_start),
      source);
}


void MacroAssembler::CompareRoot(Register with,
                                 Register scratch,
                                 Heap::RootListIndex index) {
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(scratch, Immediate(index));
  cmp(with, Operand::StaticArray(scratch,
                                 times_pointer_size,
                                 roots_array_start));
}


void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  cmp(with, isolate()->heap()->root_handle(index));
}


void MacroAssembler::CompareRoot(const Operand& with,
                                 Heap::RootListIndex index) {
  DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  cmp(with, isolate()->heap()->root_handle(index));
}


void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  Push(isolate()->heap()->root_handle(index));
}

#define REG(Name) \
  { Register::kCode_##Name }

static const Register saved_regs[] = {REG(eax), REG(ecx), REG(edx)};

#undef REG

static const int kNumberOfSavedRegs = sizeof(saved_regs) / sizeof(Register);

void MacroAssembler::PushCallerSaved(SaveFPRegsMode fp_mode,
                                     Register exclusion1, Register exclusion2,
                                     Register exclusion3) {
  // We don't allow a GC during a store buffer overflow so there is no need to
  // store the registers in any particular way, but we do have to store and
  // restore them.
  for (int i = 0; i < kNumberOfSavedRegs; i++) {
    Register reg = saved_regs[i];
    if (!reg.is(exclusion1) && !reg.is(exclusion2) && !reg.is(exclusion3)) {
      push(reg);
    }
  }
  if (fp_mode == kSaveFPRegs) {
    // Save FPU state in m108byte.
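    // FNSAVE's 32-bit image is 108 bytes: a 28-byte environment (control,
    // status and tag words plus instruction/operand pointers) followed by
    // the eight 80-bit data registers.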
    sub(esp, Immediate(108));
    fnsave(Operand(esp, 0));
  }
}

void MacroAssembler::PopCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1,
                                    Register exclusion2, Register exclusion3) {
  if (fp_mode == kSaveFPRegs) {
    // Restore FPU state in m108byte.
    frstor(Operand(esp, 0));
    add(esp, Immediate(108));
  }

  for (int i = kNumberOfSavedRegs - 1; i >= 0; i--) {
    Register reg = saved_regs[i];
    if (!reg.is(exclusion1) && !reg.is(exclusion2) && !reg.is(exclusion3)) {
      pop(reg);
    }
  }
}

void MacroAssembler::InNewSpace(Register object, Register scratch, Condition cc,
                                Label* condition_met,
                                Label::Distance distance) {
  const int mask =
      (1 << MemoryChunk::IN_FROM_SPACE) | (1 << MemoryChunk::IN_TO_SPACE);
  CheckPageFlag(object, scratch, mask, cc, condition_met, distance);
}


void MacroAssembler::RememberedSetHelper(
    Register object,  // Only used for debug checks.
    Register addr, Register scratch, SaveFPRegsMode save_fp,
    MacroAssembler::RememberedSetFinalAction and_then) {
  Label done;
  if (emit_debug_code()) {
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
    int3();
    bind(&ok);
  }
  // Load store buffer top.
  ExternalReference store_buffer =
      ExternalReference::store_buffer_top(isolate());
  mov(scratch, Operand::StaticVariable(store_buffer));
  // Store pointer to buffer.
  mov(Operand(scratch, 0), addr);
  // Increment buffer top.
  add(scratch, Immediate(kPointerSize));
  // Write back new top of buffer.
  mov(Operand::StaticVariable(store_buffer), scratch);
  // Call stub on end of buffer.
  // Check for end of buffer.
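  // kStoreBufferOverflowBit is a bit of the top address itself: the buffer is
  // sized and aligned so that this bit flips exactly when the buffer fills,
  // which makes the fullness check a single test of the new top value.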
  test(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit));
  if (and_then == kReturnAtEnd) {
    Label buffer_overflowed;
    j(not_equal, &buffer_overflowed, Label::kNear);
    ret(0);
    bind(&buffer_overflowed);
  } else {
    DCHECK(and_then == kFallThroughAtEnd);
    j(equal, &done, Label::kNear);
  }
  StoreBufferOverflowStub store_buffer_overflow(isolate(), save_fp);
  CallStub(&store_buffer_overflow);
  if (and_then == kReturnAtEnd) {
    ret(0);
  } else {
    DCHECK(and_then == kFallThroughAtEnd);
    bind(&done);
  }
}


void MacroAssembler::ClampTOSToUint8(Register result_reg) {
  Label done, conv_failure;
  sub(esp, Immediate(kPointerSize));
  fnclex();
  fist_s(Operand(esp, 0));
  pop(result_reg);
  X87CheckIA();
  j(equal, &conv_failure, Label::kNear);
  test(result_reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
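  // Out of range: the setcc/sub/and sequence below yields 0 for negative
  // inputs and 255 for inputs above 255 (the same trick as in ClampUint8).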
  setcc(sign, result_reg);
  sub(result_reg, Immediate(1));
  and_(result_reg, Immediate(255));
  jmp(&done, Label::kNear);
  bind(&conv_failure);
  fnclex();
  fldz();
  fld(1);
  FCmp();
  setcc(below, result_reg);  // 1 if negative, 0 if positive.
  dec_b(result_reg);         // 0 if negative, 255 if positive.
  bind(&done);
}


void MacroAssembler::ClampUint8(Register reg) {
  Label done;
  test(reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  setcc(negative, reg);  // 1 if negative, 0 if positive.
  dec_b(reg);            // 0 if negative, 255 if positive.
  bind(&done);
}


void MacroAssembler::SlowTruncateToI(Register result_reg,
                                     Register input_reg,
                                     int offset) {
  DoubleToIStub stub(isolate(), input_reg, result_reg, offset, true);
  call(stub.GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::TruncateX87TOSToI(Register result_reg) {
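  // Spill the x87 top-of-stack to memory as a double and let DoubleToIStub
  // perform the truncation from there.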
  sub(esp, Immediate(kDoubleSize));
  fst_d(MemOperand(esp, 0));
  SlowTruncateToI(result_reg, esp, 0);
  add(esp, Immediate(kDoubleSize));
}


void MacroAssembler::X87TOSToI(Register result_reg,
                               MinusZeroMode minus_zero_mode,
                               Label* lost_precision, Label* is_nan,
                               Label* minus_zero, Label::Distance dst) {
  Label done;
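  // Round-trip the top of the x87 stack through an int32: if reloading the
  // converted value does not compare equal to the original, precision was
  // lost; an unordered comparison result means the input was NaN.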
  sub(esp, Immediate(kPointerSize));
  fld(0);
  fist_s(MemOperand(esp, 0));
  fild_s(MemOperand(esp, 0));
  pop(result_reg);
  FCmp();
  j(not_equal, lost_precision, dst);
  j(parity_even, is_nan, dst);
  if (minus_zero_mode == FAIL_ON_MINUS_ZERO) {
    test(result_reg, Operand(result_reg));
    j(not_zero, &done, Label::kNear);
    // To check for minus zero, we load the value again as float, and check
    // if that is still 0.
    sub(esp, Immediate(kPointerSize));
    fst_s(MemOperand(esp, 0));
    pop(result_reg);
    test(result_reg, Operand(result_reg));
    j(not_zero, minus_zero, dst);
  }
  bind(&done);
}


void MacroAssembler::TruncateHeapNumberToI(Register result_reg,
                                           Register input_reg) {
  Label done, slow_case;

  SlowTruncateToI(result_reg, input_reg);
  bind(&done);
}


void MacroAssembler::LoadUint32NoSSE2(const Operand& src) {
  Label done;
  push(src);
  fild_s(Operand(esp, 0));
  cmp(src, Immediate(0));
  j(not_sign, &done, Label::kNear);
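  // The sign bit was set, so fild_s read the value as a negative int32; add
  // 2^32 (the uint32 bias) to recover the intended unsigned value.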
  ExternalReference uint32_bias =
      ExternalReference::address_of_uint32_bias();
  fld_d(Operand::StaticVariable(uint32_bias));
  faddp(1);
  bind(&done);
  add(esp, Immediate(kPointerSize));
}


void MacroAssembler::RecordWriteArray(
    Register object, Register value, Register index, SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action, SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    DCHECK_EQ(0, kSmiTag);
    test(value, Immediate(kSmiTagMask));
    j(zero, &done);
  }

  // Array access: calculate the destination address in the same manner as
  // KeyedStoreIC::GenerateGeneric.  Multiply a smi by 2 to get an offset
  // into an array of words.
  Register dst = index;
  lea(dst, Operand(object, index, times_half_pointer_size,
                   FixedArray::kHeaderSize - kHeapObjectTag));

  RecordWrite(object, dst, value, save_fp, remembered_set_action,
              OMIT_SMI_CHECK, pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(index, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}


void MacroAssembler::RecordWriteField(
    Register object, int offset, Register value, Register dst,
    SaveFPRegsMode save_fp, RememberedSetAction remembered_set_action,
    SmiCheck smi_check, PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done, Label::kNear);
  }

  // Although the object register is tagged, the offset is relative to the
  // start of the object, so the offset must be a multiple of kPointerSize.
  DCHECK(IsAligned(offset, kPointerSize));

  lea(dst, FieldOperand(object, offset));
  if (emit_debug_code()) {
    Label ok;
    test_b(dst, (1 << kPointerSizeLog2) - 1);
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  RecordWrite(object, dst, value, save_fp, remembered_set_action,
              OMIT_SMI_CHECK, pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(dst, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}


void MacroAssembler::RecordWriteForMap(Register object, Handle<Map> map,
                                       Register scratch1, Register scratch2,
                                       SaveFPRegsMode save_fp) {
  Label done;

  Register address = scratch1;
  Register value = scratch2;
  if (emit_debug_code()) {
    Label ok;
    lea(address, FieldOperand(object, HeapObject::kMapOffset));
    test_b(address, (1 << kPointerSizeLog2) - 1);
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  DCHECK(!object.is(value));
  DCHECK(!object.is(address));
  DCHECK(!value.is(address));
  AssertNotSmi(object);

  if (!FLAG_incremental_marking) {
    return;
  }

  // Compute the address.
  lea(address, FieldOperand(object, HeapObject::kMapOffset));

  // A single check of the map's pages interesting flag suffices, since it is
  // only set during incremental collection, and then it's also guaranteed that
  // the from object's page's interesting flag is also set.  This optimization
  // relies on the fact that maps can never be in new space.
  DCHECK(!isolate()->heap()->InNewSpace(*map));
  CheckPageFlagForMap(map,
                      MemoryChunk::kPointersToHereAreInterestingMask,
                      zero,
                      &done,
                      Label::kNear);

  RecordWriteStub stub(isolate(), object, value, address, OMIT_REMEMBERED_SET,
                       save_fp);
  CallStub(&stub);

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(scratch1, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(scratch2, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}


void MacroAssembler::RecordWrite(
    Register object, Register address, Register value, SaveFPRegsMode fp_mode,
    RememberedSetAction remembered_set_action, SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  DCHECK(!object.is(value));
  DCHECK(!object.is(address));
  DCHECK(!value.is(address));
  AssertNotSmi(object);

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    Label ok;
    cmp(value, Operand(address, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    // Skip barrier if writing a smi.
    JumpIfSmi(value, &done, Label::kNear);
  }

  if (pointers_to_here_check_for_value != kPointersToHereAreAlwaysInteresting) {
    CheckPageFlag(value,
                  value,  // Used as scratch.
                  MemoryChunk::kPointersToHereAreInterestingMask,
                  zero,
                  &done,
                  Label::kNear);
  }
  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

  RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
                       fp_mode);
  CallStub(&stub);

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(address, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}

void MacroAssembler::RecordWriteCodeEntryField(Register js_function,
                                               Register code_entry,
                                               Register scratch) {
  const int offset = JSFunction::kCodeEntryOffset;

  // Since a code entry (value) is always in old space, we don't need to update
  // remembered set. If incremental marking is off, there is nothing for us to
  // do.
  if (!FLAG_incremental_marking) return;

  DCHECK(!js_function.is(code_entry));
  DCHECK(!js_function.is(scratch));
  DCHECK(!code_entry.is(scratch));
  AssertNotSmi(js_function);

  if (emit_debug_code()) {
    Label ok;
    lea(scratch, FieldOperand(js_function, offset));
    cmp(code_entry, Operand(scratch, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  CheckPageFlag(code_entry, scratch,
                MemoryChunk::kPointersToHereAreInterestingMask, zero, &done,
                Label::kNear);
  CheckPageFlag(js_function, scratch,
                MemoryChunk::kPointersFromHereAreInterestingMask, zero, &done,
                Label::kNear);

  // Save input registers.
  push(js_function);
  push(code_entry);

  const Register dst = scratch;
  lea(dst, FieldOperand(js_function, offset));

  // Save caller-saved registers.
  PushCallerSaved(kDontSaveFPRegs, js_function, code_entry);

  int argument_count = 3;
  PrepareCallCFunction(argument_count, code_entry);
  mov(Operand(esp, 0 * kPointerSize), js_function);
  mov(Operand(esp, 1 * kPointerSize), dst);  // Slot.
  mov(Operand(esp, 2 * kPointerSize),
      Immediate(ExternalReference::isolate_address(isolate())));

  {
    AllowExternalCallThatCantCauseGC scope(this);
    CallCFunction(
        ExternalReference::incremental_marking_record_write_code_entry_function(
            isolate()),
        argument_count);
  }

  // Restore caller-saved registers.
  PopCallerSaved(kDontSaveFPRegs, js_function, code_entry);

  // Restore input registers.
  pop(code_entry);
  pop(js_function);

  bind(&done);
}

void MacroAssembler::DebugBreak() {
  Move(eax, Immediate(0));
  mov(ebx, Immediate(ExternalReference(Runtime::kHandleDebuggerStatement,
                                       isolate())));
  CEntryStub ces(isolate(), 1);
  call(ces.GetCode(), RelocInfo::DEBUGGER_STATEMENT);
}


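// IsUnsafeImmediate flags wide, relocation-free immediates; SafeMove and
// SafePush below split such a value by XOR-ing it with the per-isolate JIT
// cookie so attacker-chosen constants never appear verbatim in the code
// stream (a JIT-spraying mitigation); a second XOR restores the real value.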
bool MacroAssembler::IsUnsafeImmediate(const Immediate& x) {
  static const int kMaxImmediateBits = 17;
  if (!RelocInfo::IsNone(x.rmode_)) return false;
  return !is_intn(x.x_, kMaxImmediateBits);
}


void MacroAssembler::SafeMove(Register dst, const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    Move(dst, Immediate(x.x_ ^ jit_cookie()));
    xor_(dst, jit_cookie());
  } else {
    Move(dst, x);
  }
}


void MacroAssembler::SafePush(const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    push(Immediate(x.x_ ^ jit_cookie()));
    xor_(Operand(esp, 0), Immediate(jit_cookie()));
  } else {
    push(x);
  }
}


void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}


void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       static_cast<int8_t>(type));
}


void MacroAssembler::CheckFastElements(Register map,
                                       Label* fail,
                                       Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleyElementValue);
  j(above, fail, distance);
}


void MacroAssembler::CheckFastObjectElements(Register map,
                                             Label* fail,
                                             Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleySmiElementValue);
  j(below_equal, fail, distance);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleyElementValue);
  j(above, fail, distance);
}


void MacroAssembler::CheckFastSmiElements(Register map,
                                          Label* fail,
                                          Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleySmiElementValue);
  j(above, fail, distance);
}


void MacroAssembler::StoreNumberToDoubleElements(
    Register maybe_number,
    Register elements,
    Register key,
    Register scratch,
    Label* fail,
    int elements_offset) {
  Label smi_value, done, maybe_nan, not_nan, is_nan, have_double_value;
  JumpIfSmi(maybe_number, &smi_value, Label::kNear);

  CheckMap(maybe_number,
           isolate()->factory()->heap_number_map(),
           fail,
           DONT_DO_SMI_CHECK);

  fld_d(FieldOperand(maybe_number, HeapNumber::kValueOffset));
  jmp(&done, Label::kNear);

  bind(&smi_value);
  // Value is a smi. Convert to a double and store.
  // Preserve original value.
  mov(scratch, maybe_number);
  SmiUntag(scratch);
  push(scratch);
  fild_s(Operand(esp, 0));
  pop(scratch);
  bind(&done);
  fstp_d(FieldOperand(elements, key, times_4,
                      FixedDoubleArray::kHeaderSize - elements_offset));
}


void MacroAssembler::CompareMap(Register obj, Handle<Map> map) {
  cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
}


void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }

  CompareMap(obj, map);
  j(not_equal, fail);
}


void MacroAssembler::DispatchWeakMap(Register obj, Register scratch1,
                                     Register scratch2, Handle<WeakCell> cell,
                                     Handle<Code> success,
                                     SmiCheckType smi_check_type) {
  Label fail;
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, &fail);
  }
  mov(scratch1, FieldOperand(obj, HeapObject::kMapOffset));
  CmpWeakValue(scratch1, cell, scratch2);
  j(equal, success);

  bind(&fail);
}


Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  test(instance_type, Immediate(kIsNotStringMask));
  return zero;
}


Condition MacroAssembler::IsObjectNameType(Register heap_object,
                                           Register map,
                                           Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  cmpb(instance_type, static_cast<uint8_t>(LAST_NAME_TYPE));
  return below_equal;
}


void MacroAssembler::FCmp() {
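  // fucompp compares ST(0) with ST(1) and pops both; fnstsw/sahf then copy
  // the FPU condition bits into EFLAGS so ordinary conditional jumps work,
  // with parity_even indicating an unordered (NaN) comparison.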
  fucompp();
  push(eax);
  fnstsw_ax();
  sahf();
  pop(eax);
}


void MacroAssembler::FXamMinusZero() {
  fxam();
  push(eax);
  fnstsw_ax();
  and_(eax, Immediate(0x4700));
  // For minus zero, C3 == 1 && C1 == 1.
  cmp(eax, Immediate(0x4200));
  pop(eax);
  fstp(0);
}


void MacroAssembler::FXamSign() {
  fxam();
  push(eax);
  fnstsw_ax();
  // For negative value (including -0.0), C1 == 1.
  and_(eax, Immediate(0x0200));
  pop(eax);
  fstp(0);
}


void MacroAssembler::X87CheckIA() {
  push(eax);
  fnstsw_ax();
  // For #IA, IE == 1 && SF == 0.
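  // The mask 0x0041 selects IE (bit 0) and SF (bit 6) of the status word;
  // comparing with 0x0001 accepts only IE set with SF clear, i.e. an invalid
  // arithmetic operand rather than a stack fault.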
  and_(eax, Immediate(0x0041));
  cmp(eax, Immediate(0x0001));
  pop(eax);
}


// rc=00B, round to nearest.
// rc=01B, round down.
// rc=10B, round up.
// rc=11B, round toward zero.
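// The caller passes rc already shifted into bits 10 and 11 of the x87 control
// word (e.g. 0x0C00 for round-toward-zero), matching the 0xF3FF mask below.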
void MacroAssembler::X87SetRC(int rc) {
  sub(esp, Immediate(kPointerSize));
  fnstcw(MemOperand(esp, 0));
  and_(MemOperand(esp, 0), Immediate(0xF3FF));
  or_(MemOperand(esp, 0), Immediate(rc));
  fldcw(MemOperand(esp, 0));
  add(esp, Immediate(kPointerSize));
}


void MacroAssembler::X87SetFPUCW(int cw) {
  RecordComment("-- X87SetFPUCW start --");
  push(Immediate(cw));
  fldcw(MemOperand(esp, 0));
  add(esp, Immediate(kPointerSize));
  RecordComment("-- X87SetFPUCW end --");
}


void MacroAssembler::AssertNumber(Register object) {
  if (emit_debug_code()) {
    Label ok;
    JumpIfSmi(object, &ok);
    cmp(FieldOperand(object, HeapObject::kMapOffset),
        isolate()->factory()->heap_number_map());
    Check(equal, kOperandNotANumber);
    bind(&ok);
  }
}


void MacroAssembler::AssertSmi(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(equal, kOperandIsNotASmi);
  }
}


void MacroAssembler::AssertString(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAString);
    push(object);
    mov(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, FIRST_NONSTRING_TYPE);
    pop(object);
    Check(below, kOperandIsNotAString);
  }
}


void MacroAssembler::AssertName(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAName);
    push(object);
    mov(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, LAST_NAME_TYPE);
    pop(object);
    Check(below_equal, kOperandIsNotAName);
  }
}


void MacroAssembler::AssertFunction(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAFunction);
    Push(object);
    CmpObjectType(object, JS_FUNCTION_TYPE, object);
    Pop(object);
    Check(equal, kOperandIsNotAFunction);
  }
}


void MacroAssembler::AssertBoundFunction(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotABoundFunction);
    Push(object);
    CmpObjectType(object, JS_BOUND_FUNCTION_TYPE, object);
    Pop(object);
    Check(equal, kOperandIsNotABoundFunction);
  }
}

void MacroAssembler::AssertReceiver(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAReceiver);
    Push(object);
    STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
    CmpObjectType(object, FIRST_JS_RECEIVER_TYPE, object);
    Pop(object);
    Check(above_equal, kOperandIsNotAReceiver);
  }
}

void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) {
  if (emit_debug_code()) {
    Label done_checking;
    AssertNotSmi(object);
    cmp(object, isolate()->factory()->undefined_value());
    j(equal, &done_checking);
    cmp(FieldOperand(object, 0),
        Immediate(isolate()->factory()->allocation_site_map()));
    Assert(equal, kExpectedUndefinedOrCell);
    bind(&done_checking);
  }
}


void MacroAssembler::AssertNotSmi(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmi);
  }
}


void MacroAssembler::StubPrologue() {
  push(ebp);  // Caller's frame pointer.
  mov(ebp, esp);
  push(esi);  // Callee's context.
  push(Immediate(Smi::FromInt(StackFrame::STUB)));
}


void MacroAssembler::Prologue(bool code_pre_aging) {
  PredictableCodeSizeScope predictible_code_size_scope(this,
      kNoCodeAgeSequenceLength);
  if (code_pre_aging) {
    // Pre-age the code.
    call(isolate()->builtins()->MarkCodeAsExecutedOnce(),
         RelocInfo::CODE_AGE_SEQUENCE);
    Nop(kNoCodeAgeSequenceLength - Assembler::kCallInstructionLength);
  } else {
    push(ebp);  // Caller's frame pointer.
    mov(ebp, esp);
    push(esi);  // Callee's context.
    push(edi);  // Callee's JS function.
  }
}


void MacroAssembler::EmitLoadTypeFeedbackVector(Register vector) {
  mov(vector, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  mov(vector, FieldOperand(vector, JSFunction::kSharedFunctionInfoOffset));
  mov(vector, FieldOperand(vector, SharedFunctionInfo::kFeedbackVectorOffset));
}


void MacroAssembler::EnterFrame(StackFrame::Type type,
                                bool load_constant_pool_pointer_reg) {
  // Out-of-line constant pool not implemented on x87.
  UNREACHABLE();
}


void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, esp);
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(CodeObject()));
  if (emit_debug_code()) {
    cmp(Operand(esp, 0), Immediate(isolate()->factory()->undefined_value()));
    Check(not_equal, kCodeObjectNotProperlyPatched);
  }
}


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (emit_debug_code()) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, kStackFrameTypesMustMatch);
  }
  leave();
}


void MacroAssembler::EnterExitFramePrologue() {
  // Set up the frame structure on the stack.
  DCHECK(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  DCHECK(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  DCHECK(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(ebp);
  mov(ebp, esp);

  // Reserve room for entry stack pointer and push the code object.
  DCHECK(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  push(Immediate(CodeObject()));  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress, isolate());
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  ExternalReference c_function_address(Isolate::kCFunctionAddress, isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);
  mov(Operand::StaticVariable(c_function_address), ebx);
}


void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) {
  // Optionally save FPU state.
  if (save_doubles) {
    // Store FPU state to m108byte.
    int space = 108 + argc * kPointerSize;
    sub(esp, Immediate(space));
    const int offset = -2 * kPointerSize;  // entry fp + code object.
    fnsave(MemOperand(ebp, offset - 108));
  } else {
    sub(esp, Immediate(argc * kPointerSize));
  }

  // Get the required frame alignment for the OS.
  const int kFrameAlignment = base::OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    DCHECK(base::bits::IsPowerOfTwo32(kFrameAlignment));
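    // For a power-of-two alignment, and-ing esp with -kFrameAlignment rounds
    // the stack pointer down to the required boundary.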
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}


void MacroAssembler::EnterExitFrame(int argc, bool save_doubles) {
  EnterExitFramePrologue();

  // Set up argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, eax);
  lea(esi, Operand(ebp, eax, times_4, offset));

  // Reserve space for argc, argv and isolate.
  EnterExitFrameEpilogue(argc, save_doubles);
}


void MacroAssembler::EnterApiExitFrame(int argc) {
  EnterExitFramePrologue();
  EnterExitFrameEpilogue(argc, false);
}


void MacroAssembler::LeaveExitFrame(bool save_doubles, bool pop_arguments) {
  // Optionally restore FPU state.
  if (save_doubles) {
    const int offset = -2 * kPointerSize;
    frstor(MemOperand(ebp, offset - 108));
  }

  if (pop_arguments) {
    // Get the return address from the stack and restore the frame pointer.
    mov(ecx, Operand(ebp, 1 * kPointerSize));
    mov(ebp, Operand(ebp, 0 * kPointerSize));

    // Pop the arguments and the receiver from the caller stack.
    lea(esp, Operand(esi, 1 * kPointerSize));

    // Push the return address to get ready to return.
    push(ecx);
  } else {
    // Otherwise just leave the exit frame.
    leave();
  }

  LeaveExitFrameEpilogue(true);
}


void MacroAssembler::LeaveExitFrameEpilogue(bool restore_context) {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  if (restore_context) {
    mov(esi, Operand::StaticVariable(context_address));
  }
#ifdef DEBUG
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
                                       isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}


void MacroAssembler::LeaveApiExitFrame(bool restore_context) {
  mov(esp, ebp);
  pop(ebp);

  LeaveExitFrameEpilogue(restore_context);
}


void MacroAssembler::PushStackHandler() {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);

  // Link the current handler as the next handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  push(Operand::StaticVariable(handler_address));

  // Set this new handler as the current one.
  mov(Operand::StaticVariable(handler_address), esp);
}


void MacroAssembler::PopStackHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  pop(Operand::StaticVariable(handler_address));
  add(esp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}


void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss) {
  Label same_contexts;

  DCHECK(!holder_reg.is(scratch1));
  DCHECK(!holder_reg.is(scratch2));
  DCHECK(!scratch1.is(scratch2));

  // Load current lexical context from the stack frame.
  mov(scratch1, Operand(ebp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (emit_debug_code()) {
    cmp(scratch1, Immediate(0));
    Check(not_equal, kWeShouldNotHaveAnEmptyLexicalContext);
  }
  // Load the native context of the current context.
  mov(scratch1, ContextOperand(scratch1, Context::NATIVE_CONTEXT_INDEX));

  // Check the context is a native context.
  if (emit_debug_code()) {
    // Read the first word and compare to native_context_map.
    cmp(FieldOperand(scratch1, HeapObject::kMapOffset),
        isolate()->factory()->native_context_map());
    Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
  }

  // Check if both contexts are the same.
  cmp(scratch1, FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  j(equal, &same_contexts);

  // Compare security tokens, save holder_reg on the stack so we can use it
  // as a temporary register.
  //
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(scratch2,
      FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));

  // Check the context is a native context.
  if (emit_debug_code()) {
    cmp(scratch2, isolate()->factory()->null_value());
    Check(not_equal, kJSGlobalProxyContextShouldNotBeNull);

    // Read the first word and compare to native_context_map(),
    cmp(FieldOperand(scratch2, HeapObject::kMapOffset),
        isolate()->factory()->native_context_map());
    Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
  }

  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch1, FieldOperand(scratch1, token_offset));
  cmp(scratch1, FieldOperand(scratch2, token_offset));
  j(not_equal, miss);

  bind(&same_contexts);
}


// Compute the hash code from the untagged key.  This must be kept in sync
// with ComputeIntegerHash in utils.h and KeyedLoadGenericStub in
// code-stub-hydrogen.cc
//
// Note: r0 will contain hash code
void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
  // Xor original key with a seed.
  if (serializer_enabled()) {
    ExternalReference roots_array_start =
        ExternalReference::roots_array_start(isolate());
    mov(scratch, Immediate(Heap::kHashSeedRootIndex));
    mov(scratch,
        Operand::StaticArray(scratch, times_pointer_size, roots_array_start));
    SmiUntag(scratch);
    xor_(r0, scratch);
  } else {
    int32_t seed = isolate()->heap()->HashSeed();
    xor_(r0, Immediate(seed));
  }

  // hash = ~hash + (hash << 15);
  mov(scratch, r0);
  not_(r0);
  shl(scratch, 15);
  add(r0, scratch);
  // hash = hash ^ (hash >> 12);
  mov(scratch, r0);
  shr(scratch, 12);
  xor_(r0, scratch);
  // hash = hash + (hash << 2);
  lea(r0, Operand(r0, r0, times_4, 0));
  // hash = hash ^ (hash >> 4);
  mov(scratch, r0);
  shr(scratch, 4);
  xor_(r0, scratch);
  // hash = hash * 2057;
  imul(r0, r0, 2057);
  // hash = hash ^ (hash >> 16);
  mov(scratch, r0);
  shr(scratch, 16);
  xor_(r0, scratch);
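  // Keep only the low 30 bits so the hash is always a non-negative value
  // that fits in a Smi.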
  and_(r0, 0x3fffffff);
}



void MacroAssembler::LoadFromNumberDictionary(Label* miss,
                                              Register elements,
                                              Register key,
                                              Register r0,
                                              Register r1,
                                              Register r2,
                                              Register result) {
  // Register use:
  //
  // elements - holds the slow-case elements of the receiver and is unchanged.
  //
  // key - holds the smi key on entry and is unchanged.
  //
  // Scratch registers:
  //
  // r0 - holds the untagged key on entry and holds the hash once computed.
  //
  // r1 - used to hold the capacity mask of the dictionary
  //
  // r2 - used for the index into the dictionary.
  //
  // result - holds the result on exit if the load succeeds and we fall through.

  Label done;

  GetNumberHash(r0, r1);

  // Compute capacity mask.
  mov(r1, FieldOperand(elements, SeededNumberDictionary::kCapacityOffset));
  shr(r1, kSmiTagSize);  // convert smi to int
  dec(r1);

  // Generate an unrolled loop that performs a few probes before giving up.
  for (int i = 0; i < kNumberDictionaryProbes; i++) {
    // Use r2 for index calculations and keep the hash intact in r0.
    mov(r2, r0);
    // Compute the masked index: (hash + i + i * i) & mask.
    if (i > 0) {
      add(r2, Immediate(SeededNumberDictionary::GetProbeOffset(i)));
    }
    and_(r2, r1);

    // Scale the index by multiplying by the entry size.
    DCHECK(SeededNumberDictionary::kEntrySize == 3);
    lea(r2, Operand(r2, r2, times_2, 0));  // r2 = r2 * 3

    // Check if the key matches.
    cmp(key, FieldOperand(elements,
                          r2,
                          times_pointer_size,
                          SeededNumberDictionary::kElementsStartOffset));
    if (i != (kNumberDictionaryProbes - 1)) {
      j(equal, &done);
    } else {
      j(not_equal, miss);
    }
  }

  bind(&done);
  // Check that the value is a field property.
  const int kDetailsOffset =
      SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  DCHECK_EQ(DATA, 0);
  test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
       Immediate(PropertyDetails::TypeField::kMask << kSmiTagSize));
  j(not_zero, miss);

  // Get the value at the masked, scaled index.
  const int kValueOffset =
      SeededNumberDictionary::kElementsStartOffset + kPointerSize;
  mov(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
}


void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    DCHECK(scratch.is(no_reg));
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    cmp(result, Operand::StaticVariable(allocation_top));
    Check(equal, kUnexpectedAllocationTop);
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available.
  if (scratch.is(no_reg)) {
    mov(result, Operand::StaticVariable(allocation_top));
  } else {
    mov(scratch, Immediate(allocation_top));
    mov(result, Operand(scratch, 0));
  }
}


void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch,
                                               AllocationFlags flags) {
  if (emit_debug_code()) {
    test(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, kUnalignedAllocationInNewSpace);
  }

  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Update new top. Use scratch if available.
  if (scratch.is(no_reg)) {
    mov(Operand::StaticVariable(allocation_top), result_end);
  } else {
    mov(Operand(scratch, 0), result_end);
  }
}


void MacroAssembler::Allocate(int object_size,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
  DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      if (result_end.is_valid()) {
        mov(result_end, Immediate(0x7191));
      }
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Align the next allocation. Storing the filler map without checking top is
  // safe in new-space because the limit of the heap is aligned there.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
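    // In old space (PRETENURE) the limit need not be aligned, so check that
    // the filler word written below still fits under the limit.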
    if ((flags & PRETENURE) != 0) {
      cmp(result, Operand::StaticVariable(allocation_limit));
      j(above_equal, gc_required);
    }
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted.
  Register top_reg = result_end.is_valid() ? result_end : result;
  if (!top_reg.is(result)) {
    mov(top_reg, result);
  }
  add(top_reg, Immediate(object_size));
  j(carry, gc_required);
  cmp(top_reg, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(top_reg, scratch, flags);

  // Tag result if requested.
  bool tag_result = (flags & TAG_OBJECT) != 0;
  if (top_reg.is(result)) {
    if (tag_result) {
      sub(result, Immediate(object_size - kHeapObjectTag));
    } else {
      sub(result, Immediate(object_size));
    }
  } else if (tag_result) {
    DCHECK(kHeapObjectTag == 1);
    inc(result);
  }
}


void MacroAssembler::Allocate(int header_size,
                              ScaleFactor element_size,
                              Register element_count,
                              RegisterValueType element_count_type,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & SIZE_IN_WORDS) == 0);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // Register element_count is not modified by the function.
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Align the next allocation. Storing the filler map without checking top is
  // safe in new-space because the limit of the heap is aligned there.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    if ((flags & PRETENURE) != 0) {
      cmp(result, Operand::StaticVariable(allocation_limit));
      j(above_equal, gc_required);
    }
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted.
  // We assume that element_count*element_size + header_size does not
  // overflow.
  if (element_count_type == REGISTER_VALUE_IS_SMI) {
    STATIC_ASSERT(static_cast<ScaleFactor>(times_2 - 1) == times_1);
    STATIC_ASSERT(static_cast<ScaleFactor>(times_4 - 1) == times_2);
    STATIC_ASSERT(static_cast<ScaleFactor>(times_8 - 1) == times_4);
    DCHECK(element_size >= times_2);
    DCHECK(kSmiTagSize == 1);
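    // A smi is the integer shifted left by one, so scaling a smi count with
    // the next-smaller scale factor yields the same byte count as scaling
    // the untagged count with element_size.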
    element_size = static_cast<ScaleFactor>(element_size - 1);
  } else {
    DCHECK(element_count_type == REGISTER_VALUE_IS_INT32);
  }
  lea(result_end, Operand(element_count, element_size, header_size));
  add(result_end, result);
  j(carry, gc_required);
  cmp(result_end, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  if ((flags & TAG_OBJECT) != 0) {
    DCHECK(kHeapObjectTag == 1);
    inc(result);
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch, flags);
}


void MacroAssembler::Allocate(Register object_size,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // object_size is left unchanged by this function.
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Align the next allocation. Storing the filler map without checking top is
  // safe in new-space because the limit of the heap is aligned there.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    if ((flags & PRETENURE) != 0) {
      cmp(result, Operand::StaticVariable(allocation_limit));
      j(above_equal, gc_required);
    }
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted.
  if (!object_size.is(result_end)) {
    mov(result_end, object_size);
  }
  add(result_end, result);
  j(carry, gc_required);
  cmp(result_end, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    DCHECK(kHeapObjectTag == 1);
    inc(result);
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch, flags);
}


void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required,
                                        MutableMode mode) {
  // Allocate heap number in new space.
  Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  Handle<Map> map = mode == MUTABLE
      ? isolate()->factory()->mutable_heap_number_map()
      : isolate()->factory()->heap_number_map();

  // Set the map.
  mov(FieldOperand(result, HeapObject::kMapOffset), Immediate(map));
}


void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  DCHECK((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  DCHECK(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
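  // Adding the alignment mask before masking rounds the byte count up to the
  // next object-aligned size; the lea computes length * 2 plus the addend in
  // a single instruction.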
1615 lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
1616 and_(scratch1, Immediate(~kObjectAlignmentMask));
1617
1618 // Allocate two byte string in new space.
1619 Allocate(SeqTwoByteString::kHeaderSize,
1620 times_1,
1621 scratch1,
1622 REGISTER_VALUE_IS_INT32,
1623 result,
1624 scratch2,
1625 scratch3,
1626 gc_required,
1627 TAG_OBJECT);
1628
1629 // Set the map, length and hash field.
1630 mov(FieldOperand(result, HeapObject::kMapOffset),
1631 Immediate(isolate()->factory()->string_map()));
1632 mov(scratch1, length);
1633 SmiTag(scratch1);
1634 mov(FieldOperand(result, String::kLengthOffset), scratch1);
1635 mov(FieldOperand(result, String::kHashFieldOffset),
1636 Immediate(String::kEmptyHashField));
1637}
1638
1639
1640void MacroAssembler::AllocateOneByteString(Register result, Register length,
1641 Register scratch1, Register scratch2,
1642 Register scratch3,
1643 Label* gc_required) {
1644 // Calculate the number of bytes needed for the characters in the string while
1645 // observing object alignment.
1646 DCHECK((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
1647 mov(scratch1, length);
1648 DCHECK(kCharSize == 1);
1649 add(scratch1, Immediate(kObjectAlignmentMask));
1650 and_(scratch1, Immediate(~kObjectAlignmentMask));
1651
1652 // Allocate one-byte string in new space.
1653 Allocate(SeqOneByteString::kHeaderSize,
1654 times_1,
1655 scratch1,
1656 REGISTER_VALUE_IS_INT32,
1657 result,
1658 scratch2,
1659 scratch3,
1660 gc_required,
1661 TAG_OBJECT);
1662
1663 // Set the map, length and hash field.
1664 mov(FieldOperand(result, HeapObject::kMapOffset),
1665 Immediate(isolate()->factory()->one_byte_string_map()));
1666 mov(scratch1, length);
1667 SmiTag(scratch1);
1668 mov(FieldOperand(result, String::kLengthOffset), scratch1);
1669 mov(FieldOperand(result, String::kHashFieldOffset),
1670 Immediate(String::kEmptyHashField));
1671}


void MacroAssembler::AllocateOneByteString(Register result, int length,
                                           Register scratch1,
                                           Register scratch2,
                                           Label* gc_required) {
  DCHECK(length > 0);

  // Allocate one-byte string in new space.
  Allocate(SeqOneByteString::SizeFor(length), result, scratch1, scratch2,
           gc_required, TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->one_byte_string_map()));
  mov(FieldOperand(result, String::kLengthOffset),
      Immediate(Smi::FromInt(length)));
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateTwoByteConsString(Register result,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  // Allocate cons string in new space.
  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->cons_string_map()));
}


void MacroAssembler::AllocateOneByteConsString(Register result,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  Allocate(ConsString::kSize,
           result,
           scratch1,
           scratch2,
           gc_required,
           TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->cons_one_byte_string_map()));
}


void MacroAssembler::AllocateTwoByteSlicedString(Register result,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  // Allocate sliced string in new space.
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->sliced_string_map()));
}


void MacroAssembler::AllocateOneByteSlicedString(Register result,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  // Allocate sliced string in new space.
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->sliced_one_byte_string_map()));
}


void MacroAssembler::AllocateJSValue(Register result, Register constructor,
                                     Register value, Register scratch,
                                     Label* gc_required) {
  DCHECK(!result.is(constructor));
  DCHECK(!result.is(scratch));
  DCHECK(!result.is(value));

  // Allocate JSValue in new space.
  Allocate(JSValue::kSize, result, scratch, no_reg, gc_required, TAG_OBJECT);

  // Initialize the JSValue.
  LoadGlobalFunctionInitialMap(constructor, scratch);
  mov(FieldOperand(result, HeapObject::kMapOffset), scratch);
  LoadRoot(scratch, Heap::kEmptyFixedArrayRootIndex);
  mov(FieldOperand(result, JSObject::kPropertiesOffset), scratch);
  mov(FieldOperand(result, JSObject::kElementsOffset), scratch);
  mov(FieldOperand(result, JSValue::kValueOffset), value);
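  // (Added note) The four stores above cover every pointer-sized field of a
  // JSValue (map, properties, elements, value), which is what the assert
  // below verifies against the object size.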
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
}


// Copy memory, byte-by-byte, from source to destination. Not optimized for
// long or aligned copies. The contents of scratch and length are destroyed.
// Source and destination are incremented by length.
// Many variants of movsb, loop unrolling, word moves, and indexed operands
// have been tried here already, and this is the fastest.
// A simpler loop is faster on small copies, but 30% slower on large ones.
// The cld() instruction must have been emitted, to clear the direction flag,
// before calling this function.
void MacroAssembler::CopyBytes(Register source,
                               Register destination,
                               Register length,
                               Register scratch) {
  Label short_loop, len4, len8, len12, done, short_string;
  DCHECK(source.is(esi));
  DCHECK(destination.is(edi));
  DCHECK(length.is(ecx));
  cmp(length, Immediate(4));
  j(below, &short_string, Label::kNear);

  // Because source is 4-byte aligned in our uses of this function,
  // we keep source aligned for the rep_movs call by copying the odd bytes
  // at the end of the ranges.
  mov(scratch, Operand(source, length, times_1, -4));
  mov(Operand(destination, length, times_1, -4), scratch);

  cmp(length, Immediate(8));
  j(below_equal, &len4, Label::kNear);
  cmp(length, Immediate(12));
  j(below_equal, &len8, Label::kNear);
  cmp(length, Immediate(16));
  j(below_equal, &len12, Label::kNear);

  mov(scratch, ecx);
  shr(ecx, 2);
  rep_movs();
  and_(scratch, Immediate(0x3));
  add(destination, scratch);
  jmp(&done, Label::kNear);

  bind(&len12);
  mov(scratch, Operand(source, 8));
  mov(Operand(destination, 8), scratch);
  bind(&len8);
  mov(scratch, Operand(source, 4));
  mov(Operand(destination, 4), scratch);
  bind(&len4);
  mov(scratch, Operand(source, 0));
  mov(Operand(destination, 0), scratch);
  add(destination, length);
  jmp(&done, Label::kNear);

  bind(&short_string);
  test(length, length);
  j(zero, &done, Label::kNear);

  bind(&short_loop);
  mov_b(scratch, Operand(source, 0));
  mov_b(Operand(destination, 0), scratch);
  inc(source);
  inc(destination);
  dec(length);
  j(not_zero, &short_loop);

  bind(&done);
}
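// A rough C sketch of the dispatch above (added for illustration; not part
// of the original source). The overlapping 4-byte tail copy is what makes
// the word-sized cases safe for every length in their range:
//   if (len < 4) { copy byte by byte; return; }
//   copy 4 bytes from src[len - 4] to dst[len - 4];   // tail, may overlap
//   if (len <= 8)       copy word at offset 0;
//   else if (len <= 12) copy words at offsets 0 and 4;
//   else if (len <= 16) copy words at offsets 0, 4 and 8;
//   else { rep movsd len / 4 words; dst += len % 4; }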


void MacroAssembler::InitializeFieldsWithFiller(Register current_address,
                                                Register end_address,
                                                Register filler) {
  Label loop, entry;
  jmp(&entry, Label::kNear);
  bind(&loop);
  mov(Operand(current_address, 0), filler);
  add(current_address, Immediate(kPointerSize));
  bind(&entry);
  cmp(current_address, end_address);
  j(below, &loop, Label::kNear);
}
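// (Added note) The loop above stores filler into every pointer-sized slot of
// the half-open range [current_address, end_address); the bottom-tested form
// also handles an empty range without a store.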


void MacroAssembler::BooleanBitTest(Register object,
                                    int field_offset,
                                    int bit_index) {
  bit_index += kSmiTagSize + kSmiShiftSize;
  DCHECK(base::bits::IsPowerOfTwo32(kBitsPerByte));
  int byte_index = bit_index / kBitsPerByte;
  int byte_bit_index = bit_index & (kBitsPerByte - 1);
  test_b(FieldOperand(object, field_offset + byte_index),
         static_cast<byte>(1 << byte_bit_index));
}
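// Worked example (added): on ia32 kSmiTagSize == 1 and kSmiShiftSize == 0,
// so testing logical bit 9 of a smi-encoded field probes physical bit 10,
// i.e. bit 2 of the byte at offset 1.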


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  test(result, result);
  j(not_zero, &ok, Label::kNear);
  test(op, op);
  j(sign, then_label, Label::kNear);
  bind(&ok);
}
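// (Added note) Both NegativeZeroTest variants catch the JS -0 case,
// typically after a multiply: a zero result is negative zero only if some
// operand was negative, which the sign test on the operand(s) detects.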


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  test(result, result);
  j(not_zero, &ok, Label::kNear);
  mov(scratch, op1);
  or_(scratch, op2);
  j(sign, then_label, Label::kNear);
  bind(&ok);
}


void MacroAssembler::GetMapConstructor(Register result, Register map,
                                       Register temp) {
  Label done, loop;
  mov(result, FieldOperand(map, Map::kConstructorOrBackPointerOffset));
  bind(&loop);
  JumpIfSmi(result, &done, Label::kNear);
  CmpObjectType(result, MAP_TYPE, temp);
  j(not_equal, &done, Label::kNear);
  mov(result, FieldOperand(result, Map::kConstructorOrBackPointerOffset));
  jmp(&loop);
  bind(&done);
}


void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch, Label* miss) {
  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(result, Immediate(isolate()->factory()->the_hole_value()));
  j(equal, miss);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done, Label::kNear);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));

  // All done.
  bind(&done);
}


void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) {
  DCHECK(AllowThisStubCall(stub));  // Calls are not allowed in some stubs.
  call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
}


void MacroAssembler::TailCallStub(CodeStub* stub) {
  jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::StubReturn(int argc) {
  DCHECK(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
  return has_frame_ || !stub->SometimesSetsUpAFrame();
}


void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it do not conflict.
  DCHECK(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  if (!index.is(hash)) {
    mov(index, hash);
  }
  DecodeFieldToSmi<String::ArrayIndexValueBits>(index);
}


void MacroAssembler::CallRuntime(const Runtime::Function* f, int num_arguments,
                                 SaveFPRegsMode save_doubles) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  CHECK(f->nargs < 0 || f->nargs == num_arguments);

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Move(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f, isolate())));
  CEntryStub ces(isolate(), 1, save_doubles);
  CallStub(&ces);
}


void MacroAssembler::CallExternalReference(ExternalReference ref,
                                           int num_arguments) {
  mov(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ref));

  CEntryStub stub(isolate(), 1);
  CallStub(&stub);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid) {
  // ----------- S t a t e -------------
  //  -- esp[0]                 : return address
  //  -- esp[4]                 : argument num_arguments - 1
  //  ...
  //  -- esp[4 * num_arguments] : argument 0 (receiver)
  //
  //  For runtime functions with variable arguments:
  //  -- eax                    : number of arguments
  // -----------------------------------

  const Runtime::Function* function = Runtime::FunctionForId(fid);
  DCHECK_EQ(1, function->result_size);
  if (function->nargs >= 0) {
    // TODO(1236192): Most runtime routines don't need the number of
    // arguments passed in because it is constant. At some point we
    // should remove this need and make the runtime routine entry code
    // smarter.
    mov(eax, Immediate(function->nargs));
  }
  JumpToExternalReference(ExternalReference(fid, isolate()));
}


void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces(isolate(), 1);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Label* done,
                                    bool* definitely_mismatches,
                                    InvokeFlag flag,
                                    Label::Distance done_near,
                                    const CallWrapper& call_wrapper) {
  bool definitely_matches = false;
  *definitely_mismatches = false;
  Label invoke;
  if (expected.is_immediate()) {
    DCHECK(actual.is_immediate());
    mov(eax, actual.immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaptation code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        *definitely_mismatches = true;
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      mov(eax, actual.immediate());
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      DCHECK(expected.reg().is(ebx));
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), actual.reg());
      j(equal, &invoke);
      DCHECK(actual.reg().is(eax));
      DCHECK(expected.reg().is(ebx));
    } else {
      Move(eax, actual.reg());
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET));
      call(adaptor, RelocInfo::CODE_TARGET);
      call_wrapper.AfterCall();
      if (!*definitely_mismatches) {
        jmp(done, done_near);
      }
    } else {
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
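// (Added summary) InvokePrologue leaves eax holding the actual argument
// count and ebx the expected count whenever they can differ, then either
// falls through to a direct invoke on a match or routes through the
// ArgumentsAdaptorTrampoline; *definitely_mismatches suppresses emission of
// the dead fall-through jump to done.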


void MacroAssembler::FloodFunctionIfStepping(Register fun, Register new_target,
                                             const ParameterCount& expected,
                                             const ParameterCount& actual) {
  Label skip_flooding;
  ExternalReference step_in_enabled =
      ExternalReference::debug_step_in_enabled_address(isolate());
  cmpb(Operand::StaticVariable(step_in_enabled), 0);
  j(equal, &skip_flooding);
  {
    FrameScope frame(this,
                     has_frame() ? StackFrame::NONE : StackFrame::INTERNAL);
    if (expected.is_reg()) {
      SmiTag(expected.reg());
      Push(expected.reg());
    }
    if (actual.is_reg()) {
      SmiTag(actual.reg());
      Push(actual.reg());
    }
    if (new_target.is_valid()) {
      Push(new_target);
    }
    Push(fun);
    Push(fun);
    CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
    Pop(fun);
    if (new_target.is_valid()) {
      Pop(new_target);
    }
    if (actual.is_reg()) {
      Pop(actual.reg());
      SmiUntag(actual.reg());
    }
    if (expected.is_reg()) {
      Pop(expected.reg());
      SmiUntag(expected.reg());
    }
  }
  bind(&skip_flooding);
}


void MacroAssembler::InvokeFunctionCode(Register function, Register new_target,
                                        const ParameterCount& expected,
                                        const ParameterCount& actual,
                                        InvokeFlag flag,
                                        const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());
  DCHECK(function.is(edi));
  DCHECK_IMPLIES(new_target.is_valid(), new_target.is(edx));

  if (call_wrapper.NeedsDebugStepCheck()) {
    FloodFunctionIfStepping(function, new_target, expected, actual);
  }

  // Clear the new.target register if not given.
  if (!new_target.is_valid()) {
    mov(edx, isolate()->factory()->undefined_value());
  }

  Label done;
  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, &done, &definitely_mismatches, flag,
                 Label::kNear, call_wrapper);
  if (!definitely_mismatches) {
    // We call indirectly through the code field in the function to
    // allow recompilation to take effect without changing any of the
    // call sites.
    Operand code = FieldOperand(function, JSFunction::kCodeEntryOffset);
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(code));
      call(code);
      call_wrapper.AfterCall();
    } else {
      DCHECK(flag == JUMP_FUNCTION);
      jmp(code);
    }
    bind(&done);
  }
}


void MacroAssembler::InvokeFunction(Register fun, Register new_target,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  DCHECK(fun.is(edi));
  mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(ebx, SharedFunctionInfo::kFormalParameterCountOffset));
  SmiUntag(ebx);

  ParameterCount expected(ebx);
  InvokeFunctionCode(edi, new_target, expected, actual, flag, call_wrapper);
}


void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  DCHECK(fun.is(edi));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  InvokeFunctionCode(edi, no_reg, expected, actual, flag, call_wrapper);
}


void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  LoadHeapObject(edi, function);
  InvokeFunction(edi, expected, actual, flag, call_wrapper);
}


void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    mov(dst, Operand(esi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    for (int i = 1; i < context_chain_length; i++) {
      mov(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    }
  } else {
    // Slot is in the current function context. Move it into the
    // destination register in case we store into it (the write barrier
    // cannot be allowed to destroy the context in esi).
    mov(dst, esi);
  }

  // We should not have found a with context by walking the context chain
  // (i.e., the static scope chain and runtime context chain do not agree).
  // A variable occurring in such a scope should have slot type LOOKUP and
  // not CONTEXT.
  if (emit_debug_code()) {
    cmp(FieldOperand(dst, HeapObject::kMapOffset),
        isolate()->factory()->with_context_map());
    Check(not_equal, kVariableResolvedToWithContext);
  }
}


void MacroAssembler::LoadGlobalProxy(Register dst) {
  mov(dst, NativeContextOperand());
  mov(dst, ContextOperand(dst, Context::GLOBAL_PROXY_INDEX));
}


void MacroAssembler::LoadTransitionedArrayMapConditional(
    ElementsKind expected_kind,
    ElementsKind transitioned_kind,
    Register map_in_out,
    Register scratch,
    Label* no_map_match) {
  DCHECK(IsFastElementsKind(expected_kind));
  DCHECK(IsFastElementsKind(transitioned_kind));

  // Check that the function's map is the same as the expected cached map.
  mov(scratch, NativeContextOperand());
  cmp(map_in_out,
      ContextOperand(scratch, Context::ArrayMapIndex(expected_kind)));
  j(not_equal, no_map_match);

  // Use the transitioned cached map.
  mov(map_in_out,
      ContextOperand(scratch, Context::ArrayMapIndex(transitioned_kind)));
}


void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the native context from the current context.
  mov(function, NativeContextOperand());
  // Load the function from the native context.
  mov(function, ContextOperand(function, index));
}


void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map. The global functions all have initial maps.
  mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
    jmp(&ok);
    bind(&fail);
    Abort(kGlobalFunctionsMustHaveInitialMap);
    bind(&ok);
  }
}


// Store the value in register src in the safepoint register stack
// slot for register dst.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
  mov(SafepointRegisterSlot(dst), src);
}


void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Immediate src) {
  mov(SafepointRegisterSlot(dst), src);
}


void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  mov(dst, SafepointRegisterSlot(src));
}


Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return Operand(esp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}


int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
  // The registers are pushed starting with the lowest encoding,
  // which means that lowest encodings are furthest away from
  // the stack pointer.
  DCHECK(reg_code >= 0 && reg_code < kNumSafepointRegisters);
  return kNumSafepointRegisters - reg_code - 1;
}
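// Worked example (added; assumes the pushad-style push order with
// kNumSafepointRegisters == 8 on ia32): eax (code 0) is pushed first and
// ends up deepest, at slot index 7, while edi (code 7) sits at slot index 0,
// right at esp.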


void MacroAssembler::LoadHeapObject(Register result,
                                    Handle<HeapObject> object) {
  AllowDeferredHandleDereference embedding_raw_address;
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<Cell> cell = isolate()->factory()->NewCell(object);
    mov(result, Operand::ForCell(cell));
  } else {
    mov(result, object);
  }
}


void MacroAssembler::CmpHeapObject(Register reg, Handle<HeapObject> object) {
  AllowDeferredHandleDereference using_raw_address;
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<Cell> cell = isolate()->factory()->NewCell(object);
    cmp(reg, Operand::ForCell(cell));
  } else {
    cmp(reg, object);
  }
}


void MacroAssembler::PushHeapObject(Handle<HeapObject> object) {
  AllowDeferredHandleDereference using_raw_address;
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<Cell> cell = isolate()->factory()->NewCell(object);
    push(Operand::ForCell(cell));
  } else {
    Push(object);
  }
}


void MacroAssembler::CmpWeakValue(Register value, Handle<WeakCell> cell,
                                  Register scratch) {
  mov(scratch, cell);
  cmp(value, FieldOperand(scratch, WeakCell::kValueOffset));
}


void MacroAssembler::GetWeakValue(Register value, Handle<WeakCell> cell) {
  mov(value, cell);
  mov(value, FieldOperand(value, WeakCell::kValueOffset));
}


void MacroAssembler::LoadWeakValue(Register value, Handle<WeakCell> cell,
                                   Label* miss) {
  GetWeakValue(value, cell);
  JumpIfSmi(value, miss);
}


void MacroAssembler::Ret() {
  ret(0);
}


void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
  if (is_uint16(bytes_dropped)) {
    ret(bytes_dropped);
  } else {
    pop(scratch);
    add(esp, Immediate(bytes_dropped));
    push(scratch);
    ret(0);
  }
}
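// (Added note) ret only encodes a 16-bit immediate, so for larger drops the
// fallback above pops the return address, frees the stack space explicitly,
// pushes the return address back, and returns with ret 0.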


void MacroAssembler::VerifyX87StackDepth(uint32_t depth) {
  // Turn off the stack depth check when serializer is enabled to reduce the
  // code size.
  if (serializer_enabled()) return;
  // Make sure the floating point stack is either empty or has depth items.
  DCHECK(depth <= 7);
  // This is very expensive.
  DCHECK(FLAG_debug_code && FLAG_enable_slow_asserts);

  // The top-of-stack (tos) is 7 if there is one item pushed.
  int tos = (8 - depth) % 8;
  const int kTopMask = 0x3800;
  push(eax);
  fwait();
  fnstsw_ax();
  and_(eax, kTopMask);
  shr(eax, 11);
  cmp(eax, Immediate(tos));
  Check(equal, kUnexpectedFPUStackDepthAfterInstruction);
  fnclex();
  pop(eax);
}
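// (Added note) Bits 11-13 of the x87 status word (kTopMask == 0x3800) hold
// the TOP-of-stack field, which counts down from 0 as values are pushed;
// e.g. depth 2 should leave TOP == 6, i.e. (8 - 2) % 8.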


void MacroAssembler::Drop(int stack_elements) {
  if (stack_elements > 0) {
    add(esp, Immediate(stack_elements * kPointerSize));
  }
}


void MacroAssembler::Move(Register dst, Register src) {
  if (!dst.is(src)) {
    mov(dst, src);
  }
}


void MacroAssembler::Move(Register dst, const Immediate& x) {
  if (x.is_zero()) {
    xor_(dst, dst);  // Shorter than mov of 32-bit immediate 0.
  } else {
    mov(dst, x);
  }
}


void MacroAssembler::Move(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}


void MacroAssembler::Lzcnt(Register dst, const Operand& src) {
  // TODO(intel): Add support for LZCNT (with ABM/BMI1).
  Label not_zero_src;
  bsr(dst, src);
  j(not_zero, &not_zero_src, Label::kNear);
  Move(dst, Immediate(63));  // 63^31 == 32
  bind(&not_zero_src);
  xor_(dst, Immediate(31));  // for x in [0..31], 31^x == 31-x.
}
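// Worked example (added): bsr returns the index of the highest set bit, so
// for src == 1 it yields 0 and the final xor gives 31 leading zeros; for
// src == 0 bsr leaves dst undefined, hence the 63 sentinel that the xor
// turns into lzcnt's defined result of 32.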


void MacroAssembler::Tzcnt(Register dst, const Operand& src) {
  // TODO(intel): Add support for TZCNT (with ABM/BMI1).
  Label not_zero_src;
  bsf(dst, src);
  j(not_zero, &not_zero_src, Label::kNear);
  Move(dst, Immediate(32));  // The result of tzcnt is 32 if src = 0.
  bind(&not_zero_src);
}


void MacroAssembler::Popcnt(Register dst, const Operand& src) {
  // TODO(intel): Add support for POPCNT (with POPCNT)
  // if (CpuFeatures::IsSupported(POPCNT)) {
  //   CpuFeatureScope scope(this, POPCNT);
  //   popcnt(dst, src);
  //   return;
  // }
  UNREACHABLE();
}


void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      inc(operand);
    } else {
      add(operand, Immediate(value));
    }
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      dec(operand);
    } else {
      sub(operand, Immediate(value));
    }
  }
}


void MacroAssembler::IncrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    IncrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}


void MacroAssembler::DecrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    DecrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}


void MacroAssembler::Assert(Condition cc, BailoutReason reason) {
  if (emit_debug_code()) Check(cc, reason);
}


void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    Factory* factory = isolate()->factory();
    Label ok;
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_double_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_cow_array_map()));
    j(equal, &ok);
    Abort(kJSObjectWithFastElementsMapHasSlowElements);
    bind(&ok);
  }
}


void MacroAssembler::Check(Condition cc, BailoutReason reason) {
  Label L;
  j(cc, &L);
  Abort(reason);
  // will not return here
  bind(&L);
}


void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = base::OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    Label alignment_as_expected;
    test(esp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}


void MacroAssembler::Abort(BailoutReason reason) {
#ifdef DEBUG
  const char* msg = GetBailoutReason(reason);
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }

  if (FLAG_trap_on_abort) {
    int3();
    return;
  }
#endif

  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(reason))));
  // Disable stub call restrictions to always allow calls to abort.
  if (!has_frame_) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    CallRuntime(Runtime::kAbort);
  } else {
    CallRuntime(Runtime::kAbort);
  }
  // will not return here
  int3();
}


void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  mov(descriptors, FieldOperand(map, Map::kDescriptorsOffset));
}


void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
  mov(dst, FieldOperand(map, Map::kBitField3Offset));
  DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
}


void MacroAssembler::LoadAccessor(Register dst, Register holder,
                                  int accessor_index,
                                  AccessorComponent accessor) {
  mov(dst, FieldOperand(holder, HeapObject::kMapOffset));
  LoadInstanceDescriptors(dst, dst);
  mov(dst, FieldOperand(dst, DescriptorArray::GetValueOffset(accessor_index)));
  int offset = accessor == ACCESSOR_GETTER ? AccessorPair::kGetterOffset
                                           : AccessorPair::kSetterOffset;
  mov(dst, FieldOperand(dst, offset));
}


void MacroAssembler::JumpIfInstanceTypeIsNotSequentialOneByte(
    Register instance_type, Register scratch, Label* failure) {
  if (!scratch.is(instance_type)) {
    mov(scratch, instance_type);
  }
  and_(scratch,
       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
  cmp(scratch, kStringTag | kSeqStringTag | kOneByteStringTag);
  j(not_equal, failure);
}


void MacroAssembler::JumpIfNotBothSequentialOneByteStrings(Register object1,
                                                           Register object2,
                                                           Register scratch1,
                                                           Register scratch2,
                                                           Label* failure) {
  // Check that both objects are not smis.
  STATIC_ASSERT(kSmiTag == 0);
  mov(scratch1, object1);
  and_(scratch1, object2);
  JumpIfSmi(scratch1, failure);

  // Load instance type for both strings.
  mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
  mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
  movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat one-byte strings.
  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatOneByteStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;
  // Interleave bits from both instance types and compare them in one check.
  DCHECK_EQ(0, kFlatOneByteStringMask & (kFlatOneByteStringMask << 3));
  and_(scratch1, kFlatOneByteStringMask);
  and_(scratch2, kFlatOneByteStringMask);
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmp(scratch1, kFlatOneByteStringTag | (kFlatOneByteStringTag << 3));
  j(not_equal, failure);
}
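// (Added note) The lea above computes scratch1 + scratch2 * 8, i.e. it packs
// both masked instance types into disjoint bit ranges (the DCHECK guarantees
// no overlap), so a single cmp checks both strings at once.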


void MacroAssembler::JumpIfNotUniqueNameInstanceType(Operand operand,
                                                     Label* not_unique_name,
                                                     Label::Distance distance) {
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  Label succeed;
  test(operand, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  j(zero, &succeed);
  cmpb(operand, static_cast<uint8_t>(SYMBOL_TYPE));
  j(not_equal, not_unique_name, distance);

  bind(&succeed);
}


void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
                                               Register index,
                                               Register value,
                                               uint32_t encoding_mask) {
  Label is_object;
  JumpIfNotSmi(string, &is_object, Label::kNear);
  Abort(kNonObject);
  bind(&is_object);

  push(value);
  mov(value, FieldOperand(string, HeapObject::kMapOffset));
  movzx_b(value, FieldOperand(value, Map::kInstanceTypeOffset));

  and_(value, Immediate(kStringRepresentationMask | kStringEncodingMask));
  cmp(value, Immediate(encoding_mask));
  pop(value);
  Check(equal, kUnexpectedStringType);

  // The index is assumed to come in untagged. Tag it to compare against the
  // string length without using a temp register; it is restored at the end
  // of this function.
  SmiTag(index);
  Check(no_overflow, kIndexIsTooLarge);

  cmp(index, FieldOperand(string, String::kLengthOffset));
  Check(less, kIndexIsTooLarge);

  cmp(index, Immediate(Smi::FromInt(0)));
  Check(greater_equal, kIndexIsNegative);

  // Restore the index.
  SmiUntag(index);
}


void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
  int frame_alignment = base::OS::ActivationFrameAlignment();
  if (frame_alignment != 0) {
    // Make stack end at alignment and make room for num_arguments words
    // and the original value of esp.
    mov(scratch, esp);
    sub(esp, Immediate((num_arguments + 1) * kPointerSize));
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    and_(esp, -frame_alignment);
    mov(Operand(esp, num_arguments * kPointerSize), scratch);
  } else {
    sub(esp, Immediate(num_arguments * kPointerSize));
  }
}


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Trashing eax is ok as it will be the return value.
  mov(eax, Immediate(function));
  CallCFunction(eax, num_arguments);
}


void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  DCHECK(has_frame());
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(function);
  if (base::OS::ActivationFrameAlignment() != 0) {
    mov(esp, Operand(esp, num_arguments * kPointerSize));
  } else {
    add(esp, Immediate(num_arguments * kPointerSize));
  }
}
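// (Added note) These two functions cooperate: PrepareCallCFunction stashes
// the original esp at [esp + num_arguments * kPointerSize], and CallCFunction
// reloads esp from that slot after the call whenever frame alignment was
// applied, undoing both the argument area and the and_-based alignment.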


#ifdef DEBUG
bool AreAliased(Register reg1,
                Register reg2,
                Register reg3,
                Register reg4,
                Register reg5,
                Register reg6,
                Register reg7,
                Register reg8) {
  int n_of_valid_regs = reg1.is_valid() + reg2.is_valid() +
      reg3.is_valid() + reg4.is_valid() + reg5.is_valid() + reg6.is_valid() +
      reg7.is_valid() + reg8.is_valid();

  RegList regs = 0;
  if (reg1.is_valid()) regs |= reg1.bit();
  if (reg2.is_valid()) regs |= reg2.bit();
  if (reg3.is_valid()) regs |= reg3.bit();
  if (reg4.is_valid()) regs |= reg4.bit();
  if (reg5.is_valid()) regs |= reg5.bit();
  if (reg6.is_valid()) regs |= reg6.bit();
  if (reg7.is_valid()) regs |= reg7.bit();
  if (reg8.is_valid()) regs |= reg8.bit();
  int n_of_non_aliasing_regs = NumRegs(regs);

  return n_of_valid_regs != n_of_non_aliasing_regs;
}
#endif


CodePatcher::CodePatcher(Isolate* isolate, byte* address, int size)
    : address_(address),
      size_(size),
      masm_(isolate, address, size + Assembler::kGap, CodeObjectRequired::kNo) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  Assembler::FlushICache(masm_.isolate(), address_, size_);

  // Check that the code was patched as expected.
  DCHECK(masm_.pc_ == address_ + size_);
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


void MacroAssembler::CheckPageFlag(
    Register object,
    Register scratch,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  DCHECK(cc == zero || cc == not_zero);
  if (scratch.is(object)) {
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    mov(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  if (mask < (1 << kBitsPerByte)) {
    test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
           static_cast<uint8_t>(mask));
  } else {
    test(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}


void MacroAssembler::CheckPageFlagForMap(
    Handle<Map> map,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  DCHECK(cc == zero || cc == not_zero);
  Page* page = Page::FromAddress(map->address());
  DCHECK(!serializer_enabled());  // Serializer cannot match page_flags.
  ExternalReference reference(ExternalReference::page_flags(page));
  // The inlined static address check of the page's flags relies
  // on maps never being compacted.
  DCHECK(!isolate()->heap()->mark_compact_collector()->
         IsOnEvacuationCandidate(*map));
  if (mask < (1 << kBitsPerByte)) {
    test_b(Operand::StaticVariable(reference), static_cast<uint8_t>(mask));
  } else {
    test(Operand::StaticVariable(reference), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}


void MacroAssembler::JumpIfBlack(Register object,
                                 Register scratch0,
                                 Register scratch1,
                                 Label* on_black,
                                 Label::Distance on_black_near) {
  HasColor(object, scratch0, scratch1, on_black, on_black_near, 1,
           1);  // kBlackBitPattern.
  DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
}


void MacroAssembler::HasColor(Register object,
                              Register bitmap_scratch,
                              Register mask_scratch,
                              Label* has_color,
                              Label::Distance has_color_distance,
                              int first_bit,
                              int second_bit) {
  DCHECK(!AreAliased(object, bitmap_scratch, mask_scratch, ecx));

  GetMarkBits(object, bitmap_scratch, mask_scratch);

  Label other_color, word_boundary;
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(first_bit == 1 ? zero : not_zero, &other_color, Label::kNear);
  add(mask_scratch, mask_scratch);  // Shift left 1 by adding.
  j(zero, &word_boundary, Label::kNear);
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  jmp(&other_color, Label::kNear);

  bind(&word_boundary);
  test_b(Operand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize), 1);
  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  bind(&other_color);
}


void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  DCHECK(!AreAliased(addr_reg, mask_reg, bitmap_reg, ecx));
  mov(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
  and_(bitmap_reg, addr_reg);
  mov(ecx, addr_reg);
  int shift =
      Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
  shr(ecx, shift);
  and_(ecx,
       (Page::kPageAlignmentMask >> shift) & ~(Bitmap::kBytesPerCell - 1));

  add(bitmap_reg, ecx);
  mov(ecx, addr_reg);
  shr(ecx, kPointerSizeLog2);
  and_(ecx, (1 << Bitmap::kBitsPerCellLog2) - 1);
  mov(mask_reg, Immediate(1));
  shl_cl(mask_reg);
}
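// (Added walkthrough, assuming 32-bit bitmap cells and 4-byte pointers on
// ia32.) One mark bit covers one pointer-sized word, so each cell covers
// 128 bytes of the page. The first half above turns the page offset of
// addr_reg into the byte offset of its cell (offset >> shift, masked to
// cell alignment); the second half extracts the bit position within that
// cell, (addr >> kPointerSizeLog2) & 31, and builds the mask with a
// variable shift.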


void MacroAssembler::JumpIfWhite(Register value, Register bitmap_scratch,
                                 Register mask_scratch, Label* value_is_white,
                                 Label::Distance distance) {
  DCHECK(!AreAliased(value, bitmap_scratch, mask_scratch, ecx));
  GetMarkBits(value, bitmap_scratch, mask_scratch);

  // If the value is black or grey we don't need to do anything.
  DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0);
  DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
  DCHECK(strcmp(Marking::kGreyBitPattern, "10") == 0);
  DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

  // Since both black and grey have a 1 in the first position and white does
  // not have a 1 there we only need to check one bit.
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(zero, value_is_white, Label::kNear);
}


void MacroAssembler::EnumLength(Register dst, Register map) {
  STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
  mov(dst, FieldOperand(map, Map::kBitField3Offset));
  and_(dst, Immediate(Map::EnumLengthBits::kMask));
  SmiTag(dst);
}


void MacroAssembler::CheckEnumCache(Label* call_runtime) {
  Label next, start;
  mov(ecx, eax);

  // Check if the enum length field is properly initialized, indicating that
  // there is an enum cache.
  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));

  EnumLength(edx, ebx);
  cmp(edx, Immediate(Smi::FromInt(kInvalidEnumCacheSentinel)));
  j(equal, call_runtime);

  jmp(&start);

  bind(&next);
  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));

  // For all objects but the receiver, check that the cache is empty.
  EnumLength(edx, ebx);
  cmp(edx, Immediate(Smi::FromInt(0)));
  j(not_equal, call_runtime);

  bind(&start);

  // Check that there are no elements. Register ecx contains the current JS
  // object we've reached through the prototype chain.
  Label no_elements;
  mov(ecx, FieldOperand(ecx, JSObject::kElementsOffset));
  cmp(ecx, isolate()->factory()->empty_fixed_array());
  j(equal, &no_elements);

  // Second chance, the object may be using the empty slow element dictionary.
  cmp(ecx, isolate()->factory()->empty_slow_element_dictionary());
  j(not_equal, call_runtime);

  bind(&no_elements);
  mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
  cmp(ecx, isolate()->factory()->null_value());
  j(not_equal, &next);
}


void MacroAssembler::TestJSArrayForAllocationMemento(
    Register receiver_reg,
    Register scratch_reg,
    Label* no_memento_found) {
  ExternalReference new_space_start =
      ExternalReference::new_space_start(isolate());
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  lea(scratch_reg, Operand(receiver_reg,
      JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag));
  cmp(scratch_reg, Immediate(new_space_start));
  j(less, no_memento_found);
  cmp(scratch_reg, Operand::StaticVariable(new_space_allocation_top));
  j(greater, no_memento_found);
  cmp(MemOperand(scratch_reg, -AllocationMemento::kSize),
      Immediate(isolate()->factory()->allocation_memento_map()));
}
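// (Added note) scratch_reg is pointed just past where a trailing
// AllocationMemento would end; the two bounds checks ensure that candidate
// address lies inside new space before the map word behind it is compared.
// The function only sets flags, so callers must branch on the final cmp
// (equal means a memento was found).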


void MacroAssembler::JumpIfDictionaryInPrototypeChain(
    Register object,
    Register scratch0,
    Register scratch1,
    Label* found) {
  DCHECK(!scratch1.is(scratch0));
  Factory* factory = isolate()->factory();
  Register current = scratch0;
  Label loop_again, end;

  // scratch contained elements pointer.
  mov(current, object);
  mov(current, FieldOperand(current, HeapObject::kMapOffset));
  mov(current, FieldOperand(current, Map::kPrototypeOffset));
  cmp(current, Immediate(factory->null_value()));
  j(equal, &end);

  // Loop based on the map going up the prototype chain.
  bind(&loop_again);
  mov(current, FieldOperand(current, HeapObject::kMapOffset));
  STATIC_ASSERT(JS_PROXY_TYPE < JS_OBJECT_TYPE);
  STATIC_ASSERT(JS_VALUE_TYPE < JS_OBJECT_TYPE);
  CmpInstanceType(current, JS_OBJECT_TYPE);
  j(below, found);
  mov(scratch1, FieldOperand(current, Map::kBitField2Offset));
  DecodeField<Map::ElementsKindBits>(scratch1);
  cmp(scratch1, Immediate(DICTIONARY_ELEMENTS));
  j(equal, found);
  mov(current, FieldOperand(current, Map::kPrototypeOffset));
  cmp(current, Immediate(factory->null_value()));
  j(not_equal, &loop_again);

  bind(&end);
}


void MacroAssembler::TruncatingDiv(Register dividend, int32_t divisor) {
  DCHECK(!dividend.is(eax));
  DCHECK(!dividend.is(edx));
  base::MagicNumbersForDivision<uint32_t> mag =
      base::SignedDivisionByConstant(static_cast<uint32_t>(divisor));
  mov(eax, Immediate(mag.multiplier));
  imul(dividend);
  bool neg = (mag.multiplier & (static_cast<uint32_t>(1) << 31)) != 0;
  if (divisor > 0 && neg) add(edx, dividend);
  if (divisor < 0 && !neg && mag.multiplier > 0) sub(edx, dividend);
  if (mag.shift > 0) sar(edx, mag.shift);
  mov(eax, dividend);
  shr(eax, 31);
  add(edx, eax);
}
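// Worked example (added for illustration only). For divisor == 3 the magic
// multiplier is 0x55555556 with shift 0, so the sequence above corresponds
// roughly to this C sketch:
//   int32_t div3(int32_t n) {
//     int32_t hi = (int32_t)(((int64_t)n * 0x55555556LL) >> 32);  // edx
//     return hi + ((uint32_t)n >> 31);  // add 1 for negative dividends
//   }
// The truncated-toward-zero quotient is left in edx.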


}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_X87