// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_X87

#include "src/base/bits.h"
#include "src/base/division-by-constant.h"
#include "src/bootstrapper.h"
#include "src/codegen.h"
#include "src/cpu-profiler.h"
#include "src/debug.h"
#include "src/isolate-inl.h"
#include "src/runtime/runtime.h"
#include "src/serialize.h"

namespace v8 {
namespace internal {

// -------------------------------------------------------------------------
// MacroAssembler implementation.

MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      has_frame_(false) {
  if (isolate() != NULL) {
    // TODO(titzer): should we just use a null handle here instead?
    code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
                                  isolate());
  }
}


void MacroAssembler::Load(Register dst, const Operand& src, Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8()) {
    movsx_b(dst, src);
  } else if (r.IsUInteger8()) {
    movzx_b(dst, src);
  } else if (r.IsInteger16()) {
    movsx_w(dst, src);
  } else if (r.IsUInteger16()) {
    movzx_w(dst, src);
  } else {
    mov(dst, src);
  }
}


void MacroAssembler::Store(Register src, const Operand& dst, Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8() || r.IsUInteger8()) {
    mov_b(dst, src);
  } else if (r.IsInteger16() || r.IsUInteger16()) {
    mov_w(dst, src);
  } else {
    if (r.IsHeapObject()) {
      AssertNotSmi(src);
    } else if (r.IsSmi()) {
      AssertSmi(src);
    }
    mov(dst, src);
  }
}


void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  if (isolate()->heap()->RootCanBeTreatedAsConstant(index)) {
    Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
    mov(destination, value);
    return;
  }
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(destination, Immediate(index));
  mov(destination, Operand::StaticArray(destination,
                                        times_pointer_size,
                                        roots_array_start));
}
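
// A note on the two paths above: constant roots are embedded directly as
// immediates, while mutable roots go through an indexed load from the roots
// array. Rough C sketch of the indirect path (names as in this file, sketch
// only): destination = ((Object**)roots_array_start)[index].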


void MacroAssembler::StoreRoot(Register source,
                               Register scratch,
                               Heap::RootListIndex index) {
  DCHECK(Heap::RootCanBeWrittenAfterInitialization(index));
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(scratch, Immediate(index));
  mov(Operand::StaticArray(scratch, times_pointer_size, roots_array_start),
      source);
}


void MacroAssembler::CompareRoot(Register with,
                                 Register scratch,
                                 Heap::RootListIndex index) {
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(scratch, Immediate(index));
  cmp(with, Operand::StaticArray(scratch,
                                 times_pointer_size,
                                 roots_array_start));
}


void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
  cmp(with, value);
}


void MacroAssembler::CompareRoot(const Operand& with,
                                 Heap::RootListIndex index) {
  DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
  cmp(with, value);
}


void MacroAssembler::InNewSpace(
    Register object,
    Register scratch,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  DCHECK(cc == equal || cc == not_equal);
  if (scratch.is(object)) {
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    mov(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  // Check that we can use a test_b.
  DCHECK(MemoryChunk::IN_FROM_SPACE < 8);
  DCHECK(MemoryChunk::IN_TO_SPACE < 8);
  int mask = (1 << MemoryChunk::IN_FROM_SPACE)
           | (1 << MemoryChunk::IN_TO_SPACE);
  // If non-zero, the page belongs to new-space.
  test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
         static_cast<uint8_t>(mask));
  j(cc, condition_met, condition_met_distance);
}
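
// Sketch of the masking trick used above: pages are power-of-two aligned, so
// clearing the low bits of any address inside an object yields the address of
// its page header, whose flags word can then be tested directly. Roughly:
//   chunk = (MemoryChunk*)(object & ~Page::kPageAlignmentMask);
//   in_new_space = (chunk->flags & mask) != 0;  // C sketch, not real fields.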


void MacroAssembler::RememberedSetHelper(
    Register object,  // Only used for debug checks.
    Register addr, Register scratch, SaveFPRegsMode save_fp,
    MacroAssembler::RememberedSetFinalAction and_then) {
  Label done;
  if (emit_debug_code()) {
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
    int3();
    bind(&ok);
  }
  // Load store buffer top.
  ExternalReference store_buffer =
      ExternalReference::store_buffer_top(isolate());
  mov(scratch, Operand::StaticVariable(store_buffer));
  // Store pointer to buffer.
  mov(Operand(scratch, 0), addr);
  // Increment buffer top.
  add(scratch, Immediate(kPointerSize));
  // Write back new top of buffer.
  mov(Operand::StaticVariable(store_buffer), scratch);
  // Check for end of buffer and call the overflow stub when it is reached.
  test(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit));
  if (and_then == kReturnAtEnd) {
    Label buffer_overflowed;
    j(not_equal, &buffer_overflowed, Label::kNear);
    ret(0);
    bind(&buffer_overflowed);
  } else {
    DCHECK(and_then == kFallThroughAtEnd);
    j(equal, &done, Label::kNear);
  }
  StoreBufferOverflowStub store_buffer_overflow(isolate(), save_fp);
  CallStub(&store_buffer_overflow);
  if (and_then == kReturnAtEnd) {
    ret(0);
  } else {
    DCHECK(and_then == kFallThroughAtEnd);
    bind(&done);
  }
}
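
// The fast path above is a plain bump-pointer append into the store buffer;
// roughly, in C: *store_buffer_top++ = addr. The overflow check then tests a
// bit of the new top pointer itself (kStoreBufferOverflowBit), which
// presumably relies on how the store buffer is placed in memory.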
191
192
193void MacroAssembler::ClampTOSToUint8(Register result_reg) {
194 Label done, conv_failure;
195 sub(esp, Immediate(kPointerSize));
196 fnclex();
197 fist_s(Operand(esp, 0));
198 pop(result_reg);
199 X87CheckIA();
200 j(equal, &conv_failure, Label::kNear);
201 test(result_reg, Immediate(0xFFFFFF00));
202 j(zero, &done, Label::kNear);
203 setcc(sign, result_reg);
204 sub(result_reg, Immediate(1));
205 and_(result_reg, Immediate(255));
206 jmp(&done, Label::kNear);
207 bind(&conv_failure);
208 fnclex();
209 fldz();
210 fld(1);
211 FCmp();
212 setcc(below, result_reg); // 1 if negative, 0 if positive.
213 dec_b(result_reg); // 0 if negative, 255 if positive.
214 bind(&done);
215}
216
217
218void MacroAssembler::ClampUint8(Register reg) {
219 Label done;
220 test(reg, Immediate(0xFFFFFF00));
221 j(zero, &done, Label::kNear);
222 setcc(negative, reg); // 1 if negative, 0 if positive.
223 dec_b(reg); // 0 if negative, 255 if positive.
224 bind(&done);
225}
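
// Branchless clamp used by both functions above: for inputs outside [0, 255],
// setcc leaves 1 in the low byte for negative values and 0 otherwise; the
// following dec_b turns that into 0x00 (negative saturates to 0) or 0xFF
// (positive overflow saturates to 255). Scalar equivalent, as a sketch:
//   result = v < 0 ? 0 : (v > 255 ? 255 : v);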


void MacroAssembler::SlowTruncateToI(Register result_reg,
                                     Register input_reg,
                                     int offset) {
  DoubleToIStub stub(isolate(), input_reg, result_reg, offset, true);
  call(stub.GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::TruncateX87TOSToI(Register result_reg) {
  sub(esp, Immediate(kDoubleSize));
  fst_d(MemOperand(esp, 0));
  SlowTruncateToI(result_reg, esp, 0);
  add(esp, Immediate(kDoubleSize));
}


void MacroAssembler::X87TOSToI(Register result_reg,
                               MinusZeroMode minus_zero_mode,
                               Label* lost_precision, Label* is_nan,
                               Label* minus_zero, Label::Distance dst) {
  Label done;
  sub(esp, Immediate(kPointerSize));
  fld(0);
  fist_s(MemOperand(esp, 0));
  fild_s(MemOperand(esp, 0));
  pop(result_reg);
  FCmp();
  j(not_equal, lost_precision, dst);
  j(parity_even, is_nan, dst);
  if (minus_zero_mode == FAIL_ON_MINUS_ZERO) {
    test(result_reg, Operand(result_reg));
    j(not_zero, &done, Label::kNear);
    // To check for minus zero, we load the value again as float, and check
    // if that is still 0.
    sub(esp, Immediate(kPointerSize));
    fst_s(MemOperand(esp, 0));
    pop(result_reg);
    test(result_reg, Operand(result_reg));
    j(not_zero, minus_zero, dst);
  }
  bind(&done);
}
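
// The round trip above (fist_s to memory, fild_s back, then FCmp against the
// original top-of-stack value) flags anything that does not survive int32
// conversion: a mismatch means lost precision, and an unordered compare
// (parity set) means NaN. The extra float reload handles -0.0, which fist_s
// maps to the same integer 0 as +0.0.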


void MacroAssembler::TruncateHeapNumberToI(Register result_reg,
                                           Register input_reg) {
  Label done, slow_case;

  SlowTruncateToI(result_reg, input_reg);
  bind(&done);
}


void MacroAssembler::LoadUint32NoSSE2(const Operand& src) {
  Label done;
  push(src);
  fild_s(Operand(esp, 0));
  cmp(src, Immediate(0));
  j(not_sign, &done, Label::kNear);
  ExternalReference uint32_bias =
      ExternalReference::address_of_uint32_bias();
  fld_d(Operand::StaticVariable(uint32_bias));
  faddp(1);
  bind(&done);
  add(esp, Immediate(kPointerSize));
}
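
// fild_s interprets its 32-bit source as signed, so an input with the sign
// bit set arrives as value - 2^32; adding the uint32 bias (presumably the
// double constant 4294967296.0 behind address_of_uint32_bias) restores the
// unsigned value. Sketch: result = (double)(int32_t)x + (sign ? 2^32 : 0).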


void MacroAssembler::RecordWriteArray(
    Register object, Register value, Register index, SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action, SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    DCHECK_EQ(0, kSmiTag);
    test(value, Immediate(kSmiTagMask));
    j(zero, &done);
  }

  // Array access: calculate the destination address in the same manner as
  // KeyedStoreIC::GenerateGeneric. Multiply a smi by 2 to get an offset
  // into an array of words.
  Register dst = index;
  lea(dst, Operand(object, index, times_half_pointer_size,
                   FixedArray::kHeaderSize - kHeapObjectTag));

  RecordWrite(object, dst, value, save_fp, remembered_set_action,
              OMIT_SMI_CHECK, pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(index, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}


void MacroAssembler::RecordWriteField(
    Register object, int offset, Register value, Register dst,
    SaveFPRegsMode save_fp, RememberedSetAction remembered_set_action,
    SmiCheck smi_check, PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done, Label::kNear);
  }

  // Although the object register is tagged, the offset is relative to the
  // start of the object, so the offset must be a multiple of kPointerSize.
  DCHECK(IsAligned(offset, kPointerSize));

  lea(dst, FieldOperand(object, offset));
  if (emit_debug_code()) {
    Label ok;
    test_b(dst, (1 << kPointerSizeLog2) - 1);
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  RecordWrite(object, dst, value, save_fp, remembered_set_action,
              OMIT_SMI_CHECK, pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(dst, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}


void MacroAssembler::RecordWriteForMap(Register object, Handle<Map> map,
                                       Register scratch1, Register scratch2,
                                       SaveFPRegsMode save_fp) {
  Label done;

  Register address = scratch1;
  Register value = scratch2;
  if (emit_debug_code()) {
    Label ok;
    lea(address, FieldOperand(object, HeapObject::kMapOffset));
    test_b(address, (1 << kPointerSizeLog2) - 1);
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  DCHECK(!object.is(value));
  DCHECK(!object.is(address));
  DCHECK(!value.is(address));
  AssertNotSmi(object);

  if (!FLAG_incremental_marking) {
    return;
  }

  // Compute the address.
  lea(address, FieldOperand(object, HeapObject::kMapOffset));

  // A single check of the map's pages interesting flag suffices, since it is
  // only set during incremental collection, and then it's also guaranteed that
  // the from object's page's interesting flag is also set. This optimization
  // relies on the fact that maps can never be in new space.
  DCHECK(!isolate()->heap()->InNewSpace(*map));
  CheckPageFlagForMap(map,
                      MemoryChunk::kPointersToHereAreInterestingMask,
                      zero,
                      &done,
                      Label::kNear);

  RecordWriteStub stub(isolate(), object, value, address, OMIT_REMEMBERED_SET,
                       save_fp);
  CallStub(&stub);

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(scratch1, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(scratch2, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}


void MacroAssembler::RecordWrite(
    Register object, Register address, Register value, SaveFPRegsMode fp_mode,
    RememberedSetAction remembered_set_action, SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  DCHECK(!object.is(value));
  DCHECK(!object.is(address));
  DCHECK(!value.is(address));
  AssertNotSmi(object);

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    Label ok;
    cmp(value, Operand(address, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    // Skip barrier if writing a smi.
    JumpIfSmi(value, &done, Label::kNear);
  }

  if (pointers_to_here_check_for_value != kPointersToHereAreAlwaysInteresting) {
    CheckPageFlag(value,
                  value,  // Used as scratch.
                  MemoryChunk::kPointersToHereAreInterestingMask,
                  zero,
                  &done,
                  Label::kNear);
  }
  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

  RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
                       fp_mode);
  CallStub(&stub);

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(address, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}


void MacroAssembler::DebugBreak() {
  Move(eax, Immediate(0));
  mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak, isolate())));
  CEntryStub ces(isolate(), 1);
  call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}


bool MacroAssembler::IsUnsafeImmediate(const Immediate& x) {
  static const int kMaxImmediateBits = 17;
  if (!RelocInfo::IsNone(x.rmode_)) return false;
  return !is_intn(x.x_, kMaxImmediateBits);
}


void MacroAssembler::SafeMove(Register dst, const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    Move(dst, Immediate(x.x_ ^ jit_cookie()));
    xor_(dst, jit_cookie());
  } else {
    Move(dst, x);
  }
}


void MacroAssembler::SafePush(const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    push(Immediate(x.x_ ^ jit_cookie()));
    xor_(Operand(esp, 0), Immediate(jit_cookie()));
  } else {
    push(x);
  }
}
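
// SafeMove and SafePush above XOR large, relocation-free immediates with the
// per-isolate JIT cookie at code-generation time and undo the mask at run
// time, so attacker-chosen 32-bit constants never appear verbatim in the
// generated code stream (a JIT-spraying mitigation). Net effect, as a sketch:
//   dst = (imm ^ cookie) ^ cookie == imm.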


void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}


void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       static_cast<int8_t>(type));
}


void MacroAssembler::CheckFastElements(Register map,
                                       Label* fail,
                                       Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleyElementValue);
  j(above, fail, distance);
}


void MacroAssembler::CheckFastObjectElements(Register map,
                                             Label* fail,
                                             Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleySmiElementValue);
  j(below_equal, fail, distance);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleyElementValue);
  j(above, fail, distance);
}


void MacroAssembler::CheckFastSmiElements(Register map,
                                          Label* fail,
                                          Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleySmiElementValue);
  j(above, fail, distance);
}


void MacroAssembler::StoreNumberToDoubleElements(
    Register maybe_number,
    Register elements,
    Register key,
    Register scratch,
    Label* fail,
    int elements_offset) {
  Label smi_value, done, maybe_nan, not_nan, is_nan, have_double_value;
  JumpIfSmi(maybe_number, &smi_value, Label::kNear);

  CheckMap(maybe_number,
           isolate()->factory()->heap_number_map(),
           fail,
           DONT_DO_SMI_CHECK);

  // Double value, canonicalize NaN.
  uint32_t offset = HeapNumber::kValueOffset + sizeof(kHoleNanLower32);
  cmp(FieldOperand(maybe_number, offset),
      Immediate(kNaNOrInfinityLowerBoundUpper32));
  j(greater_equal, &maybe_nan, Label::kNear);

  bind(&not_nan);
  ExternalReference canonical_nan_reference =
      ExternalReference::address_of_canonical_non_hole_nan();
  fld_d(FieldOperand(maybe_number, HeapNumber::kValueOffset));
  bind(&have_double_value);
  fstp_d(FieldOperand(elements, key, times_4,
                      FixedDoubleArray::kHeaderSize - elements_offset));
  jmp(&done);

  bind(&maybe_nan);
  // Could be NaN or Infinity. If fraction is not zero, it's NaN, otherwise
  // it's an Infinity, and the non-NaN code path applies.
  j(greater, &is_nan, Label::kNear);
  cmp(FieldOperand(maybe_number, HeapNumber::kValueOffset), Immediate(0));
  j(zero, &not_nan);
  bind(&is_nan);
  fld_d(Operand::StaticVariable(canonical_nan_reference));
  jmp(&have_double_value, Label::kNear);

  bind(&smi_value);
  // Value is a smi. Convert to a double and store.
  // Preserve original value.
  mov(scratch, maybe_number);
  SmiUntag(scratch);
  push(scratch);
  fild_s(Operand(esp, 0));
  pop(scratch);
  fstp_d(FieldOperand(elements, key, times_4,
                      FixedDoubleArray::kHeaderSize - elements_offset));
  bind(&done);
}
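
// NaN canonicalization above keys off the upper 32 bits of the double: values
// at or above kNaNOrInfinityLowerBoundUpper32 are NaN or Infinity, and only a
// true NaN (nonzero fraction) is replaced by the canonical non-hole NaN, so
// that hole NaN bit patterns cannot be smuggled into a FixedDoubleArray.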


void MacroAssembler::CompareMap(Register obj, Handle<Map> map) {
  cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
}


void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }

  CompareMap(obj, map);
  j(not_equal, fail);
}


void MacroAssembler::DispatchWeakMap(Register obj, Register scratch1,
                                     Register scratch2, Handle<WeakCell> cell,
                                     Handle<Code> success,
                                     SmiCheckType smi_check_type) {
  Label fail;
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, &fail);
  }
  mov(scratch1, FieldOperand(obj, HeapObject::kMapOffset));
  CmpWeakValue(scratch1, cell, scratch2);
  j(equal, success);

  bind(&fail);
}


Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  test(instance_type, Immediate(kIsNotStringMask));
  return zero;
}


Condition MacroAssembler::IsObjectNameType(Register heap_object,
                                           Register map,
                                           Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  cmpb(instance_type, static_cast<uint8_t>(LAST_NAME_TYPE));
  return below_equal;
}


void MacroAssembler::IsObjectJSObjectType(Register heap_object,
                                          Register map,
                                          Register scratch,
                                          Label* fail) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  IsInstanceJSObjectType(map, scratch, fail);
}


void MacroAssembler::IsInstanceJSObjectType(Register map,
                                            Register scratch,
                                            Label* fail) {
  movzx_b(scratch, FieldOperand(map, Map::kInstanceTypeOffset));
  sub(scratch, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
  cmp(scratch,
      LAST_NONCALLABLE_SPEC_OBJECT_TYPE - FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
  j(above, fail);
}


void MacroAssembler::FCmp() {
  fucompp();
  push(eax);
  fnstsw_ax();
  sahf();
  pop(eax);
}
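
// FCmp relies on the classic x87 idiom: fucompp pops both operands and sets
// the FPU condition codes, fnstsw_ax copies the status word into AX, and sahf
// transfers AH into EFLAGS, so the usual integer condition codes (plus parity
// for "unordered") work after a floating-point compare. EAX is saved and
// restored around the sequence.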


void MacroAssembler::FXamMinusZero() {
  fxam();
  push(eax);
  fnstsw_ax();
  and_(eax, Immediate(0x4700));
  // For minus zero, C3 == 1 && C1 == 1.
  cmp(eax, Immediate(0x4200));
  pop(eax);
  fstp(0);
}


void MacroAssembler::FXamSign() {
  fxam();
  push(eax);
  fnstsw_ax();
  // For negative value (including -0.0), C1 == 1.
  and_(eax, Immediate(0x0200));
  pop(eax);
  fstp(0);
}


void MacroAssembler::X87CheckIA() {
  push(eax);
  fnstsw_ax();
  // For #IA, IE == 1 && SF == 0.
  and_(eax, Immediate(0x0041));
  cmp(eax, Immediate(0x0001));
  pop(eax);
}


// rc=00B, round to nearest.
// rc=01B, round down.
// rc=10B, round up.
// rc=11B, round toward zero.
void MacroAssembler::X87SetRC(int rc) {
  sub(esp, Immediate(kPointerSize));
  fnstcw(MemOperand(esp, 0));
  and_(MemOperand(esp, 0), Immediate(0xF3FF));
  or_(MemOperand(esp, 0), Immediate(rc));
  fldcw(MemOperand(esp, 0));
  add(esp, Immediate(kPointerSize));
}
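
// The RC field sits in bits 10-11 of the x87 control word, which is why the
// code above clears with 0xF3FF; rc is therefore expected already shifted
// into place, e.g. (as a usage sketch) X87SetRC(0x0C00) selects round toward
// zero per the table above.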


void MacroAssembler::X87SetFPUCW(int cw) {
  push(Immediate(cw));
  fldcw(MemOperand(esp, 0));
  add(esp, Immediate(kPointerSize));
}


void MacroAssembler::AssertNumber(Register object) {
  if (emit_debug_code()) {
    Label ok;
    JumpIfSmi(object, &ok);
    cmp(FieldOperand(object, HeapObject::kMapOffset),
        isolate()->factory()->heap_number_map());
    Check(equal, kOperandNotANumber);
    bind(&ok);
  }
}


void MacroAssembler::AssertSmi(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(equal, kOperandIsNotASmi);
  }
}


void MacroAssembler::AssertString(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAString);
    push(object);
    mov(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, FIRST_NONSTRING_TYPE);
    pop(object);
    Check(below, kOperandIsNotAString);
  }
}


void MacroAssembler::AssertName(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAName);
    push(object);
    mov(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, LAST_NAME_TYPE);
    pop(object);
    Check(below_equal, kOperandIsNotAName);
  }
}


void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) {
  if (emit_debug_code()) {
    Label done_checking;
    AssertNotSmi(object);
    cmp(object, isolate()->factory()->undefined_value());
    j(equal, &done_checking);
    cmp(FieldOperand(object, 0),
        Immediate(isolate()->factory()->allocation_site_map()));
    Assert(equal, kExpectedUndefinedOrCell);
    bind(&done_checking);
  }
}


void MacroAssembler::AssertNotSmi(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmi);
  }
}


void MacroAssembler::StubPrologue() {
  push(ebp);  // Caller's frame pointer.
  mov(ebp, esp);
  push(esi);  // Callee's context.
  push(Immediate(Smi::FromInt(StackFrame::STUB)));
}


void MacroAssembler::Prologue(bool code_pre_aging) {
  PredictableCodeSizeScope predictible_code_size_scope(this,
                                                       kNoCodeAgeSequenceLength);
  if (code_pre_aging) {
    // Pre-age the code.
    call(isolate()->builtins()->MarkCodeAsExecutedOnce(),
         RelocInfo::CODE_AGE_SEQUENCE);
    Nop(kNoCodeAgeSequenceLength - Assembler::kCallInstructionLength);
  } else {
    push(ebp);  // Caller's frame pointer.
    mov(ebp, esp);
    push(esi);  // Callee's context.
    push(edi);  // Callee's JS function.
  }
}


void MacroAssembler::EnterFrame(StackFrame::Type type,
                                bool load_constant_pool_pointer_reg) {
  // Out-of-line constant pool not implemented on x87.
  UNREACHABLE();
}


void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, esp);
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(CodeObject()));
  if (emit_debug_code()) {
    cmp(Operand(esp, 0), Immediate(isolate()->factory()->undefined_value()));
    Check(not_equal, kCodeObjectNotProperlyPatched);
  }
}


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (emit_debug_code()) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, kStackFrameTypesMustMatch);
  }
  leave();
}


void MacroAssembler::EnterExitFramePrologue() {
  // Set up the frame structure on the stack.
  DCHECK(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  DCHECK(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  DCHECK(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(ebp);
  mov(ebp, esp);

  // Reserve room for entry stack pointer and push the code object.
  DCHECK(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  push(Immediate(CodeObject()));  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress, isolate());
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  ExternalReference c_function_address(Isolate::kCFunctionAddress, isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);
  mov(Operand::StaticVariable(c_function_address), ebx);
}


void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) {
  // Optionally save FPU state.
  if (save_doubles) {
    // Store FPU state to m108byte.
    int space = 108 + argc * kPointerSize;
    sub(esp, Immediate(space));
    const int offset = -2 * kPointerSize;  // entry fp + code object.
    fnsave(MemOperand(ebp, offset - 108));
  } else {
    sub(esp, Immediate(argc * kPointerSize));
  }

  // Get the required frame alignment for the OS.
  const int kFrameAlignment = base::OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    DCHECK(base::bits::IsPowerOfTwo32(kFrameAlignment));
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}


void MacroAssembler::EnterExitFrame(bool save_doubles) {
  EnterExitFramePrologue();

  // Set up argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, eax);
  lea(esi, Operand(ebp, eax, times_4, offset));

  // Reserve space for argc, argv and isolate.
  EnterExitFrameEpilogue(3, save_doubles);
}


void MacroAssembler::EnterApiExitFrame(int argc) {
  EnterExitFramePrologue();
  EnterExitFrameEpilogue(argc, false);
}


void MacroAssembler::LeaveExitFrame(bool save_doubles) {
  // Optionally restore FPU state.
  if (save_doubles) {
    const int offset = -2 * kPointerSize;
    frstor(MemOperand(ebp, offset - 108));
  }

  // Get the return address from the stack and restore the frame pointer.
  mov(ecx, Operand(ebp, 1 * kPointerSize));
  mov(ebp, Operand(ebp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  lea(esp, Operand(esi, 1 * kPointerSize));

  // Push the return address to get ready to return.
  push(ecx);

  LeaveExitFrameEpilogue(true);
}


void MacroAssembler::LeaveExitFrameEpilogue(bool restore_context) {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  if (restore_context) {
    mov(esi, Operand::StaticVariable(context_address));
  }
#ifdef DEBUG
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
                                       isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}


void MacroAssembler::LeaveApiExitFrame(bool restore_context) {
  mov(esp, ebp);
  pop(ebp);

  LeaveExitFrameEpilogue(restore_context);
}


void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
                                    int handler_index) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // We will build up the handler from the bottom by pushing on the stack.
  // First push the frame pointer and context.
  if (kind == StackHandler::JS_ENTRY) {
    // The frame pointer does not point to a JS frame so we save NULL for
    // ebp. We expect the code throwing an exception to check ebp before
    // dereferencing it to restore the context.
    push(Immediate(0));  // NULL frame pointer.
    push(Immediate(Smi::FromInt(0)));  // No context.
  } else {
    push(ebp);
    push(esi);
  }
  // Push the state and the code object.
  unsigned state =
      StackHandler::IndexField::encode(handler_index) |
      StackHandler::KindField::encode(kind);
  push(Immediate(state));
  Push(CodeObject());

  // Link the current handler as the next handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  push(Operand::StaticVariable(handler_address));
  // Set this new handler as the current one.
  mov(Operand::StaticVariable(handler_address), esp);
}
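
// Resulting handler layout on the stack (esp at the lowest address), matching
// the StackHandlerConstants asserts above:
//   esp + 0:  next handler address
//   esp + 4:  code object
//   esp + 8:  state (kind and handler index)
//   esp + 12: context (esi, or Smi 0 for JS entry)
//   esp + 16: frame pointer (ebp, or NULL for JS entry)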


void MacroAssembler::PopTryHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  pop(Operand::StaticVariable(handler_address));
  add(esp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}


void MacroAssembler::JumpToHandlerEntry() {
  // Compute the handler entry address and jump to it. The handler table is
  // a fixed array of (smi-tagged) code offsets.
  // eax = exception, edi = code object, edx = state.
  mov(ebx, FieldOperand(edi, Code::kHandlerTableOffset));
  shr(edx, StackHandler::kKindWidth);
  mov(edx, FieldOperand(ebx, edx, times_4, FixedArray::kHeaderSize));
  SmiUntag(edx);
  lea(edi, FieldOperand(edi, edx, times_1, Code::kHeaderSize));
  jmp(edi);
}


void MacroAssembler::Throw(Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // The exception is expected in eax.
  if (!value.is(eax)) {
    mov(eax, value);
  }
  // Drop the stack pointer to the top of the top handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  mov(esp, Operand::StaticVariable(handler_address));
  // Restore the next handler.
  pop(Operand::StaticVariable(handler_address));

  // Remove the code object and state, compute the handler address in edi.
  pop(edi);  // Code object.
  pop(edx);  // Index and state.

  // Restore the context and frame pointer.
  pop(esi);  // Context.
  pop(ebp);  // Frame pointer.

  // If the handler is a JS frame, restore the context to the frame.
  // (kind == ENTRY) == (ebp == 0) == (esi == 0), so we could test either
  // ebp or esi.
  Label skip;
  test(esi, esi);
  j(zero, &skip, Label::kNear);
  mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
  bind(&skip);

  JumpToHandlerEntry();
}


void MacroAssembler::ThrowUncatchable(Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // The exception is expected in eax.
  if (!value.is(eax)) {
    mov(eax, value);
  }
  // Drop the stack pointer to the top of the top stack handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  mov(esp, Operand::StaticVariable(handler_address));

  // Unwind the handlers until the top ENTRY handler is found.
  Label fetch_next, check_kind;
  jmp(&check_kind, Label::kNear);
  bind(&fetch_next);
  mov(esp, Operand(esp, StackHandlerConstants::kNextOffset));

  bind(&check_kind);
  STATIC_ASSERT(StackHandler::JS_ENTRY == 0);
  test(Operand(esp, StackHandlerConstants::kStateOffset),
       Immediate(StackHandler::KindField::kMask));
  j(not_zero, &fetch_next);

  // Set the top handler address to next handler past the top ENTRY handler.
  pop(Operand::StaticVariable(handler_address));

  // Remove the code object and state, compute the handler address in edi.
  pop(edi);  // Code object.
  pop(edx);  // Index and state.

  // Clear the context pointer and frame pointer (0 was saved in the handler).
  pop(esi);
  pop(ebp);

  JumpToHandlerEntry();
}


void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss) {
  Label same_contexts;

  DCHECK(!holder_reg.is(scratch1));
  DCHECK(!holder_reg.is(scratch2));
  DCHECK(!scratch1.is(scratch2));

  // Load current lexical context from the stack frame.
  mov(scratch1, Operand(ebp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (emit_debug_code()) {
    cmp(scratch1, Immediate(0));
    Check(not_equal, kWeShouldNotHaveAnEmptyLexicalContext);
  }
  // Load the native context of the current context.
  int offset =
      Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
  mov(scratch1, FieldOperand(scratch1, offset));
  mov(scratch1, FieldOperand(scratch1, GlobalObject::kNativeContextOffset));

  // Check the context is a native context.
  if (emit_debug_code()) {
    // Read the first word and compare to native_context_map.
    cmp(FieldOperand(scratch1, HeapObject::kMapOffset),
        isolate()->factory()->native_context_map());
    Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
  }

  // Check if both contexts are the same.
  cmp(scratch1, FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  j(equal, &same_contexts);

  // Compare security tokens, save holder_reg on the stack so we can use it
  // as a temporary register.
  //
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(scratch2,
      FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));

  // Check the context is a native context.
  if (emit_debug_code()) {
    cmp(scratch2, isolate()->factory()->null_value());
    Check(not_equal, kJSGlobalProxyContextShouldNotBeNull);

    // Read the first word and compare to native_context_map().
    cmp(FieldOperand(scratch2, HeapObject::kMapOffset),
        isolate()->factory()->native_context_map());
    Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
  }

  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch1, FieldOperand(scratch1, token_offset));
  cmp(scratch1, FieldOperand(scratch2, token_offset));
  j(not_equal, miss);

  bind(&same_contexts);
}


// Compute the hash code from the untagged key. This must be kept in sync with
// ComputeIntegerHash in utils.h and with KeyedLoadGenericStub in
// code-stubs-hydrogen.cc.
//
// Note: r0 will contain the hash code on return.
void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
  // Xor original key with a seed.
  if (serializer_enabled()) {
    ExternalReference roots_array_start =
        ExternalReference::roots_array_start(isolate());
    mov(scratch, Immediate(Heap::kHashSeedRootIndex));
    mov(scratch,
        Operand::StaticArray(scratch, times_pointer_size, roots_array_start));
    SmiUntag(scratch);
    xor_(r0, scratch);
  } else {
    int32_t seed = isolate()->heap()->HashSeed();
    xor_(r0, Immediate(seed));
  }

  // hash = ~hash + (hash << 15);
  mov(scratch, r0);
  not_(r0);
  shl(scratch, 15);
  add(r0, scratch);
  // hash = hash ^ (hash >> 12);
  mov(scratch, r0);
  shr(scratch, 12);
  xor_(r0, scratch);
  // hash = hash + (hash << 2);
  lea(r0, Operand(r0, r0, times_4, 0));
  // hash = hash ^ (hash >> 4);
  mov(scratch, r0);
  shr(scratch, 4);
  xor_(r0, scratch);
  // hash = hash * 2057;
  imul(r0, r0, 2057);
  // hash = hash ^ (hash >> 16);
  mov(scratch, r0);
  shr(scratch, 16);
  xor_(r0, scratch);
}
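
// Equivalent integer-hash sketch, mirroring the inline comments above (all
// arithmetic on uint32_t):
//   hash = ~hash + (hash << 15);
//   hash = hash ^ (hash >> 12);
//   hash = hash + (hash << 2);
//   hash = hash ^ (hash >> 4);
//   hash = hash * 2057;
//   hash = hash ^ (hash >> 16);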


void MacroAssembler::LoadFromNumberDictionary(Label* miss,
                                              Register elements,
                                              Register key,
                                              Register r0,
                                              Register r1,
                                              Register r2,
                                              Register result) {
  // Register use:
  //
  // elements - holds the slow-case elements of the receiver and is unchanged.
  //
  // key - holds the smi key on entry and is unchanged.
  //
  // Scratch registers:
  //
  // r0 - holds the untagged key on entry and holds the hash once computed.
  //
  // r1 - used to hold the capacity mask of the dictionary.
  //
  // r2 - used for the index into the dictionary.
  //
  // result - holds the result on exit if the load succeeds and we fall through.

  Label done;

  GetNumberHash(r0, r1);

  // Compute capacity mask.
  mov(r1, FieldOperand(elements, SeededNumberDictionary::kCapacityOffset));
  shr(r1, kSmiTagSize);  // convert smi to int
  dec(r1);

  // Generate an unrolled loop that performs a few probes before giving up.
  for (int i = 0; i < kNumberDictionaryProbes; i++) {
    // Use r2 for index calculations and keep the hash intact in r0.
    mov(r2, r0);
    // Compute the masked index: (hash + i + i * i) & mask.
    if (i > 0) {
      add(r2, Immediate(SeededNumberDictionary::GetProbeOffset(i)));
    }
    and_(r2, r1);

    // Scale the index by multiplying by the entry size.
    DCHECK(SeededNumberDictionary::kEntrySize == 3);
    lea(r2, Operand(r2, r2, times_2, 0));  // r2 = r2 * 3

    // Check if the key matches.
    cmp(key, FieldOperand(elements,
                          r2,
                          times_pointer_size,
                          SeededNumberDictionary::kElementsStartOffset));
    if (i != (kNumberDictionaryProbes - 1)) {
      j(equal, &done);
    } else {
      j(not_equal, miss);
    }
  }

  bind(&done);
  // Check that the value is a field property.
  const int kDetailsOffset =
      SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  DCHECK_EQ(FIELD, 0);
  test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
       Immediate(PropertyDetails::TypeField::kMask << kSmiTagSize));
  j(not_zero, miss);

  // Get the value at the masked, scaled index.
  const int kValueOffset =
      SeededNumberDictionary::kElementsStartOffset + kPointerSize;
  mov(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
}


void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    DCHECK(scratch.is(no_reg));
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    cmp(result, Operand::StaticVariable(allocation_top));
    Check(equal, kUnexpectedAllocationTop);
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available.
  if (scratch.is(no_reg)) {
    mov(result, Operand::StaticVariable(allocation_top));
  } else {
    mov(scratch, Immediate(allocation_top));
    mov(result, Operand(scratch, 0));
  }
}


void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch,
                                               AllocationFlags flags) {
  if (emit_debug_code()) {
    test(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, kUnalignedAllocationInNewSpace);
  }

  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Update new top. Use scratch if available.
  if (scratch.is(no_reg)) {
    mov(Operand::StaticVariable(allocation_top), result_end);
  } else {
    mov(Operand(scratch, 0), result_end);
  }
}


void MacroAssembler::Allocate(int object_size,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
  DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      if (result_end.is_valid()) {
        mov(result_end, Immediate(0x7191));
      }
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Align the next allocation. Storing the filler map without checking top is
  // safe in new-space because the limit of the heap is aligned there.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
      cmp(result, Operand::StaticVariable(allocation_limit));
      j(above_equal, gc_required);
    }
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted.
  Register top_reg = result_end.is_valid() ? result_end : result;
  if (!top_reg.is(result)) {
    mov(top_reg, result);
  }
  add(top_reg, Immediate(object_size));
  j(carry, gc_required);
  cmp(top_reg, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(top_reg, scratch, flags);

  // Tag result if requested.
  bool tag_result = (flags & TAG_OBJECT) != 0;
  if (top_reg.is(result)) {
    if (tag_result) {
      sub(result, Immediate(object_size - kHeapObjectTag));
    } else {
      sub(result, Immediate(object_size));
    }
  } else if (tag_result) {
    DCHECK(kHeapObjectTag == 1);
    inc(result);
  }
}
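
// The Allocate overloads (this one and the two that follow) share the same
// bump-pointer scheme: result holds the old top, the new top is old top plus
// the object size, and allocation bails to gc_required on carry or when the
// new top exceeds the allocation limit. With TAG_OBJECT and
// kHeapObjectTag == 1, a single inc(result) produces the tagged pointer.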
1461
1462
1463void MacroAssembler::Allocate(int header_size,
1464 ScaleFactor element_size,
1465 Register element_count,
1466 RegisterValueType element_count_type,
1467 Register result,
1468 Register result_end,
1469 Register scratch,
1470 Label* gc_required,
1471 AllocationFlags flags) {
1472 DCHECK((flags & SIZE_IN_WORDS) == 0);
1473 if (!FLAG_inline_new) {
1474 if (emit_debug_code()) {
1475 // Trash the registers to simulate an allocation failure.
1476 mov(result, Immediate(0x7091));
1477 mov(result_end, Immediate(0x7191));
1478 if (scratch.is_valid()) {
1479 mov(scratch, Immediate(0x7291));
1480 }
1481 // Register element_count is not modified by the function.
1482 }
1483 jmp(gc_required);
1484 return;
1485 }
1486 DCHECK(!result.is(result_end));
1487
1488 // Load address of new object into result.
1489 LoadAllocationTopHelper(result, scratch, flags);
1490
1491 ExternalReference allocation_limit =
1492 AllocationUtils::GetAllocationLimitReference(isolate(), flags);
1493
1494 // Align the next allocation. Storing the filler map without checking top is
1495 // safe in new-space because the limit of the heap is aligned there.
1496 if ((flags & DOUBLE_ALIGNMENT) != 0) {
1497 DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
1498 DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
1499 Label aligned;
1500 test(result, Immediate(kDoubleAlignmentMask));
1501 j(zero, &aligned, Label::kNear);
1502 if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
1503 cmp(result, Operand::StaticVariable(allocation_limit));
1504 j(above_equal, gc_required);
1505 }
1506 mov(Operand(result, 0),
1507 Immediate(isolate()->factory()->one_pointer_filler_map()));
1508 add(result, Immediate(kDoubleSize / 2));
1509 bind(&aligned);
1510 }
1511
1512 // Calculate new top and bail out if space is exhausted.
1513 // We assume that element_count*element_size + header_size does not
1514 // overflow.
1515 if (element_count_type == REGISTER_VALUE_IS_SMI) {
1516 STATIC_ASSERT(static_cast<ScaleFactor>(times_2 - 1) == times_1);
1517 STATIC_ASSERT(static_cast<ScaleFactor>(times_4 - 1) == times_2);
1518 STATIC_ASSERT(static_cast<ScaleFactor>(times_8 - 1) == times_4);
1519 DCHECK(element_size >= times_2);
1520 DCHECK(kSmiTagSize == 1);
1521 element_size = static_cast<ScaleFactor>(element_size - 1);
1522 } else {
1523 DCHECK(element_count_type == REGISTER_VALUE_IS_INT32);
1524 }
1525 lea(result_end, Operand(element_count, element_size, header_size));
1526 add(result_end, result);
1527 j(carry, gc_required);
1528 cmp(result_end, Operand::StaticVariable(allocation_limit));
1529 j(above, gc_required);
1530
1531 if ((flags & TAG_OBJECT) != 0) {
1532 DCHECK(kHeapObjectTag == 1);
1533 inc(result);
1534 }
1535
1536 // Update allocation top.
1537 UpdateAllocationTopHelper(result_end, scratch, flags);
1538}
1539
1540
1541void MacroAssembler::Allocate(Register object_size,
1542 Register result,
1543 Register result_end,
1544 Register scratch,
1545 Label* gc_required,
1546 AllocationFlags flags) {
1547 DCHECK((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
1548 if (!FLAG_inline_new) {
1549 if (emit_debug_code()) {
1550 // Trash the registers to simulate an allocation failure.
1551 mov(result, Immediate(0x7091));
1552 mov(result_end, Immediate(0x7191));
1553 if (scratch.is_valid()) {
1554 mov(scratch, Immediate(0x7291));
1555 }
1556 // object_size is left unchanged by this function.
1557 }
1558 jmp(gc_required);
1559 return;
1560 }
1561 DCHECK(!result.is(result_end));
1562
1563 // Load address of new object into result.
1564 LoadAllocationTopHelper(result, scratch, flags);
1565
1566 ExternalReference allocation_limit =
1567 AllocationUtils::GetAllocationLimitReference(isolate(), flags);
1568
1569 // Align the next allocation. Storing the filler map without checking top is
1570 // safe in new-space because the limit of the heap is aligned there.
1571 if ((flags & DOUBLE_ALIGNMENT) != 0) {
1572 DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
1573 DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
1574 Label aligned;
1575 test(result, Immediate(kDoubleAlignmentMask));
1576 j(zero, &aligned, Label::kNear);
1577 if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
1578 cmp(result, Operand::StaticVariable(allocation_limit));
1579 j(above_equal, gc_required);
1580 }
1581 mov(Operand(result, 0),
1582 Immediate(isolate()->factory()->one_pointer_filler_map()));
1583 add(result, Immediate(kDoubleSize / 2));
1584 bind(&aligned);
1585 }
1586
1587 // Calculate new top and bail out if space is exhausted.
1588 if (!object_size.is(result_end)) {
1589 mov(result_end, object_size);
1590 }
1591 add(result_end, result);
1592 j(carry, gc_required);
1593 cmp(result_end, Operand::StaticVariable(allocation_limit));
1594 j(above, gc_required);
1595
1596 // Tag result if requested.
1597 if ((flags & TAG_OBJECT) != 0) {
1598 DCHECK(kHeapObjectTag == 1);
1599 inc(result);
1600 }
1601
1602 // Update allocation top.
1603 UpdateAllocationTopHelper(result_end, scratch, flags);
1604}
1605
1606
1607void MacroAssembler::UndoAllocationInNewSpace(Register object) {
1608 ExternalReference new_space_allocation_top =
1609 ExternalReference::new_space_allocation_top_address(isolate());
1610
1611 // Make sure the object has no tag before resetting top.
1612 and_(object, Immediate(~kHeapObjectTagMask));
1613#ifdef DEBUG
1614 cmp(object, Operand::StaticVariable(new_space_allocation_top));
1615 Check(below, kUndoAllocationOfNonAllocatedMemory);
1616#endif
1617 mov(Operand::StaticVariable(new_space_allocation_top), object);
1618}
1619
1620
1621void MacroAssembler::AllocateHeapNumber(Register result,
1622 Register scratch1,
1623 Register scratch2,
1624 Label* gc_required,
1625 MutableMode mode) {
1626 // Allocate heap number in new space.
1627 Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required,
1628 TAG_OBJECT);
1629
1630 Handle<Map> map = mode == MUTABLE
1631 ? isolate()->factory()->mutable_heap_number_map()
1632 : isolate()->factory()->heap_number_map();
1633
1634 // Set the map.
1635 mov(FieldOperand(result, HeapObject::kMapOffset), Immediate(map));
1636}
1637
1638
1639void MacroAssembler::AllocateTwoByteString(Register result,
1640 Register length,
1641 Register scratch1,
1642 Register scratch2,
1643 Register scratch3,
1644 Label* gc_required) {
1645 // Calculate the number of bytes needed for the characters in the string while
1646 // observing object alignment.
1647 DCHECK((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
1648 DCHECK(kShortSize == 2);
1649 // scratch1 = length * 2 + kObjectAlignmentMask.
1650 lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
1651 and_(scratch1, Immediate(~kObjectAlignmentMask));
1652
1653 // Allocate two byte string in new space.
1654 Allocate(SeqTwoByteString::kHeaderSize,
1655 times_1,
1656 scratch1,
1657 REGISTER_VALUE_IS_INT32,
1658 result,
1659 scratch2,
1660 scratch3,
1661 gc_required,
1662 TAG_OBJECT);
1663
1664 // Set the map, length and hash field.
1665 mov(FieldOperand(result, HeapObject::kMapOffset),
1666 Immediate(isolate()->factory()->string_map()));
1667 mov(scratch1, length);
1668 SmiTag(scratch1);
1669 mov(FieldOperand(result, String::kLengthOffset), scratch1);
1670 mov(FieldOperand(result, String::kHashFieldOffset),
1671 Immediate(String::kEmptyHashField));
1672}
1673
1674
1675void MacroAssembler::AllocateOneByteString(Register result, Register length,
1676 Register scratch1, Register scratch2,
1677 Register scratch3,
1678 Label* gc_required) {
1679 // Calculate the number of bytes needed for the characters in the string while
1680 // observing object alignment.
1681 DCHECK((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
1682 mov(scratch1, length);
1683 DCHECK(kCharSize == 1);
1684 add(scratch1, Immediate(kObjectAlignmentMask));
1685 and_(scratch1, Immediate(~kObjectAlignmentMask));
1686
1687 // Allocate one-byte string in new space.
1688 Allocate(SeqOneByteString::kHeaderSize,
1689 times_1,
1690 scratch1,
1691 REGISTER_VALUE_IS_INT32,
1692 result,
1693 scratch2,
1694 scratch3,
1695 gc_required,
1696 TAG_OBJECT);
1697
1698 // Set the map, length and hash field.
1699 mov(FieldOperand(result, HeapObject::kMapOffset),
1700 Immediate(isolate()->factory()->one_byte_string_map()));
1701 mov(scratch1, length);
1702 SmiTag(scratch1);
1703 mov(FieldOperand(result, String::kLengthOffset), scratch1);
1704 mov(FieldOperand(result, String::kHashFieldOffset),
1705 Immediate(String::kEmptyHashField));
1706}
1707
1708
1709void MacroAssembler::AllocateOneByteString(Register result, int length,
1710 Register scratch1, Register scratch2,
1711 Label* gc_required) {
1712 DCHECK(length > 0);
1713
1714 // Allocate one-byte string in new space.
1715 Allocate(SeqOneByteString::SizeFor(length), result, scratch1, scratch2,
1716 gc_required, TAG_OBJECT);
1717
1718 // Set the map, length and hash field.
1719 mov(FieldOperand(result, HeapObject::kMapOffset),
1720 Immediate(isolate()->factory()->one_byte_string_map()));
1721 mov(FieldOperand(result, String::kLengthOffset),
1722 Immediate(Smi::FromInt(length)));
1723 mov(FieldOperand(result, String::kHashFieldOffset),
1724 Immediate(String::kEmptyHashField));
1725}
1726
1727
1728void MacroAssembler::AllocateTwoByteConsString(Register result,
1729 Register scratch1,
1730 Register scratch2,
1731 Label* gc_required) {
1732 // Allocate heap number in new space.
1733 Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
1734 TAG_OBJECT);
1735
1736 // Set the map. The other fields are left uninitialized.
1737 mov(FieldOperand(result, HeapObject::kMapOffset),
1738 Immediate(isolate()->factory()->cons_string_map()));
1739}
1740
1741
1742void MacroAssembler::AllocateOneByteConsString(Register result,
1743 Register scratch1,
1744 Register scratch2,
1745 Label* gc_required) {
1746 Allocate(ConsString::kSize,
1747 result,
1748 scratch1,
1749 scratch2,
1750 gc_required,
1751 TAG_OBJECT);
1752
1753 // Set the map. The other fields are left uninitialized.
1754 mov(FieldOperand(result, HeapObject::kMapOffset),
1755 Immediate(isolate()->factory()->cons_one_byte_string_map()));
1756}
1757
1758
1759void MacroAssembler::AllocateTwoByteSlicedString(Register result,
1760 Register scratch1,
1761 Register scratch2,
1762 Label* gc_required) {
1763 // Allocate sliced string in new space.
1764 Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
1765 TAG_OBJECT);
1766
1767 // Set the map. The other fields are left uninitialized.
1768 mov(FieldOperand(result, HeapObject::kMapOffset),
1769 Immediate(isolate()->factory()->sliced_string_map()));
1770}
1771
1772
1773void MacroAssembler::AllocateOneByteSlicedString(Register result,
1774 Register scratch1,
1775 Register scratch2,
1776 Label* gc_required) {
1777 // Allocate sliced string in new space.
1778 Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
1779 TAG_OBJECT);
1780
1781 // Set the map. The other fields are left uninitialized.
1782 mov(FieldOperand(result, HeapObject::kMapOffset),
1783 Immediate(isolate()->factory()->sliced_one_byte_string_map()));
1784}
1785
1786
1787// Copy memory, byte-by-byte, from source to destination. Not optimized for
1788// long or aligned copies. The contents of scratch and length are destroyed.
1789// Source and destination are incremented by length.
1790// Many variants of movsb, loop unrolling, word moves, and indexed operands
1791// have been tried here already, and this is fastest.
1792// A simpler loop is faster on small copies, but 30% slower on large ones.
1793// The cld() instruction must have been emitted, to clear the direction
1794// flag, before calling this function.
1795void MacroAssembler::CopyBytes(Register source,
1796 Register destination,
1797 Register length,
1798 Register scratch) {
1799 Label short_loop, len4, len8, len12, done, short_string;
1800 DCHECK(source.is(esi));
1801 DCHECK(destination.is(edi));
1802 DCHECK(length.is(ecx));
1803 cmp(length, Immediate(4));
1804 j(below, &short_string, Label::kNear);
1805
1806 // Because source is 4-byte aligned in our uses of this function,
1807 // we keep source aligned for the rep_movs call by copying the odd bytes
1808 // at the end of the ranges.
1809 mov(scratch, Operand(source, length, times_1, -4));
1810 mov(Operand(destination, length, times_1, -4), scratch);
1811
1812 cmp(length, Immediate(8));
1813 j(below_equal, &len4, Label::kNear);
1814 cmp(length, Immediate(12));
1815 j(below_equal, &len8, Label::kNear);
1816 cmp(length, Immediate(16));
1817 j(below_equal, &len12, Label::kNear);
1818
1819 mov(scratch, ecx);
1820 shr(ecx, 2);
1821 rep_movs();
1822 and_(scratch, Immediate(0x3));
1823 add(destination, scratch);
1824 jmp(&done, Label::kNear);
1825
1826 bind(&len12);
1827 mov(scratch, Operand(source, 8));
1828 mov(Operand(destination, 8), scratch);
1829 bind(&len8);
1830 mov(scratch, Operand(source, 4));
1831 mov(Operand(destination, 4), scratch);
1832 bind(&len4);
1833 mov(scratch, Operand(source, 0));
1834 mov(Operand(destination, 0), scratch);
1835 add(destination, length);
1836 jmp(&done, Label::kNear);
1837
1838 bind(&short_string);
1839 test(length, length);
1840 j(zero, &done, Label::kNear);
1841
1842 bind(&short_loop);
1843 mov_b(scratch, Operand(source, 0));
1844 mov_b(Operand(destination, 0), scratch);
1845 inc(source);
1846 inc(destination);
1847 dec(length);
1848 j(not_zero, &short_loop);
1849
1850 bind(&done);
1851}
1852
1853
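// Fill the memory range [start_offset, end_offset) with the filler word, one
// pointer at a time. The range is assumed to be a multiple of kPointerSize;
// start_offset is clobbered (advanced to end_offset).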
1854void MacroAssembler::InitializeFieldsWithFiller(Register start_offset,
1855 Register end_offset,
1856 Register filler) {
1857 Label loop, entry;
1858 jmp(&entry);
1859 bind(&loop);
1860 mov(Operand(start_offset, 0), filler);
1861 add(start_offset, Immediate(kPointerSize));
1862 bind(&entry);
1863 cmp(start_offset, end_offset);
1864 j(less, &loop);
1865}
1866
1867
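// Test a single bit of a smi-tagged bit field stored at field_offset in
// object. The bit index is given relative to the untagged value, so the smi
// tag and shift are added before selecting the byte to test_b. Sets the zero
// flag for a following j(zero, ...) / j(not_zero, ...).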
1868void MacroAssembler::BooleanBitTest(Register object,
1869 int field_offset,
1870 int bit_index) {
1871 bit_index += kSmiTagSize + kSmiShiftSize;
1872 DCHECK(base::bits::IsPowerOfTwo32(kBitsPerByte));
1873 int byte_index = bit_index / kBitsPerByte;
1874 int byte_bit_index = bit_index & (kBitsPerByte - 1);
1875 test_b(FieldOperand(object, field_offset + byte_index),
1876 static_cast<byte>(1 << byte_bit_index));
1877}
1878
1879
1880
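// Jump to then_label if result is zero and op is negative, i.e. when the
// mathematical result of the operation was -0.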
1881void MacroAssembler::NegativeZeroTest(Register result,
1882 Register op,
1883 Label* then_label) {
1884 Label ok;
1885 test(result, result);
1886 j(not_zero, &ok);
1887 test(op, op);
1888 j(sign, then_label);
1889 bind(&ok);
1890}
1891
1892
1893void MacroAssembler::NegativeZeroTest(Register result,
1894 Register op1,
1895 Register op2,
1896 Register scratch,
1897 Label* then_label) {
1898 Label ok;
1899 test(result, result);
1900 j(not_zero, &ok);
1901 mov(scratch, op1);
1902 or_(scratch, op2);
1903 j(sign, then_label);
1904 bind(&ok);
1905}
1906
1907
1908void MacroAssembler::TryGetFunctionPrototype(Register function,
1909 Register result,
1910 Register scratch,
1911 Label* miss,
1912 bool miss_on_bound_function) {
1913 Label non_instance;
1914 if (miss_on_bound_function) {
1915 // Check that the receiver isn't a smi.
1916 JumpIfSmi(function, miss);
1917
1918 // Check that the function really is a function.
1919 CmpObjectType(function, JS_FUNCTION_TYPE, result);
1920 j(not_equal, miss);
1921
1922 // If a bound function, go to miss label.
1923 mov(scratch,
1924 FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
1925 BooleanBitTest(scratch, SharedFunctionInfo::kCompilerHintsOffset,
1926 SharedFunctionInfo::kBoundFunction);
1927 j(not_zero, miss);
1928
1929 // Make sure that the function has an instance prototype.
1930 movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
1931 test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
1932 j(not_zero, &non_instance);
1933 }
1934
1935 // Get the prototype or initial map from the function.
1936 mov(result,
1937 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
1938
1939 // If the prototype or initial map is the hole, don't return it and
1940 // simply miss the cache instead. This will allow us to allocate a
1941 // prototype object on-demand in the runtime system.
1942 cmp(result, Immediate(isolate()->factory()->the_hole_value()));
1943 j(equal, miss);
1944
1945 // If the function does not have an initial map, we're done.
1946 Label done;
1947 CmpObjectType(result, MAP_TYPE, scratch);
1948 j(not_equal, &done);
1949
1950 // Get the prototype from the initial map.
1951 mov(result, FieldOperand(result, Map::kPrototypeOffset));
1952
1953 if (miss_on_bound_function) {
1954 jmp(&done);
1955
1956 // Non-instance prototype: Fetch prototype from constructor field
1957 // in initial map.
1958 bind(&non_instance);
1959 mov(result, FieldOperand(result, Map::kConstructorOffset));
1960 }
1961
1962 // All done.
1963 bind(&done);
1964}
1965
1966
1967void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) {
1968 DCHECK(AllowThisStubCall(stub)); // Calls are not allowed in some stubs.
1969 call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
1970}
1971
1972
1973void MacroAssembler::TailCallStub(CodeStub* stub) {
1974 jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
1975}
1976
1977
1978void MacroAssembler::StubReturn(int argc) {
1979 DCHECK(argc >= 1 && generating_stub());
1980 ret((argc - 1) * kPointerSize);
1981}
1982
1983
1984bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
1985 return has_frame_ || !stub->SometimesSetsUpAFrame();
1986}
1987
1988
1989void MacroAssembler::IndexFromHash(Register hash, Register index) {
1990 // The assert checks that the constants for the maximum number of digits
1991 // for an array index cached in the hash field and the number of bits
1992 // reserved for it do not conflict.
1993 DCHECK(TenToThe(String::kMaxCachedArrayIndexLength) <
1994 (1 << String::kArrayIndexValueBits));
1995 if (!index.is(hash)) {
1996 mov(index, hash);
1997 }
1998 DecodeFieldToSmi<String::ArrayIndexValueBits>(index);
1999}
2000
2001
2002void MacroAssembler::CallRuntime(const Runtime::Function* f, int num_arguments,
2003 SaveFPRegsMode save_doubles) {
2004 // If the expected number of arguments of the runtime function is
2005 // constant, we check that the actual number of arguments matches the
2006 // expectation.
2007 CHECK(f->nargs < 0 || f->nargs == num_arguments);
2008
2009 // TODO(1236192): Most runtime routines don't need the number of
2010 // arguments passed in because it is constant. At some point we
2011 // should remove this need and make the runtime routine entry code
2012 // smarter.
2013 Move(eax, Immediate(num_arguments));
2014 mov(ebx, Immediate(ExternalReference(f, isolate())));
2015 CEntryStub ces(isolate(), 1, save_doubles);
2016 CallStub(&ces);
2017}
2018
2019
2020void MacroAssembler::CallExternalReference(ExternalReference ref,
2021 int num_arguments) {
2022 mov(eax, Immediate(num_arguments));
2023 mov(ebx, Immediate(ref));
2024
2025 CEntryStub stub(isolate(), 1);
2026 CallStub(&stub);
2027}
2028
2029
2030void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
2031 int num_arguments,
2032 int result_size) {
2033 // TODO(1236192): Most runtime routines don't need the number of
2034 // arguments passed in because it is constant. At some point we
2035 // should remove this need and make the runtime routine entry code
2036 // smarter.
2037 Move(eax, Immediate(num_arguments));
2038 JumpToExternalReference(ext);
2039}
2040
2041
2042void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
2043 int num_arguments,
2044 int result_size) {
2045 TailCallExternalReference(ExternalReference(fid, isolate()),
2046 num_arguments,
2047 result_size);
2048}
2049
2050
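// Returns the stack operand for the index-th word-sized argument of an API
// call; only meaningful after PrepareCallApiFunction has reserved the slots.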
2051Operand ApiParameterOperand(int index) {
2052 return Operand(esp, index * kPointerSize);
2053}
2054
2055
2056void MacroAssembler::PrepareCallApiFunction(int argc) {
2057 EnterApiExitFrame(argc);
2058 if (emit_debug_code()) {
2059 mov(esi, Immediate(bit_cast<int32_t>(kZapValue)));
2060 }
2061}
2062
2063
2064void MacroAssembler::CallApiFunctionAndReturn(
2065 Register function_address,
2066 ExternalReference thunk_ref,
2067 Operand thunk_last_arg,
2068 int stack_space,
2069 Operand return_value_operand,
2070 Operand* context_restore_operand) {
2071 ExternalReference next_address =
2072 ExternalReference::handle_scope_next_address(isolate());
2073 ExternalReference limit_address =
2074 ExternalReference::handle_scope_limit_address(isolate());
2075 ExternalReference level_address =
2076 ExternalReference::handle_scope_level_address(isolate());
2077
2078 DCHECK(edx.is(function_address));
2079 // Allocate HandleScope in callee-save registers.
2080 mov(ebx, Operand::StaticVariable(next_address));
2081 mov(edi, Operand::StaticVariable(limit_address));
2082 add(Operand::StaticVariable(level_address), Immediate(1));
2083
2084 if (FLAG_log_timer_events) {
2085 FrameScope frame(this, StackFrame::MANUAL);
2086 PushSafepointRegisters();
2087 PrepareCallCFunction(1, eax);
2088 mov(Operand(esp, 0),
2089 Immediate(ExternalReference::isolate_address(isolate())));
2090 CallCFunction(ExternalReference::log_enter_external_function(isolate()), 1);
2091 PopSafepointRegisters();
2092 }
2093
2094
2095 Label profiler_disabled;
2096 Label end_profiler_check;
2097 mov(eax, Immediate(ExternalReference::is_profiling_address(isolate())));
2098 cmpb(Operand(eax, 0), 0);
2099 j(zero, &profiler_disabled);
2100
2101 // Additional parameter is the address of the actual getter function.
2102 mov(thunk_last_arg, function_address);
2103 // Call the api function.
2104 mov(eax, Immediate(thunk_ref));
2105 call(eax);
2106 jmp(&end_profiler_check);
2107
2108 bind(&profiler_disabled);
2109 // Call the api function.
2110 call(function_address);
2111 bind(&end_profiler_check);
2112
2113 if (FLAG_log_timer_events) {
2114 FrameScope frame(this, StackFrame::MANUAL);
2115 PushSafepointRegisters();
2116 PrepareCallCFunction(1, eax);
2117 mov(Operand(esp, 0),
2118 Immediate(ExternalReference::isolate_address(isolate())));
2119 CallCFunction(ExternalReference::log_leave_external_function(isolate()), 1);
2120 PopSafepointRegisters();
2121 }
2122
2123 Label prologue;
2124 // Load the value from ReturnValue
2125 mov(eax, return_value_operand);
2126
2127 Label promote_scheduled_exception;
2128 Label exception_handled;
2129 Label delete_allocated_handles;
2130 Label leave_exit_frame;
2131
2132 bind(&prologue);
2133 // No more valid handles (the result handle was the last one). Restore
2134 // previous handle scope.
2135 mov(Operand::StaticVariable(next_address), ebx);
2136 sub(Operand::StaticVariable(level_address), Immediate(1));
2137 Assert(above_equal, kInvalidHandleScopeLevel);
2138 cmp(edi, Operand::StaticVariable(limit_address));
2139 j(not_equal, &delete_allocated_handles);
2140 bind(&leave_exit_frame);
2141
2142 // Check if the function scheduled an exception.
2143 ExternalReference scheduled_exception_address =
2144 ExternalReference::scheduled_exception_address(isolate());
2145 cmp(Operand::StaticVariable(scheduled_exception_address),
2146 Immediate(isolate()->factory()->the_hole_value()));
2147 j(not_equal, &promote_scheduled_exception);
2148 bind(&exception_handled);
2149
2150#if ENABLE_EXTRA_CHECKS
2151 // Check if the function returned a valid JavaScript value.
2152 Label ok;
2153 Register return_value = eax;
2154 Register map = ecx;
2155
2156 JumpIfSmi(return_value, &ok, Label::kNear);
2157 mov(map, FieldOperand(return_value, HeapObject::kMapOffset));
2158
2159 CmpInstanceType(map, FIRST_NONSTRING_TYPE);
2160 j(below, &ok, Label::kNear);
2161
2162 CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
2163 j(above_equal, &ok, Label::kNear);
2164
2165 cmp(map, isolate()->factory()->heap_number_map());
2166 j(equal, &ok, Label::kNear);
2167
2168 cmp(return_value, isolate()->factory()->undefined_value());
2169 j(equal, &ok, Label::kNear);
2170
2171 cmp(return_value, isolate()->factory()->true_value());
2172 j(equal, &ok, Label::kNear);
2173
2174 cmp(return_value, isolate()->factory()->false_value());
2175 j(equal, &ok, Label::kNear);
2176
2177 cmp(return_value, isolate()->factory()->null_value());
2178 j(equal, &ok, Label::kNear);
2179
2180 Abort(kAPICallReturnedInvalidObject);
2181
2182 bind(&ok);
2183#endif
2184
2185 bool restore_context = context_restore_operand != NULL;
2186 if (restore_context) {
2187 mov(esi, *context_restore_operand);
2188 }
2189 LeaveApiExitFrame(!restore_context);
2190 ret(stack_space * kPointerSize);
2191
2192 bind(&promote_scheduled_exception);
2193 {
2194 FrameScope frame(this, StackFrame::INTERNAL);
2195 CallRuntime(Runtime::kPromoteScheduledException, 0);
2196 }
2197 jmp(&exception_handled);
2198
2199 // HandleScope limit has changed. Delete allocated extensions.
2200 ExternalReference delete_extensions =
2201 ExternalReference::delete_handle_scope_extensions(isolate());
2202 bind(&delete_allocated_handles);
2203 mov(Operand::StaticVariable(limit_address), edi);
2204 mov(edi, eax);
2205 mov(Operand(esp, 0),
2206 Immediate(ExternalReference::isolate_address(isolate())));
2207 mov(eax, Immediate(delete_extensions));
2208 call(eax);
2209 mov(eax, edi);
2210 jmp(&leave_exit_frame);
2211}
2212
2213
2214void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
2215 // Set the entry point and jump to the C entry runtime stub.
2216 mov(ebx, Immediate(ext));
2217 CEntryStub ces(isolate(), 1);
2218 jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
2219}
2220
2221
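// Compare the expected and actual argument counts and, when they might
// differ, route the invocation through the arguments adaptor trampoline.
// *definitely_mismatches is set when the counts are statically known to
// differ; callers skip the direct call in that case.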
2222void MacroAssembler::InvokePrologue(const ParameterCount& expected,
2223 const ParameterCount& actual,
2224 Handle<Code> code_constant,
2225 const Operand& code_operand,
2226 Label* done,
2227 bool* definitely_mismatches,
2228 InvokeFlag flag,
2229 Label::Distance done_near,
2230 const CallWrapper& call_wrapper) {
2231 bool definitely_matches = false;
2232 *definitely_mismatches = false;
2233 Label invoke;
2234 if (expected.is_immediate()) {
2235 DCHECK(actual.is_immediate());
2236 if (expected.immediate() == actual.immediate()) {
2237 definitely_matches = true;
2238 } else {
2239 mov(eax, actual.immediate());
2240 const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
2241 if (expected.immediate() == sentinel) {
2242 // Don't worry about adapting arguments for builtins that
2243 // don't want that done. Skip adaptation code by making it look
2244 // like we have a match between expected and actual number of
2245 // arguments.
2246 definitely_matches = true;
2247 } else {
2248 *definitely_mismatches = true;
2249 mov(ebx, expected.immediate());
2250 }
2251 }
2252 } else {
2253 if (actual.is_immediate()) {
2254 // Expected is in register, actual is immediate. This is the
2255 // case when we invoke function values without going through the
2256 // IC mechanism.
2257 cmp(expected.reg(), actual.immediate());
2258 j(equal, &invoke);
2259 DCHECK(expected.reg().is(ebx));
2260 mov(eax, actual.immediate());
2261 } else if (!expected.reg().is(actual.reg())) {
2262 // Both expected and actual are in (different) registers. This
2263 // is the case when we invoke functions using call and apply.
2264 cmp(expected.reg(), actual.reg());
2265 j(equal, &invoke);
2266 DCHECK(actual.reg().is(eax));
2267 DCHECK(expected.reg().is(ebx));
2268 }
2269 }
2270
2271 if (!definitely_matches) {
2272 Handle<Code> adaptor =
2273 isolate()->builtins()->ArgumentsAdaptorTrampoline();
2274 if (!code_constant.is_null()) {
2275 mov(edx, Immediate(code_constant));
2276 add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
2277 } else if (!code_operand.is_reg(edx)) {
2278 mov(edx, code_operand);
2279 }
2280
2281 if (flag == CALL_FUNCTION) {
2282 call_wrapper.BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET));
2283 call(adaptor, RelocInfo::CODE_TARGET);
2284 call_wrapper.AfterCall();
2285 if (!*definitely_mismatches) {
2286 jmp(done, done_near);
2287 }
2288 } else {
2289 jmp(adaptor, RelocInfo::CODE_TARGET);
2290 }
2291 bind(&invoke);
2292 }
2293}
2294
2295
2296void MacroAssembler::InvokeCode(const Operand& code,
2297 const ParameterCount& expected,
2298 const ParameterCount& actual,
2299 InvokeFlag flag,
2300 const CallWrapper& call_wrapper) {
2301 // You can't call a function without a valid frame.
2302 DCHECK(flag == JUMP_FUNCTION || has_frame());
2303
2304 Label done;
2305 bool definitely_mismatches = false;
2306 InvokePrologue(expected, actual, Handle<Code>::null(), code,
2307 &done, &definitely_mismatches, flag, Label::kNear,
2308 call_wrapper);
2309 if (!definitely_mismatches) {
2310 if (flag == CALL_FUNCTION) {
2311 call_wrapper.BeforeCall(CallSize(code));
2312 call(code);
2313 call_wrapper.AfterCall();
2314 } else {
2315 DCHECK(flag == JUMP_FUNCTION);
2316 jmp(code);
2317 }
2318 bind(&done);
2319 }
2320}
2321
2322
2323void MacroAssembler::InvokeFunction(Register fun,
2324 const ParameterCount& actual,
2325 InvokeFlag flag,
2326 const CallWrapper& call_wrapper) {
2327 // You can't call a function without a valid frame.
2328 DCHECK(flag == JUMP_FUNCTION || has_frame());
2329
2330 DCHECK(fun.is(edi));
2331 mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2332 mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
2333 mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
2334 SmiUntag(ebx);
2335
2336 ParameterCount expected(ebx);
2337 InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
2338 expected, actual, flag, call_wrapper);
2339}
2340
2341
2342void MacroAssembler::InvokeFunction(Register fun,
2343 const ParameterCount& expected,
2344 const ParameterCount& actual,
2345 InvokeFlag flag,
2346 const CallWrapper& call_wrapper) {
2347 // You can't call a function without a valid frame.
2348 DCHECK(flag == JUMP_FUNCTION || has_frame());
2349
2350 DCHECK(fun.is(edi));
2351 mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
2352
2353 InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
2354 expected, actual, flag, call_wrapper);
2355}
2356
2357
2358void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
2359 const ParameterCount& expected,
2360 const ParameterCount& actual,
2361 InvokeFlag flag,
2362 const CallWrapper& call_wrapper) {
2363 LoadHeapObject(edi, function);
2364 InvokeFunction(edi, expected, actual, flag, call_wrapper);
2365}
2366
2367
2368void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
2369 InvokeFlag flag,
2370 const CallWrapper& call_wrapper) {
2371 // You can't call a builtin without a valid frame.
2372 DCHECK(flag == JUMP_FUNCTION || has_frame());
2373
2374 // Rely on the assertion to check that the number of provided
2375 // arguments matches the expected number of arguments. Fake a
2376 // parameter count to avoid emitting code to do the check.
2377 ParameterCount expected(0);
2378 GetBuiltinFunction(edi, id);
2379 InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
2380 expected, expected, flag, call_wrapper);
2381}
2382
2383
2384void MacroAssembler::GetBuiltinFunction(Register target,
2385 Builtins::JavaScript id) {
2386 // Load the JavaScript builtin function from the builtins object.
2387 mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
2388 mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
2389 mov(target, FieldOperand(target,
2390 JSBuiltinsObject::OffsetOfFunctionWithId(id)));
2391}
2392
2393
2394void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
2395 DCHECK(!target.is(edi));
2396 // Load the JavaScript builtin function from the builtins object.
2397 GetBuiltinFunction(edi, id);
2398 // Load the code entry point from the function into the target register.
2399 mov(target, FieldOperand(edi, JSFunction::kCodeEntryOffset));
2400}
2401
2402
2403void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
2404 if (context_chain_length > 0) {
2405 // Move up the chain of contexts to the context containing the slot.
2406 mov(dst, Operand(esi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
2407 for (int i = 1; i < context_chain_length; i++) {
2408 mov(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
2409 }
2410 } else {
2411 // Slot is in the current function context. Move it into the
2412 // destination register in case we store into it (the write barrier
2413 // cannot be allowed to destroy the context in esi).
2414 mov(dst, esi);
2415 }
2416
2417 // We should not have found a with context by walking the context chain
2418 // (i.e., the static scope chain and runtime context chain do not agree).
2419 // A variable occurring in such a scope should have slot type LOOKUP and
2420 // not CONTEXT.
2421 if (emit_debug_code()) {
2422 cmp(FieldOperand(dst, HeapObject::kMapOffset),
2423 isolate()->factory()->with_context_map());
2424 Check(not_equal, kVariableResolvedToWithContext);
2425 }
2426}
2427
2428
2429void MacroAssembler::LoadTransitionedArrayMapConditional(
2430 ElementsKind expected_kind,
2431 ElementsKind transitioned_kind,
2432 Register map_in_out,
2433 Register scratch,
2434 Label* no_map_match) {
2435 // Load the global or builtins object from the current context.
2436 mov(scratch, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
2437 mov(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));
2438
2439 // Check that the function's map is the same as the expected cached map.
2440 mov(scratch, Operand(scratch,
2441 Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX)));
2442
2443 size_t offset = expected_kind * kPointerSize +
2444 FixedArrayBase::kHeaderSize;
2445 cmp(map_in_out, FieldOperand(scratch, offset));
2446 j(not_equal, no_map_match);
2447
2448 // Use the transitioned cached map.
2449 offset = transitioned_kind * kPointerSize +
2450 FixedArrayBase::kHeaderSize;
2451 mov(map_in_out, FieldOperand(scratch, offset));
2452}
2453
2454
2455void MacroAssembler::LoadGlobalFunction(int index, Register function) {
2456 // Load the global or builtins object from the current context.
2457 mov(function,
2458 Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
2459 // Load the native context from the global or builtins object.
2460 mov(function,
2461 FieldOperand(function, GlobalObject::kNativeContextOffset));
2462 // Load the function from the native context.
2463 mov(function, Operand(function, Context::SlotOffset(index)));
2464}
2465
2466
2467void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
2468 Register map) {
2469 // Load the initial map. The global functions all have initial maps.
2470 mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
2471 if (emit_debug_code()) {
2472 Label ok, fail;
2473 CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
2474 jmp(&ok);
2475 bind(&fail);
2476 Abort(kGlobalFunctionsMustHaveInitialMap);
2477 bind(&ok);
2478 }
2479}
2480
2481
2482// Store the value in register src in the safepoint register stack
2483// slot for register dst.
2484void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
2485 mov(SafepointRegisterSlot(dst), src);
2486}
2487
2488
2489void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Immediate src) {
2490 mov(SafepointRegisterSlot(dst), src);
2491}
2492
2493
2494void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
2495 mov(dst, SafepointRegisterSlot(src));
2496}
2497
2498
2499Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
2500 return Operand(esp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
2501}
2502
2503
2504int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
2505 // The registers are pushed starting with the lowest encoding,
2506 // which means that the lowest encodings are furthest away from
2507 // the stack pointer.
2508 DCHECK(reg_code >= 0 && reg_code < kNumSafepointRegisters);
2509 return kNumSafepointRegisters - reg_code - 1;
2510}
2511
2512
2513void MacroAssembler::LoadHeapObject(Register result,
2514 Handle<HeapObject> object) {
2515 AllowDeferredHandleDereference embedding_raw_address;
2516 if (isolate()->heap()->InNewSpace(*object)) {
2517 Handle<Cell> cell = isolate()->factory()->NewCell(object);
2518 mov(result, Operand::ForCell(cell));
2519 } else {
2520 mov(result, object);
2521 }
2522}
2523
2524
2525void MacroAssembler::CmpHeapObject(Register reg, Handle<HeapObject> object) {
2526 AllowDeferredHandleDereference using_raw_address;
2527 if (isolate()->heap()->InNewSpace(*object)) {
2528 Handle<Cell> cell = isolate()->factory()->NewCell(object);
2529 cmp(reg, Operand::ForCell(cell));
2530 } else {
2531 cmp(reg, object);
2532 }
2533}
2534
2535
2536void MacroAssembler::PushHeapObject(Handle<HeapObject> object) {
2537 AllowDeferredHandleDereference using_raw_address;
2538 if (isolate()->heap()->InNewSpace(*object)) {
2539 Handle<Cell> cell = isolate()->factory()->NewCell(object);
2540 push(Operand::ForCell(cell));
2541 } else {
2542 Push(object);
2543 }
2544}
2545
2546
2547void MacroAssembler::CmpWeakValue(Register value, Handle<WeakCell> cell,
2548 Register scratch) {
2549 mov(scratch, cell);
2550 cmp(value, FieldOperand(scratch, WeakCell::kValueOffset));
2551}
2552
2553
2554void MacroAssembler::LoadWeakValue(Register value, Handle<WeakCell> cell,
2555 Label* miss) {
2556 mov(value, cell);
2557 mov(value, FieldOperand(value, WeakCell::kValueOffset));
2558 JumpIfSmi(value, miss);
2559}
2560
2561
2562void MacroAssembler::Ret() {
2563 ret(0);
2564}
2565
2566
2567void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
2568 if (is_uint16(bytes_dropped)) {
2569 ret(bytes_dropped);
2570 } else {
2571 pop(scratch);
2572 add(esp, Immediate(bytes_dropped));
2573 push(scratch);
2574 ret(0);
2575 }
2576}
2577
2578
2579void MacroAssembler::VerifyX87StackDepth(uint32_t depth) {
2580 // Turn off the stack depth check when serializer is enabled to reduce the
2581 // code size.
2582 if (serializer_enabled()) return;
2583 // Make sure the floating point stack is either empty or has depth items.
2584 DCHECK(depth <= 7);
2585 // This is very expensive.
2586 DCHECK(FLAG_debug_code && FLAG_enable_slow_asserts);
2587
2588 // The top-of-stack (tos) is 7 if there is one item pushed.
2589 int tos = (8 - depth) % 8;
2590 const int kTopMask = 0x3800;
2591 push(eax);
2592 fwait();
2593 fnstsw_ax();
2594 and_(eax, kTopMask);
2595 shr(eax, 11);
2596 cmp(eax, Immediate(tos));
2597 Check(equal, kUnexpectedFPUStackDepthAfterInstruction);
2598 fnclex();
2599 pop(eax);
2600}
2601
2602
2603void MacroAssembler::Drop(int stack_elements) {
2604 if (stack_elements > 0) {
2605 add(esp, Immediate(stack_elements * kPointerSize));
2606 }
2607}
2608
2609
2610void MacroAssembler::Move(Register dst, Register src) {
2611 if (!dst.is(src)) {
2612 mov(dst, src);
2613 }
2614}
2615
2616
2617void MacroAssembler::Move(Register dst, const Immediate& x) {
2618 if (x.is_zero()) {
2619 xor_(dst, dst); // Shorter than mov of 32-bit immediate 0.
2620 } else {
2621 mov(dst, x);
2622 }
2623}
2624
2625
2626void MacroAssembler::Move(const Operand& dst, const Immediate& x) {
2627 mov(dst, x);
2628}
2629
2630
2631void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
2632 if (FLAG_native_code_counters && counter->Enabled()) {
2633 mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
2634 }
2635}
2636
2637
2638void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
2639 DCHECK(value > 0);
2640 if (FLAG_native_code_counters && counter->Enabled()) {
2641 Operand operand = Operand::StaticVariable(ExternalReference(counter));
2642 if (value == 1) {
2643 inc(operand);
2644 } else {
2645 add(operand, Immediate(value));
2646 }
2647 }
2648}
2649
2650
2651void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
2652 DCHECK(value > 0);
2653 if (FLAG_native_code_counters && counter->Enabled()) {
2654 Operand operand = Operand::StaticVariable(ExternalReference(counter));
2655 if (value == 1) {
2656 dec(operand);
2657 } else {
2658 sub(operand, Immediate(value));
2659 }
2660 }
2661}
2662
2663
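// Conditionally increment the counter: the update only happens when cc holds,
// and pushfd/popfd preserve the caller's flags across it.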
2664void MacroAssembler::IncrementCounter(Condition cc,
2665 StatsCounter* counter,
2666 int value) {
2667 DCHECK(value > 0);
2668 if (FLAG_native_code_counters && counter->Enabled()) {
2669 Label skip;
2670 j(NegateCondition(cc), &skip);
2671 pushfd();
2672 IncrementCounter(counter, value);
2673 popfd();
2674 bind(&skip);
2675 }
2676}
2677
2678
2679void MacroAssembler::DecrementCounter(Condition cc,
2680 StatsCounter* counter,
2681 int value) {
2682 DCHECK(value > 0);
2683 if (FLAG_native_code_counters && counter->Enabled()) {
2684 Label skip;
2685 j(NegateCondition(cc), &skip);
2686 pushfd();
2687 DecrementCounter(counter, value);
2688 popfd();
2689 bind(&skip);
2690 }
2691}
2692
2693
2694void MacroAssembler::Assert(Condition cc, BailoutReason reason) {
2695 if (emit_debug_code()) Check(cc, reason);
2696}
2697
2698
2699void MacroAssembler::AssertFastElements(Register elements) {
2700 if (emit_debug_code()) {
2701 Factory* factory = isolate()->factory();
2702 Label ok;
2703 cmp(FieldOperand(elements, HeapObject::kMapOffset),
2704 Immediate(factory->fixed_array_map()));
2705 j(equal, &ok);
2706 cmp(FieldOperand(elements, HeapObject::kMapOffset),
2707 Immediate(factory->fixed_double_array_map()));
2708 j(equal, &ok);
2709 cmp(FieldOperand(elements, HeapObject::kMapOffset),
2710 Immediate(factory->fixed_cow_array_map()));
2711 j(equal, &ok);
2712 Abort(kJSObjectWithFastElementsMapHasSlowElements);
2713 bind(&ok);
2714 }
2715}
2716
2717
2718void MacroAssembler::Check(Condition cc, BailoutReason reason) {
2719 Label L;
2720 j(cc, &L);
2721 Abort(reason);
2722 // will not return here
2723 bind(&L);
2724}
2725
2726
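// Verify that esp obeys the platform's activation frame alignment, breaking
// into the debugger with int3 when it does not.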
2727void MacroAssembler::CheckStackAlignment() {
2728 int frame_alignment = base::OS::ActivationFrameAlignment();
2729 int frame_alignment_mask = frame_alignment - 1;
2730 if (frame_alignment > kPointerSize) {
2731 DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
2732 Label alignment_as_expected;
2733 test(esp, Immediate(frame_alignment_mask));
2734 j(zero, &alignment_as_expected);
2735 // Abort if stack is not aligned.
2736 int3();
2737 bind(&alignment_as_expected);
2738 }
2739}
2740
2741
2742void MacroAssembler::Abort(BailoutReason reason) {
2743#ifdef DEBUG
2744 const char* msg = GetBailoutReason(reason);
2745 if (msg != NULL) {
2746 RecordComment("Abort message: ");
2747 RecordComment(msg);
2748 }
2749
2750 if (FLAG_trap_on_abort) {
2751 int3();
2752 return;
2753 }
2754#endif
2755
2756 push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(reason))));
2757 // Disable stub call restrictions to always allow calls to abort.
2758 if (!has_frame_) {
2759 // We don't actually want to generate a pile of code for this, so just
2760 // claim there is a stack frame, without generating one.
2761 FrameScope scope(this, StackFrame::NONE);
2762 CallRuntime(Runtime::kAbort, 1);
2763 } else {
2764 CallRuntime(Runtime::kAbort, 1);
2765 }
2766 // will not return here
2767 int3();
2768}
2769
2770
2771void MacroAssembler::LoadInstanceDescriptors(Register map,
2772 Register descriptors) {
2773 mov(descriptors, FieldOperand(map, Map::kDescriptorsOffset));
2774}
2775
2776
2777void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
2778 mov(dst, FieldOperand(map, Map::kBitField3Offset));
2779 DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
2780}
2781
2782
2783void MacroAssembler::LookupNumberStringCache(Register object,
2784 Register result,
2785 Register scratch1,
2786 Register scratch2,
2787 Label* not_found) {
2788 // Use of registers. Register result is used as a temporary.
2789 Register number_string_cache = result;
2790 Register mask = scratch1;
2791 Register scratch = scratch2;
2792
2793 // Load the number string cache.
2794 LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex);
2795 // Make the hash mask from the length of the number string cache. It
2796 // contains two elements (number and string) for each cache entry.
2797 mov(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset));
2798 shr(mask, kSmiTagSize + 1); // Untag length and divide it by two.
2799 sub(mask, Immediate(1)); // Make mask.
2800
2801 // Calculate the entry in the number string cache. The hash value in the
2802 // number string cache for smis is just the smi value, and the hash for
2803 // doubles is the xor of the upper and lower words. See
2804 // Heap::GetNumberStringCache.
2805 Label smi_hash_calculated;
2806 Label load_result_from_cache;
2807 Label not_smi;
2808 STATIC_ASSERT(kSmiTag == 0);
2809 JumpIfNotSmi(object, &not_smi, Label::kNear);
2810 mov(scratch, object);
2811 SmiUntag(scratch);
2812 jmp(&smi_hash_calculated, Label::kNear);
2813 bind(&not_smi);
2814 cmp(FieldOperand(object, HeapObject::kMapOffset),
2815 isolate()->factory()->heap_number_map());
2816 j(not_equal, not_found);
2817 STATIC_ASSERT(8 == kDoubleSize);
2818 mov(scratch, FieldOperand(object, HeapNumber::kValueOffset));
2819 xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4));
2820 // Object is heap number and hash is now in scratch. Calculate cache index.
2821 and_(scratch, mask);
2822 Register index = scratch;
2823 Register probe = mask;
2824 mov(probe,
2825 FieldOperand(number_string_cache,
2826 index,
2827 times_twice_pointer_size,
2828 FixedArray::kHeaderSize));
2829 JumpIfSmi(probe, not_found);
2830 fld_d(FieldOperand(object, HeapNumber::kValueOffset));
2831 fld_d(FieldOperand(probe, HeapNumber::kValueOffset));
2832 FCmp();
2833 j(parity_even, not_found); // Bail out if NaN is involved.
2834 j(not_equal, not_found); // The cache did not contain this value.
2835 jmp(&load_result_from_cache, Label::kNear);
2836
2837 bind(&smi_hash_calculated);
2838 // Object is smi and hash is now in scratch. Calculate cache index.
2839 and_(scratch, mask);
2840 // Check if the entry is the smi we are looking for.
2841 cmp(object,
2842 FieldOperand(number_string_cache,
2843 index,
2844 times_twice_pointer_size,
2845 FixedArray::kHeaderSize));
2846 j(not_equal, not_found);
2847
2848 // Get the result from the cache.
2849 bind(&load_result_from_cache);
2850 mov(result,
2851 FieldOperand(number_string_cache,
2852 index,
2853 times_twice_pointer_size,
2854 FixedArray::kHeaderSize + kPointerSize));
2855 IncrementCounter(isolate()->counters()->number_to_string_native(), 1);
2856}
2857
2858
2859void MacroAssembler::JumpIfInstanceTypeIsNotSequentialOneByte(
2860 Register instance_type, Register scratch, Label* failure) {
2861 if (!scratch.is(instance_type)) {
2862 mov(scratch, instance_type);
2863 }
2864 and_(scratch,
2865 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
2866 cmp(scratch, kStringTag | kSeqStringTag | kOneByteStringTag);
2867 j(not_equal, failure);
2868}
2869
2870
2871void MacroAssembler::JumpIfNotBothSequentialOneByteStrings(Register object1,
2872 Register object2,
2873 Register scratch1,
2874 Register scratch2,
2875 Label* failure) {
2876 // Check that both objects are not smis.
2877 STATIC_ASSERT(kSmiTag == 0);
2878 mov(scratch1, object1);
2879 and_(scratch1, object2);
2880 JumpIfSmi(scratch1, failure);
2881
2882 // Load instance type for both strings.
2883 mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
2884 mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
2885 movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
2886 movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));
2887
2888 // Check that both are flat one-byte strings.
2889 const int kFlatOneByteStringMask =
2890 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
2891 const int kFlatOneByteStringTag =
2892 kStringTag | kOneByteStringTag | kSeqStringTag;
2893 // Interleave bits from both instance types and compare them in one check.
2894 DCHECK_EQ(0, kFlatOneByteStringMask & (kFlatOneByteStringMask << 3));
2895 and_(scratch1, kFlatOneByteStringMask);
2896 and_(scratch2, kFlatOneByteStringMask);
2897 lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
2898 cmp(scratch1, kFlatOneByteStringTag | (kFlatOneByteStringTag << 3));
2899 j(not_equal, failure);
2900}
2901
2902
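// A unique name is either an internalized string or a symbol; anything else
// branches to not_unique_name.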
2903void MacroAssembler::JumpIfNotUniqueNameInstanceType(Operand operand,
2904 Label* not_unique_name,
2905 Label::Distance distance) {
2906 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
2907 Label succeed;
2908 test(operand, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
2909 j(zero, &succeed);
2910 cmpb(operand, static_cast<uint8_t>(SYMBOL_TYPE));
2911 j(not_equal, not_unique_name, distance);
2912
2913 bind(&succeed);
2914}
2915
2916
2917void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
2918 Register index,
2919 Register value,
2920 uint32_t encoding_mask) {
2921 Label is_object;
2922 JumpIfNotSmi(string, &is_object, Label::kNear);
2923 Abort(kNonObject);
2924 bind(&is_object);
2925
2926 push(value);
2927 mov(value, FieldOperand(string, HeapObject::kMapOffset));
2928 movzx_b(value, FieldOperand(value, Map::kInstanceTypeOffset));
2929
2930 and_(value, Immediate(kStringRepresentationMask | kStringEncodingMask));
2931 cmp(value, Immediate(encoding_mask));
2932 pop(value);
2933 Check(equal, kUnexpectedStringType);
2934
2935 // The index is assumed to be untagged coming in; tag it to compare with the
2936 // string length without using a temp register. It is restored at the end of
2937 // this function.
2938 SmiTag(index);
2939 Check(no_overflow, kIndexIsTooLarge);
2940
2941 cmp(index, FieldOperand(string, String::kLengthOffset));
2942 Check(less, kIndexIsTooLarge);
2943
2944 cmp(index, Immediate(Smi::FromInt(0)));
2945 Check(greater_equal, kIndexIsNegative);
2946
2947 // Restore the index
2948 SmiUntag(index);
2949}
2950
2951
2952void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
2953 int frame_alignment = base::OS::ActivationFrameAlignment();
2954 if (frame_alignment != 0) {
2955 // Make stack end at alignment and make room for num_arguments words
2956 // and the original value of esp.
2957 mov(scratch, esp);
2958 sub(esp, Immediate((num_arguments + 1) * kPointerSize));
2959 DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
2960 and_(esp, -frame_alignment);
2961 mov(Operand(esp, num_arguments * kPointerSize), scratch);
2962 } else {
2963 sub(esp, Immediate(num_arguments * kPointerSize));
2964 }
2965}
2966
2967
2968void MacroAssembler::CallCFunction(ExternalReference function,
2969 int num_arguments) {
2970 // Trashing eax is ok as it will be the return value.
2971 mov(eax, Immediate(function));
2972 CallCFunction(eax, num_arguments);
2973}
2974
2975
2976void MacroAssembler::CallCFunction(Register function,
2977 int num_arguments) {
2978 DCHECK(has_frame());
2979 // Check stack alignment.
2980 if (emit_debug_code()) {
2981 CheckStackAlignment();
2982 }
2983
2984 call(function);
2985 if (base::OS::ActivationFrameAlignment() != 0) {
2986 mov(esp, Operand(esp, num_arguments * kPointerSize));
2987 } else {
2988 add(esp, Immediate(num_arguments * kPointerSize));
2989 }
2990}
2991
2992
2993#ifdef DEBUG
2994bool AreAliased(Register reg1,
2995 Register reg2,
2996 Register reg3,
2997 Register reg4,
2998 Register reg5,
2999 Register reg6,
3000 Register reg7,
3001 Register reg8) {
3002 int n_of_valid_regs = reg1.is_valid() + reg2.is_valid() +
3003 reg3.is_valid() + reg4.is_valid() + reg5.is_valid() + reg6.is_valid() +
3004 reg7.is_valid() + reg8.is_valid();
3005
3006 RegList regs = 0;
3007 if (reg1.is_valid()) regs |= reg1.bit();
3008 if (reg2.is_valid()) regs |= reg2.bit();
3009 if (reg3.is_valid()) regs |= reg3.bit();
3010 if (reg4.is_valid()) regs |= reg4.bit();
3011 if (reg5.is_valid()) regs |= reg5.bit();
3012 if (reg6.is_valid()) regs |= reg6.bit();
3013 if (reg7.is_valid()) regs |= reg7.bit();
3014 if (reg8.is_valid()) regs |= reg8.bit();
3015 int n_of_non_aliasing_regs = NumRegs(regs);
3016
3017 return n_of_valid_regs != n_of_non_aliasing_regs;
3018}
3019#endif
3020
3021
3022CodePatcher::CodePatcher(byte* address, int size)
3023 : address_(address),
3024 size_(size),
3025 masm_(NULL, address, size + Assembler::kGap) {
3026 // Create a new macro assembler pointing to the address of the code to patch.
3027 // The size is adjusted with kGap in order for the assembler to generate size
3028 // bytes of instructions without failing with buffer size constraints.
3029 DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
3030}
3031
3032
3033CodePatcher::~CodePatcher() {
3034 // Indicate that code has changed.
3035 CpuFeatures::FlushICache(address_, size_);
3036
3037 // Check that the code was patched as expected.
3038 DCHECK(masm_.pc_ == address_ + size_);
3039 DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
3040}
3041
3042
3043void MacroAssembler::CheckPageFlag(
3044 Register object,
3045 Register scratch,
3046 int mask,
3047 Condition cc,
3048 Label* condition_met,
3049 Label::Distance condition_met_distance) {
3050 DCHECK(cc == zero || cc == not_zero);
3051 if (scratch.is(object)) {
3052 and_(scratch, Immediate(~Page::kPageAlignmentMask));
3053 } else {
3054 mov(scratch, Immediate(~Page::kPageAlignmentMask));
3055 and_(scratch, object);
3056 }
3057 if (mask < (1 << kBitsPerByte)) {
3058 test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
3059 static_cast<uint8_t>(mask));
3060 } else {
3061 test(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
3062 }
3063 j(cc, condition_met, condition_met_distance);
3064}
3065
3066
3067void MacroAssembler::CheckPageFlagForMap(
3068 Handle<Map> map,
3069 int mask,
3070 Condition cc,
3071 Label* condition_met,
3072 Label::Distance condition_met_distance) {
3073 DCHECK(cc == zero || cc == not_zero);
3074 Page* page = Page::FromAddress(map->address());
3075 DCHECK(!serializer_enabled()); // Serializer cannot match page_flags.
3076 ExternalReference reference(ExternalReference::page_flags(page));
3077 // The inlined static address check of the page's flags relies
3078 // on maps never being compacted.
3079 DCHECK(!isolate()->heap()->mark_compact_collector()->
3080 IsOnEvacuationCandidate(*map));
3081 if (mask < (1 << kBitsPerByte)) {
3082 test_b(Operand::StaticVariable(reference), static_cast<uint8_t>(mask));
3083 } else {
3084 test(Operand::StaticVariable(reference), Immediate(mask));
3085 }
3086 j(cc, condition_met, condition_met_distance);
3087}
3088
3089
3090void MacroAssembler::JumpIfBlack(Register object,
3091 Register scratch0,
3092 Register scratch1,
3093 Label* on_black,
3094 Label::Distance on_black_near) {
3095 HasColor(object, scratch0, scratch1,
3096 on_black, on_black_near,
3097 1, 0); // kBlackBitPattern.
3098 DCHECK(strcmp(Marking::kBlackBitPattern, "10") == 0);
3099}
3100
3101
3102void MacroAssembler::HasColor(Register object,
3103 Register bitmap_scratch,
3104 Register mask_scratch,
3105 Label* has_color,
3106 Label::Distance has_color_distance,
3107 int first_bit,
3108 int second_bit) {
3109 DCHECK(!AreAliased(object, bitmap_scratch, mask_scratch, ecx));
3110
3111 GetMarkBits(object, bitmap_scratch, mask_scratch);
3112
3113 Label other_color, word_boundary;
3114 test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
3115 j(first_bit == 1 ? zero : not_zero, &other_color, Label::kNear);
3116 add(mask_scratch, mask_scratch); // Shift left 1 by adding.
3117 j(zero, &word_boundary, Label::kNear);
3118 test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
3119 j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
3120 jmp(&other_color, Label::kNear);
3121
3122 bind(&word_boundary);
3123 test_b(Operand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize), 1);
3124
3125 j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
3126 bind(&other_color);
3127}
3128
3129
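// Compute the marking-bitmap location for addr_reg: bitmap_reg is set to the
// page start plus the byte offset of the bitmap cell (callers address the
// cell via Operand(bitmap_reg, MemoryChunk::kHeaderSize)), and mask_reg gets
// a one-bit mask selecting addr_reg's mark bit within that cell. Clobbers ecx.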
3130void MacroAssembler::GetMarkBits(Register addr_reg,
3131 Register bitmap_reg,
3132 Register mask_reg) {
3133 DCHECK(!AreAliased(addr_reg, mask_reg, bitmap_reg, ecx));
3134 mov(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
3135 and_(bitmap_reg, addr_reg);
3136 mov(ecx, addr_reg);
3137 int shift =
3138 Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
3139 shr(ecx, shift);
3140 and_(ecx,
3141 (Page::kPageAlignmentMask >> shift) & ~(Bitmap::kBytesPerCell - 1));
3142
3143 add(bitmap_reg, ecx);
3144 mov(ecx, addr_reg);
3145 shr(ecx, kPointerSizeLog2);
3146 and_(ecx, (1 << Bitmap::kBitsPerCellLog2) - 1);
3147 mov(mask_reg, Immediate(1));
3148 shl_cl(mask_reg);
3149}
3150
3151
3152void MacroAssembler::EnsureNotWhite(
3153 Register value,
3154 Register bitmap_scratch,
3155 Register mask_scratch,
3156 Label* value_is_white_and_not_data,
3157 Label::Distance distance) {
3158 DCHECK(!AreAliased(value, bitmap_scratch, mask_scratch, ecx));
3159 GetMarkBits(value, bitmap_scratch, mask_scratch);
3160
3161 // If the value is black or grey we don't need to do anything.
3162 DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0);
3163 DCHECK(strcmp(Marking::kBlackBitPattern, "10") == 0);
3164 DCHECK(strcmp(Marking::kGreyBitPattern, "11") == 0);
3165 DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0);
3166
3167 Label done;
3168
3169 // Since both black and grey have a 1 in the first position and white does
3170 // not have a 1 there, we only need to check one bit.
3171 test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
3172 j(not_zero, &done, Label::kNear);
3173
3174 if (emit_debug_code()) {
3175 // Check for impossible bit pattern.
3176 Label ok;
3177 push(mask_scratch);
3178 // shl. May overflow making the check conservative.
3179 add(mask_scratch, mask_scratch);
3180 test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
3181 j(zero, &ok, Label::kNear);
3182 int3();
3183 bind(&ok);
3184 pop(mask_scratch);
3185 }
3186
3187 // Value is white. We check whether it is data that doesn't need scanning.
3188 // Currently only checks for HeapNumber and non-cons strings.
3189 Register map = ecx; // Holds map while checking type.
3190 Register length = ecx; // Holds length of object after checking type.
3191 Label not_heap_number;
3192 Label is_data_object;
3193
3194 // Check for heap-number
3195 mov(map, FieldOperand(value, HeapObject::kMapOffset));
3196 cmp(map, isolate()->factory()->heap_number_map());
3197 j(not_equal, &not_heap_number, Label::kNear);
3198 mov(length, Immediate(HeapNumber::kSize));
3199 jmp(&is_data_object, Label::kNear);
3200
3201 bind(&not_heap_number);
3202 // Check for strings.
3203 DCHECK(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1);
3204 DCHECK(kNotStringTag == 0x80 && kIsNotStringMask == 0x80);
3205 // If it's a string and it's not a cons string then it's an object containing
3206 // no GC pointers.
3207 Register instance_type = ecx;
3208 movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
3209 test_b(instance_type, kIsIndirectStringMask | kIsNotStringMask);
3210 j(not_zero, value_is_white_and_not_data);
3211 // It's a non-indirect (non-cons and non-slice) string.
3212 // If it's external, the length is just ExternalString::kSize.
3213 // Otherwise it's String::kHeaderSize + string->length() * (1 or 2).
3214 Label not_external;
3215 // External strings are the only ones with the kExternalStringTag bit
3216 // set.
3217 DCHECK_EQ(0, kSeqStringTag & kExternalStringTag);
3218 DCHECK_EQ(0, kConsStringTag & kExternalStringTag);
3219 test_b(instance_type, kExternalStringTag);
3220 j(zero, &not_external, Label::kNear);
3221 mov(length, Immediate(ExternalString::kSize));
3222 jmp(&is_data_object, Label::kNear);
3223
3224 bind(&not_external);
3225 // Sequential string, either Latin1 or UC16.
3226 DCHECK(kOneByteStringTag == 0x04);
3227 and_(length, Immediate(kStringEncodingMask));
3228 xor_(length, Immediate(kStringEncodingMask));
3229 add(length, Immediate(0x04));
3230 // Value now either 4 (if Latin1) or 8 (if UC16), i.e., char-size shifted
3231 // by 2. If we multiply the string length as smi by this, it still
3232 // won't overflow a 32-bit value.
3233 DCHECK_EQ(SeqOneByteString::kMaxSize, SeqTwoByteString::kMaxSize);
3234 DCHECK(SeqOneByteString::kMaxSize <=
3235 static_cast<int>(0xffffffffu >> (2 + kSmiTagSize)));
3236 imul(length, FieldOperand(value, String::kLengthOffset));
3237 shr(length, 2 + kSmiTagSize + kSmiShiftSize);
3238 add(length, Immediate(SeqString::kHeaderSize + kObjectAlignmentMask));
3239 and_(length, Immediate(~kObjectAlignmentMask));
3240
3241 bind(&is_data_object);
3242 // Value is a data object, and it is white. Mark it black. Since we know
3243 // that the object is white we can make it black by flipping one bit.
3244 or_(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);
3245
3246 and_(bitmap_scratch, Immediate(~Page::kPageAlignmentMask));
3247 add(Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset),
3248 length);
3249 if (emit_debug_code()) {
3250 mov(length, Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset));
3251 cmp(length, Operand(bitmap_scratch, MemoryChunk::kSizeOffset));
3252 Check(less_equal, kLiveBytesCountOverflowChunkSize);
3253 }
3254
3255 bind(&done);
3256}
3257
3258
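// Extract the EnumLength field from the map's bit field 3 into dst, as a smi.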
3259void MacroAssembler::EnumLength(Register dst, Register map) {
3260 STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
3261 mov(dst, FieldOperand(map, Map::kBitField3Offset));
3262 and_(dst, Immediate(Map::EnumLengthBits::kMask));
3263 SmiTag(dst);
3264}
3265
3266
3267void MacroAssembler::CheckEnumCache(Label* call_runtime) {
3268 Label next, start;
3269 mov(ecx, eax);
3270
3271 // Check if the enum length field is properly initialized, indicating that
3272 // there is an enum cache.
3273 mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));
3274
3275 EnumLength(edx, ebx);
3276 cmp(edx, Immediate(Smi::FromInt(kInvalidEnumCacheSentinel)));
3277 j(equal, call_runtime);
3278
3279 jmp(&start);
3280
3281 bind(&next);
3282 mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));
3283
3284 // For all objects but the receiver, check that the cache is empty.
3285 EnumLength(edx, ebx);
3286 cmp(edx, Immediate(Smi::FromInt(0)));
3287 j(not_equal, call_runtime);
3288
3289 bind(&start);
3290
3291 // Check that there are no elements. Register ecx contains the current JS
3292 // object we've reached through the prototype chain.
3293 Label no_elements;
3294 mov(ecx, FieldOperand(ecx, JSObject::kElementsOffset));
3295 cmp(ecx, isolate()->factory()->empty_fixed_array());
3296 j(equal, &no_elements);
3297
3298 // Second chance, the object may be using the empty slow element dictionary.
3299 cmp(ecx, isolate()->factory()->empty_slow_element_dictionary());
3300 j(not_equal, call_runtime);
3301
3302 bind(&no_elements);
3303 mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
3304 cmp(ecx, isolate()->factory()->null_value());
3305 j(not_equal, &next);
3306}
3307
3308
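// Check whether an AllocationMemento directly follows the JSArray in new
// space. Branches to no_memento_found when the word after the array cannot be
// a memento; otherwise compares its map so the caller can j(equal, ...) on a
// real memento.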
3309void MacroAssembler::TestJSArrayForAllocationMemento(
3310 Register receiver_reg,
3311 Register scratch_reg,
3312 Label* no_memento_found) {
3313 ExternalReference new_space_start =
3314 ExternalReference::new_space_start(isolate());
3315 ExternalReference new_space_allocation_top =
3316 ExternalReference::new_space_allocation_top_address(isolate());
3317
3318 lea(scratch_reg, Operand(receiver_reg,
3319 JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag));
3320 cmp(scratch_reg, Immediate(new_space_start));
3321 j(less, no_memento_found);
3322 cmp(scratch_reg, Operand::StaticVariable(new_space_allocation_top));
3323 j(greater, no_memento_found);
3324 cmp(MemOperand(scratch_reg, -AllocationMemento::kSize),
3325 Immediate(isolate()->factory()->allocation_memento_map()));
3326}
3327
3328
3329void MacroAssembler::JumpIfDictionaryInPrototypeChain(
3330 Register object,
3331 Register scratch0,
3332 Register scratch1,
3333 Label* found) {
3334 DCHECK(!scratch1.is(scratch0));
3335 Factory* factory = isolate()->factory();
3336 Register current = scratch0;
3337 Label loop_again;
3338
3339 // current starts at the object itself.
3340 mov(current, object);
3341
3342 // Loop based on the map going up the prototype chain.
3343 bind(&loop_again);
3344 mov(current, FieldOperand(current, HeapObject::kMapOffset));
3345 mov(scratch1, FieldOperand(current, Map::kBitField2Offset));
3346 DecodeField<Map::ElementsKindBits>(scratch1);
3347 cmp(scratch1, Immediate(DICTIONARY_ELEMENTS));
3348 j(equal, found);
3349 mov(current, FieldOperand(current, Map::kPrototypeOffset));
3350 cmp(current, Immediate(factory->null_value()));
3351 j(not_equal, &loop_again);
3352}
3353
3354
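// Truncating signed division of dividend by the constant divisor, using a
// magic-number multiplication (base::SignedDivisionByConstant, in the style
// of Hacker's Delight). The quotient is left in edx; eax is clobbered and
// dividend is preserved.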
3355void MacroAssembler::TruncatingDiv(Register dividend, int32_t divisor) {
3356 DCHECK(!dividend.is(eax));
3357 DCHECK(!dividend.is(edx));
3358 base::MagicNumbersForDivision<uint32_t> mag =
3359 base::SignedDivisionByConstant(static_cast<uint32_t>(divisor));
3360 mov(eax, Immediate(mag.multiplier));
3361 imul(dividend);
3362 bool neg = (mag.multiplier & (static_cast<uint32_t>(1) << 31)) != 0;
3363 if (divisor > 0 && neg) add(edx, dividend);
3364 if (divisor < 0 && !neg && mag.multiplier > 0) sub(edx, dividend);
3365 if (mag.shift > 0) sar(edx, mag.shift);
3366 mov(eax, dividend);
3367 shr(eax, 31);
3368 add(edx, eax);
3369}
3370
3371
3372} } // namespace v8::internal
3373
3374#endif // V8_TARGET_ARCH_X87