blob: 5f80b4d52f15dde6268c568abace15302ef96e81 [file] [log] [blame]
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001// Copyright 2012 the V8 project authors. All rights reserved.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
Steve Blocka7e24c12009-10-30 11:49:00 +00004
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005#if V8_TARGET_ARCH_IA32
Leon Clarkef7060e22010-06-03 12:02:55 +01006
Ben Murdochb8a8cc12014-11-26 15:28:44 +00007#include "src/base/bits.h"
8#include "src/base/division-by-constant.h"
9#include "src/bootstrapper.h"
10#include "src/codegen.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000011#include "src/debug/debug.h"
12#include "src/ia32/frames-ia32.h"
13#include "src/ia32/macro-assembler-ia32.h"
Emily Bernierd0a1eb72015-03-24 16:35:39 -040014#include "src/runtime/runtime.h"
Steve Blocka7e24c12009-10-30 11:49:00 +000015
16namespace v8 {
17namespace internal {
18
19// -------------------------------------------------------------------------
20// MacroAssembler implementation.
21
// Constructs a MacroAssembler emitting into |buffer| of |size| bytes.
// When |create_code_object| is kYes, code_object_ is seeded with a fresh
// handle to the undefined value; it is later patched to the actual Code
// object once the code is allocated.
MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size,
                               CodeObjectRequired create_code_object)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      has_frame_(false) {
  if (create_code_object == CodeObjectRequired::kYes) {
    code_object_ =
        Handle<Object>::New(isolate()->heap()->undefined_value(), isolate());
  }
}
32
33
// Loads |src| into |dst| using the narrowest move that matches the
// representation |r|: sign-/zero-extending byte or word moves for the
// 8/16-bit integer representations, a plain 32-bit mov otherwise.
// Double representations must go through the FP registers instead.
void MacroAssembler::Load(Register dst, const Operand& src, Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8()) {
    movsx_b(dst, src);   // sign-extend 8 -> 32
  } else if (r.IsUInteger8()) {
    movzx_b(dst, src);   // zero-extend 8 -> 32
  } else if (r.IsInteger16()) {
    movsx_w(dst, src);   // sign-extend 16 -> 32
  } else if (r.IsUInteger16()) {
    movzx_w(dst, src);   // zero-extend 16 -> 32
  } else {
    mov(dst, src);
  }
}


// Stores |src| to |dst| using the width implied by representation |r|.
// For full-width stores, debug builds additionally assert that the value's
// tag matches a HeapObject/Smi representation before writing it.
void MacroAssembler::Store(Register src, const Operand& dst, Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8() || r.IsUInteger8()) {
    mov_b(dst, src);
  } else if (r.IsInteger16() || r.IsUInteger16()) {
    mov_w(dst, src);
  } else {
    if (r.IsHeapObject()) {
      AssertNotSmi(src);
    } else if (r.IsSmi()) {
      AssertSmi(src);
    }
    mov(dst, src);
  }
}
65
66
// Loads the root value at |index| into |destination|. Roots that are
// guaranteed immutable can be embedded as an immediate handle; all others
// are fetched from the roots array via an indexed StaticArray operand
// (|destination| doubles as the index scratch before being overwritten).
void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  if (isolate()->heap()->RootCanBeTreatedAsConstant(index)) {
    mov(destination, isolate()->heap()->root_handle(index));
    return;
  }
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(destination, Immediate(index));
  mov(destination, Operand::StaticArray(destination,
                                        times_pointer_size,
                                        roots_array_start));
}


// Stores |source| into the roots-array slot at |index|, clobbering
// |scratch|. Only legal for roots that may be written after heap
// initialization (checked by the DCHECK).
void MacroAssembler::StoreRoot(Register source,
                               Register scratch,
                               Heap::RootListIndex index) {
  DCHECK(Heap::RootCanBeWrittenAfterInitialization(index));
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(scratch, Immediate(index));
  mov(Operand::StaticArray(scratch, times_pointer_size, roots_array_start),
      source);
}


// Compares |with| against the root at |index|, reading the root through
// the roots array (works for any root); clobbers |scratch|.
void MacroAssembler::CompareRoot(Register with,
                                 Register scratch,
                                 Heap::RootListIndex index) {
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(scratch, Immediate(index));
  cmp(with, Operand::StaticArray(scratch,
                                 times_pointer_size,
                                 roots_array_start));
}


// Scratch-free variant: compares against the root embedded as an
// immediate handle, so the root must be a compile-time constant.
void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  cmp(with, isolate()->heap()->root_handle(index));
}


// Operand variant of the constant-root comparison above.
void MacroAssembler::CompareRoot(const Operand& with,
                                 Heap::RootListIndex index) {
  DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  cmp(with, isolate()->heap()->root_handle(index));
}


// Pushes a constant root onto the stack as an immediate handle.
void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  Push(isolate()->heap()->root_handle(index));
}
122
123
// Jumps to |condition_met| if |object|'s page is (cc == equal) or is not
// (cc == not_equal) in new-space. Masks the object address down to its
// page header and tests the page's space flags; clobbers |scratch|.
void MacroAssembler::InNewSpace(
    Register object,
    Register scratch,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  DCHECK(cc == equal || cc == not_equal);
  // Compute the page start: clear the in-page offset bits of the address.
  if (scratch.is(object)) {
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    mov(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  // Check that we can use a test_b (both flag bits fit in the low byte).
  DCHECK(MemoryChunk::IN_FROM_SPACE < 8);
  DCHECK(MemoryChunk::IN_TO_SPACE < 8);
  int mask = (1 << MemoryChunk::IN_FROM_SPACE)
           | (1 << MemoryChunk::IN_TO_SPACE);
  // If non-zero, the page belongs to new-space.
  test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
         static_cast<uint8_t>(mask));
  j(cc, condition_met, condition_met_distance);
}
Steve Block6ded16b2010-05-10 14:33:55 +0100147
Steve Blocka7e24c12009-10-30 11:49:00 +0000148
// Records |addr| (a slot holding a pointer into new-space) in the store
// buffer, and calls the StoreBufferOverflowStub when the buffer fills up.
// |and_then| selects the epilogue: kReturnAtEnd emits ret(0) on both the
// fast and overflow paths, kFallThroughAtEnd falls through.
void MacroAssembler::RememberedSetHelper(
    Register object,  // Only used for debug checks.
    Register addr,
    Register scratch,
    SaveFPRegsMode save_fp,
    MacroAssembler::RememberedSetFinalAction and_then) {
  Label done;
  if (emit_debug_code()) {
    // The object whose slot is being recorded must not itself live in
    // new-space (new->new pointers need no remembered-set entry).
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
    int3();
    bind(&ok);
  }
  // Load store buffer top.
  ExternalReference store_buffer =
      ExternalReference::store_buffer_top(isolate());
  mov(scratch, Operand::StaticVariable(store_buffer));
  // Store pointer to buffer.
  mov(Operand(scratch, 0), addr);
  // Increment buffer top.
  add(scratch, Immediate(kPointerSize));
  // Write back new top of buffer.
  mov(Operand::StaticVariable(store_buffer), scratch);
  // Call stub on end of buffer.
  // Check for end of buffer.
  test(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit));
  if (and_then == kReturnAtEnd) {
    Label buffer_overflowed;
    j(not_equal, &buffer_overflowed, Label::kNear);
    ret(0);
    bind(&buffer_overflowed);
  } else {
    DCHECK(and_then == kFallThroughAtEnd);
    j(equal, &done, Label::kNear);
  }
  StoreBufferOverflowStub store_buffer_overflow(isolate(), save_fp);
  CallStub(&store_buffer_overflow);
  if (and_then == kReturnAtEnd) {
    ret(0);
  } else {
    DCHECK(and_then == kFallThroughAtEnd);
    bind(&done);
  }
}
193
194
// Clamps the double in |input_reg| to the uint8 range [0, 255] and leaves
// the result in |result_reg|. Values already in range pass through; out of
// range values are clamped branchlessly; NaN and conversion failures clamp
// via the slow path. Clobbers |scratch_reg|.
void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
                                        XMMRegister scratch_reg,
                                        Register result_reg) {
  Label done;
  Label conv_failure;
  xorps(scratch_reg, scratch_reg);  // scratch_reg := +0.0 for the NaN check.
  cvtsd2si(result_reg, input_reg);
  // Fast path: conversion already produced a value in [0, 255].
  test(result_reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  // cvtsd2si yields 0x80000000 (kMinInt) on failure; subtracting 1 from it
  // overflows, so the overflow flag identifies a failed conversion.
  cmp(result_reg, Immediate(0x1));
  j(overflow, &conv_failure, Label::kNear);
  // Branchless clamp: sign bit of the converted value selects 0 (negative)
  // or 255 (positive overflow).
  mov(result_reg, Immediate(0));
  setcc(sign, result_reg);          // 1 if negative, 0 otherwise.
  sub(result_reg, Immediate(1));    // 0 if negative, -1 otherwise.
  and_(result_reg, Immediate(255)); // 0 or 255.
  jmp(&done, Label::kNear);
  bind(&conv_failure);
  // NaN (unordered compare -> below is false after j(below)) clamps to 0;
  // otherwise compare against +0.0 to pick 0 or 255.
  Move(result_reg, Immediate(0));
  ucomisd(input_reg, scratch_reg);
  j(below, &done, Label::kNear);
  Move(result_reg, Immediate(255));
  bind(&done);
}


// Clamps the signed 32-bit integer in |reg| to [0, 255] in place.
void MacroAssembler::ClampUint8(Register reg) {
  Label done;
  // Already in range?
  test(reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  setcc(negative, reg); // 1 if negative, 0 if positive.
  dec_b(reg); // 0 if negative, 255 if positive.
  bind(&done);
}
228
229
// Calls the DoubleToIStub to truncate the double found at
// [input_reg + offset] into |result_reg|. Used as the fallback when the
// inline cvttsd2si conversion overflows.
void MacroAssembler::SlowTruncateToI(Register result_reg,
                                     Register input_reg,
                                     int offset) {
  DoubleToIStub stub(isolate(), input_reg, result_reg, offset, true);
  call(stub.GetCode(), RelocInfo::CODE_TARGET);
}


// Truncates the double in |input_reg| to an int32 in |result_reg|.
// Fast path uses cvttsd2si; on overflow the value is spilled to the stack
// and converted via SlowTruncateToI.
void MacroAssembler::TruncateDoubleToI(Register result_reg,
                                       XMMRegister input_reg) {
  Label done;
  cvttsd2si(result_reg, Operand(input_reg));
  // cvttsd2si yields 0x80000000 (kMinInt) on overflow; (kMinInt - 1)
  // overflows, so no_overflow means the conversion succeeded.
  cmp(result_reg, 0x1);
  j(no_overflow, &done, Label::kNear);

  sub(esp, Immediate(kDoubleSize));
  movsd(MemOperand(esp, 0), input_reg);
  SlowTruncateToI(result_reg, esp, 0);
  add(esp, Immediate(kDoubleSize));
  bind(&done);
}
251
252
// Converts the double in |input_reg| to an int32 in |result_reg|, jumping
// to |lost_precision| if the value is not exactly representable, to
// |is_nan| on NaN, and (with FAIL_ON_MINUS_ZERO) to |minus_zero| on -0.
// Clobbers |scratch|.
void MacroAssembler::DoubleToI(Register result_reg, XMMRegister input_reg,
                               XMMRegister scratch,
                               MinusZeroMode minus_zero_mode,
                               Label* lost_precision, Label* is_nan,
                               Label* minus_zero, Label::Distance dst) {
  DCHECK(!input_reg.is(scratch));
  // Round-trip: convert to int and back, then compare with the original.
  cvttsd2si(result_reg, Operand(input_reg));
  Cvtsi2sd(scratch, Operand(result_reg));
  ucomisd(scratch, input_reg);
  j(not_equal, lost_precision, dst);
  j(parity_even, is_nan, dst);  // ucomisd sets PF on unordered (NaN).
  if (minus_zero_mode == FAIL_ON_MINUS_ZERO) {
    Label done;
    // The integer converted back is equal to the original. We
    // only have to test if we got -0 as an input.
    test(result_reg, Operand(result_reg));
    j(not_zero, &done, Label::kNear);
    movmskpd(result_reg, input_reg);
    // Bit 0 contains the sign of the double in input_reg.
    // If input was positive, we are ok and return 0, otherwise
    // jump to minus_zero.
    and_(result_reg, 1);
    j(not_zero, minus_zero, dst);
    bind(&done);
  }
}
279
280
// Truncates the HeapNumber in |input_reg| to an int32 in |result_reg|.
// Uses SSE3 fisttp when available (64-bit truncation handles the full
// exponent range up to 2^63), otherwise cvttsd2si with a stub fallback.
void MacroAssembler::TruncateHeapNumberToI(Register result_reg,
                                           Register input_reg) {
  Label done, slow_case;

  if (CpuFeatures::IsSupported(SSE3)) {
    CpuFeatureScope scope(this, SSE3);
    Label convert;
    // Use more powerful conversion when sse3 is available.
    // Load x87 register with heap number.
    fld_d(FieldOperand(input_reg, HeapNumber::kValueOffset));
    // Get exponent alone and check for too-big exponent.
    mov(result_reg, FieldOperand(input_reg, HeapNumber::kExponentOffset));
    and_(result_reg, HeapNumber::kExponentMask);
    const uint32_t kTooBigExponent =
        (HeapNumber::kExponentBias + 63) << HeapNumber::kExponentShift;
    cmp(Operand(result_reg), Immediate(kTooBigExponent));
    j(greater_equal, &slow_case, Label::kNear);

    // Reserve space for 64 bit answer.
    sub(Operand(esp), Immediate(kDoubleSize));
    // Do conversion, which cannot fail because we checked the exponent.
    fisttp_d(Operand(esp, 0));
    mov(result_reg, Operand(esp, 0));  // Low word of answer is the result.
    add(Operand(esp), Immediate(kDoubleSize));
    jmp(&done, Label::kNear);

    // Slow case.
    bind(&slow_case);
    if (input_reg.is(result_reg)) {
      // Input is clobbered. Restore number from fpu stack
      sub(Operand(esp), Immediate(kDoubleSize));
      fstp_d(Operand(esp, 0));
      SlowTruncateToI(result_reg, esp, 0);
      add(esp, Immediate(kDoubleSize));
    } else {
      // Pop the value off the x87 stack; the stub reads it from the
      // heap number directly.
      fstp(0);
      SlowTruncateToI(result_reg, input_reg);
    }
  } else {
    movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
    cvttsd2si(result_reg, Operand(xmm0));
    // cvttsd2si yields 0x80000000 (kMinInt) on overflow.
    cmp(result_reg, 0x1);
    j(no_overflow, &done, Label::kNear);
    // Check if the input was 0x80000000 (kMinInt).
    // If no, then we got an overflow and we deoptimize.
    ExternalReference min_int = ExternalReference::address_of_min_int();
    ucomisd(xmm0, Operand::StaticVariable(min_int));
    j(not_equal, &slow_case, Label::kNear);
    j(parity_even, &slow_case, Label::kNear);  // NaN.
    jmp(&done, Label::kNear);

    // Slow case.
    bind(&slow_case);
    if (input_reg.is(result_reg)) {
      // Input is clobbered. Restore number from double scratch.
      sub(esp, Immediate(kDoubleSize));
      movsd(MemOperand(esp, 0), xmm0);
      SlowTruncateToI(result_reg, esp, 0);
      add(esp, Immediate(kDoubleSize));
    } else {
      SlowTruncateToI(result_reg, input_reg);
    }
  }
  bind(&done);
}
346
347
// Loads the uint32 at |src| into |dst| as a double. The value is first
// converted as a signed int32; when the sign bit was set, 2^32 (the uint32
// bias) is added to correct the result.
void MacroAssembler::LoadUint32(XMMRegister dst, const Operand& src) {
  Label done;
  cmp(src, Immediate(0));
  ExternalReference uint32_bias = ExternalReference::address_of_uint32_bias();
  // Cvtsi2sd (xorps + cvtsi2sd) does not modify EFLAGS, so the flags from
  // the cmp above are still valid at the j(not_sign) below.
  Cvtsi2sd(dst, src);
  j(not_sign, &done, Label::kNear);
  addsd(dst, Operand::StaticVariable(uint32_bias));
  bind(&done);
}
357
358
// Write-barrier helper for a store of |value| into the FixedArray |object|
// at smi index |index|. Computes the slot address into |index| (clobbering
// it) and delegates to RecordWrite. Both |value| and |index| are clobbered;
// in debug code they are zapped to catch stale uses.
void MacroAssembler::RecordWriteArray(
    Register object,
    Register value,
    Register index,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    DCHECK_EQ(0, kSmiTag);
    test(value, Immediate(kSmiTagMask));
    j(zero, &done);
  }

  // Array access: calculate the destination address in the same manner as
  // KeyedStoreIC::GenerateGeneric. Multiply a smi by 2 to get an offset
  // into an array of words.
  Register dst = index;
  lea(dst, Operand(object, index, times_half_pointer_size,
                   FixedArray::kHeaderSize - kHeapObjectTag));

  RecordWrite(object, dst, value, save_fp, remembered_set_action,
              OMIT_SMI_CHECK, pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(index, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}
397
398
// Write-barrier helper for a store of |value| into the in-object field of
// |object| at |offset|. Computes the field address into |dst| (clobbering
// it) and delegates to RecordWrite. |value| and |dst| are clobbered; in
// debug code they are zapped to catch stale uses.
void MacroAssembler::RecordWriteField(
    Register object,
    int offset,
    Register value,
    Register dst,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done, Label::kNear);
  }

  // Although the object register is tagged, the offset is relative to the
  // start of the object, so the offset must be a multiple of kPointerSize.
  DCHECK(IsAligned(offset, kPointerSize));

  lea(dst, FieldOperand(object, offset));
  if (emit_debug_code()) {
    // Verify that the computed slot address is pointer-aligned.
    Label ok;
    test_b(dst, (1 << kPointerSizeLog2) - 1);
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  RecordWrite(object, dst, value, save_fp, remembered_set_action,
              OMIT_SMI_CHECK, pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(dst, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}
442
443
// Write barrier for storing |map| into |object|'s map slot. Clobbers
// |scratch1| (slot address) and |scratch2| (value); both are zapped in
// debug code afterwards.
void MacroAssembler::RecordWriteForMap(
    Register object,
    Handle<Map> map,
    Register scratch1,
    Register scratch2,
    SaveFPRegsMode save_fp) {
  Label done;

  Register address = scratch1;
  Register value = scratch2;
  if (emit_debug_code()) {
    // Verify that the map slot address is pointer-aligned.
    Label ok;
    lea(address, FieldOperand(object, HeapObject::kMapOffset));
    test_b(address, (1 << kPointerSizeLog2) - 1);
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  DCHECK(!object.is(value));
  DCHECK(!object.is(address));
  DCHECK(!value.is(address));
  AssertNotSmi(object);

  // Map writes only need a barrier for incremental marking; there is no
  // remembered-set update because maps are never in new space.
  if (!FLAG_incremental_marking) {
    return;
  }

  // Compute the address.
  lea(address, FieldOperand(object, HeapObject::kMapOffset));

  // A single check of the map's pages interesting flag suffices, since it is
  // only set during incremental collection, and then it's also guaranteed that
  // the from object's page's interesting flag is also set. This optimization
  // relies on the fact that maps can never be in new space.
  DCHECK(!isolate()->heap()->InNewSpace(*map));
  CheckPageFlagForMap(map,
                      MemoryChunk::kPointersToHereAreInterestingMask,
                      zero,
                      &done,
                      Label::kNear);

  RecordWriteStub stub(isolate(), object, value, address, OMIT_REMEMBERED_SET,
                       save_fp);
  CallStub(&stub);

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(scratch1, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(scratch2, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}
504
505
// Full write barrier: records the store of |value| through |address| (a
// slot inside |object|). Skips the barrier for smis (optional) and when
// page flags show neither incremental marking nor a new-space target are
// relevant; otherwise calls the RecordWriteStub. Clobbers |address| and
// |value| (zapped in debug code).
void MacroAssembler::RecordWrite(
    Register object,
    Register address,
    Register value,
    SaveFPRegsMode fp_mode,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  DCHECK(!object.is(value));
  DCHECK(!object.is(address));
  DCHECK(!value.is(address));
  AssertNotSmi(object);

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    // The slot must already contain the value being recorded.
    Label ok;
    cmp(value, Operand(address, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    // Skip barrier if writing a smi.
    JumpIfSmi(value, &done, Label::kNear);
  }

  if (pointers_to_here_check_for_value != kPointersToHereAreAlwaysInteresting) {
    CheckPageFlag(value,
                  value,  // Used as scratch.
                  MemoryChunk::kPointersToHereAreInterestingMask,
                  zero,
                  &done,
                  Label::kNear);
  }
  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

  RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
                       fp_mode);
  CallStub(&stub);

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(address, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}
573
574
// Emits a call into the runtime's HandleDebuggerStatement via the CEntry
// stub (eax = argc = 0, ebx = runtime-function reference).
void MacroAssembler::DebugBreak() {
  Move(eax, Immediate(0));
  mov(ebx, Immediate(ExternalReference(Runtime::kHandleDebuggerStatement,
                                       isolate())));
  CEntryStub ces(isolate(), 1);
  call(ces.GetCode(), RelocInfo::DEBUGGER_STATEMENT);
}


// int32 -> double conversion. Zeroing |dst| first guarantees the upper
// lane is clean, since cvtsi2sd only writes the low 64 bits of dst (and
// avoids a partial-register dependency on dst's previous contents).
void MacroAssembler::Cvtsi2sd(XMMRegister dst, const Operand& src) {
  xorps(dst, dst);
  cvtsi2sd(dst, src);
}
588
589
Steve Block053d10c2011-06-13 19:13:29 +0100590bool MacroAssembler::IsUnsafeImmediate(const Immediate& x) {
591 static const int kMaxImmediateBits = 17;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000592 if (!RelocInfo::IsNone(x.rmode_)) return false;
Steve Block053d10c2011-06-13 19:13:29 +0100593 return !is_intn(x.x_, kMaxImmediateBits);
594}
595
596
// Moves immediate |x| into |dst| without embedding the raw value in the
// instruction stream when it is "unsafe" (see IsUnsafeImmediate): the
// value is emitted XOR-ed with the jit cookie and then un-XOR-ed at
// runtime, so the plain constant never appears in generated code.
void MacroAssembler::SafeMove(Register dst, const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    Move(dst, Immediate(x.x_ ^ jit_cookie()));
    xor_(dst, jit_cookie());
  } else {
    Move(dst, x);
  }
}


// Push-variant of SafeMove: pushes the masked value and un-XORs it in
// place on the stack.
void MacroAssembler::SafePush(const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    push(Immediate(x.x_ ^ jit_cookie()));
    xor_(Operand(esp, 0), Immediate(jit_cookie()));
  } else {
    push(x);
  }
}
615
616
// Loads |heap_object|'s map into |map| and compares its instance type
// against |type| (flags set for a subsequent conditional jump).
void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}


// Compares the instance-type byte of |map| against |type|.
void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       static_cast<int8_t>(type));
}


// Jumps to |fail| unless |map| has one of the fast elements kinds
// (smi/object, packed or holey). Relies on the fast kinds occupying the
// low end of the ElementsKind enumeration (asserted below).
void MacroAssembler::CheckFastElements(Register map,
                                       Label* fail,
                                       Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleyElementValue);
  j(above, fail, distance);
}


// Jumps to |fail| unless |map| has a fast *object* elements kind, i.e.
// the kind lies strictly between the smi kinds and the holey-element
// upper bound.
void MacroAssembler::CheckFastObjectElements(Register map,
                                             Label* fail,
                                             Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleySmiElementValue);
  j(below_equal, fail, distance);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleyElementValue);
  j(above, fail, distance);
}


// Jumps to |fail| unless |map| has a fast *smi* elements kind.
void MacroAssembler::CheckFastSmiElements(Register map,
                                          Label* fail,
                                          Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleySmiElementValue);
  j(above, fail, distance);
}
669
670
// Stores the number in |maybe_number| (a smi or HeapNumber) into the
// FixedDoubleArray |elements| at smi index |key| as an unboxed double.
// Jumps to |fail| when the value is neither a smi nor a HeapNumber.
// Clobbers |scratch1| and |scratch2|.
void MacroAssembler::StoreNumberToDoubleElements(
    Register maybe_number,
    Register elements,
    Register key,
    Register scratch1,
    XMMRegister scratch2,
    Label* fail,
    int elements_offset) {
  Label smi_value, done;
  JumpIfSmi(maybe_number, &smi_value, Label::kNear);

  CheckMap(maybe_number,
           isolate()->factory()->heap_number_map(),
           fail,
           DONT_DO_SMI_CHECK);

  // Double value, turn potential sNaN into qNaN (multiplying by 1.0
  // canonicalizes the NaN payload).
  Move(scratch2, 1.0);
  mulsd(scratch2, FieldOperand(maybe_number, HeapNumber::kValueOffset));
  jmp(&done, Label::kNear);

  bind(&smi_value);
  // Value is a smi. Convert to a double and store.
  // Preserve original value.
  mov(scratch1, maybe_number);
  SmiUntag(scratch1);
  Cvtsi2sd(scratch2, scratch1);
  bind(&done);
  // key is a smi (tag = 2*value), so times_4 scales to times_8 per element.
  movsd(FieldOperand(elements, key, times_4,
                     FixedDoubleArray::kHeaderSize - elements_offset),
        scratch2);
}


// Compares |obj|'s map against the handle |map| (flags set for a
// subsequent conditional jump).
void MacroAssembler::CompareMap(Register obj, Handle<Map> map) {
  cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
}


// Jumps to |fail| unless |obj|'s map equals |map|; optionally bails out
// first when |obj| is a smi.
void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }

  CompareMap(obj, map);
  j(not_equal, fail);
}
721
722
// Jumps to the code object |success| when |obj|'s map matches the map
// held (weakly) by |cell|; falls through otherwise. Clobbers |scratch1|
// and |scratch2|.
void MacroAssembler::DispatchWeakMap(Register obj, Register scratch1,
                                     Register scratch2, Handle<WeakCell> cell,
                                     Handle<Code> success,
                                     SmiCheckType smi_check_type) {
  Label fail;
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, &fail);
  }
  mov(scratch1, FieldOperand(obj, HeapObject::kMapOffset));
  CmpWeakValue(scratch1, cell, scratch2);
  j(equal, success);

  bind(&fail);
}


// Loads |heap_object|'s map and instance type, tests the string mask and
// returns the condition (zero) under which the object is a string.
Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  test(instance_type, Immediate(kIsNotStringMask));
  return zero;
}


// Loads |heap_object|'s map and instance type, compares against the last
// Name type and returns the condition (below_equal) under which the
// object is a Name (string or symbol).
Condition MacroAssembler::IsObjectNameType(Register heap_object,
                                           Register map,
                                           Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  cmpb(instance_type, static_cast<uint8_t>(LAST_NAME_TYPE));
  return below_equal;
}
758
759
// Compares st(0) against st(1) into EFLAGS (fucomip pops st(0)), then
// pops the remaining operand, leaving the x87 stack empty.
void MacroAssembler::FCmp() {
  fucomip();
  fstp(0);
}
764
765
766void MacroAssembler::AssertNumber(Register object) {
767 if (emit_debug_code()) {
768 Label ok;
769 JumpIfSmi(object, &ok);
770 cmp(FieldOperand(object, HeapObject::kMapOffset),
771 isolate()->factory()->heap_number_map());
772 Check(equal, kOperandNotANumber);
773 bind(&ok);
Steve Block3ce2e202009-11-05 08:53:23 +0000774 }
Steve Blocka7e24c12009-10-30 11:49:00 +0000775}
776
777
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000778void MacroAssembler::AssertSmi(Register object) {
779 if (emit_debug_code()) {
780 test(object, Immediate(kSmiTagMask));
781 Check(equal, kOperandIsNotASmi);
782 }
Andrei Popescu402d9372010-02-26 13:31:12 +0000783}
784
785
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000786void MacroAssembler::AssertString(Register object) {
787 if (emit_debug_code()) {
788 test(object, Immediate(kSmiTagMask));
789 Check(not_equal, kOperandIsASmiAndNotAString);
790 push(object);
791 mov(object, FieldOperand(object, HeapObject::kMapOffset));
792 CmpInstanceType(object, FIRST_NONSTRING_TYPE);
793 pop(object);
794 Check(below, kOperandIsNotAString);
795 }
Iain Merrick75681382010-08-19 15:07:18 +0100796}
797
798
// Debug-mode check that |object| is a Name (string or symbol): not a Smi,
// and its instance type is <= LAST_NAME_TYPE. |object| is preserved by
// spilling it around the map load. Emits nothing in release code.
void MacroAssembler::AssertName(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAName);
    // Use |object| itself as scratch for the map; restore it afterwards.
    push(object);
    mov(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, LAST_NAME_TYPE);
    pop(object);
    Check(below_equal, kOperandIsNotAName);
  }
}
810
811
// Debug-mode check that |object| is a JSFunction: not a Smi and has instance
// type JS_FUNCTION_TYPE. |object| is preserved. Emits nothing in release code.
void MacroAssembler::AssertFunction(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAFunction);
    // CmpObjectType clobbers its map register; reuse |object| and restore it.
    Push(object);
    CmpObjectType(object, JS_FUNCTION_TYPE, object);
    Pop(object);
    Check(equal, kOperandIsNotAFunction);
  }
}
822
823
// Debug-mode check that |object| is a JSBoundFunction (instance type
// JS_BOUND_FUNCTION_TYPE). |object| is preserved. No-op in release code.
void MacroAssembler::AssertBoundFunction(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotABoundFunction);
    // CmpObjectType clobbers its map register; reuse |object| and restore it.
    Push(object);
    CmpObjectType(object, JS_BOUND_FUNCTION_TYPE, object);
    Pop(object);
    Check(equal, kOperandIsNotABoundFunction);
  }
}
834
835
// Debug-mode check that |object| is either the undefined sentinel or an
// AllocationSite (identified by its map, read from offset 0 = the map slot).
// Emits nothing in release code.
void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) {
  if (emit_debug_code()) {
    Label done_checking;
    AssertNotSmi(object);
    cmp(object, isolate()->factory()->undefined_value());
    j(equal, &done_checking);
    // FieldOperand(object, 0) is the map word of the heap object.
    cmp(FieldOperand(object, 0),
        Immediate(isolate()->factory()->allocation_site_map()));
    Assert(equal, kExpectedUndefinedOrCell);
    bind(&done_checking);
  }
}
848
849
// Debug-mode check that |object| is NOT a Smi (tag bits non-zero).
// Emits nothing in release code.
void MacroAssembler::AssertNotSmi(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmi);
  }
}
856
857
// Emits the standard prologue for a stub frame: saved frame pointer, new
// frame pointer, context, and a STUB frame-type marker in the marker slot.
void MacroAssembler::StubPrologue() {
  push(ebp);  // Caller's frame pointer.
  mov(ebp, esp);
  push(esi);  // Callee's context.
  push(Immediate(Smi::FromInt(StackFrame::STUB)));
}
864
865
// Emits the JS function prologue. The code-size scope pins the emitted byte
// count to kNoCodeAgeSequenceLength so the code-aging machinery can later
// patch this exact sequence in place. When |code_pre_aging| is set, emit the
// aged form (a call to the MarkCodeAsExecutedOnce builtin, padded with nops)
// instead of the young frame-building form.
void MacroAssembler::Prologue(bool code_pre_aging) {
  PredictableCodeSizeScope predictible_code_size_scope(this,
                                                       kNoCodeAgeSequenceLength);
  if (code_pre_aging) {
    // Pre-age the code.
    call(isolate()->builtins()->MarkCodeAsExecutedOnce(),
         RelocInfo::CODE_AGE_SEQUENCE);
    // Pad so both variants occupy the same number of bytes.
    Nop(kNoCodeAgeSequenceLength - Assembler::kCallInstructionLength);
  } else {
    push(ebp);  // Caller's frame pointer.
    mov(ebp, esp);
    push(esi);  // Callee's context.
    push(edi);  // Callee's JS function.
  }
}
881
882
// Loads the current function's type-feedback vector into |vector| by chasing
// frame function slot -> SharedFunctionInfo -> feedback vector.
// Clobbers |vector| only.
void MacroAssembler::EmitLoadTypeFeedbackVector(Register vector) {
  mov(vector, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  mov(vector, FieldOperand(vector, JSFunction::kSharedFunctionInfoOffset));
  mov(vector, FieldOperand(vector, SharedFunctionInfo::kFeedbackVectorOffset));
}
888
889
// Overload kept for cross-platform API parity; ia32 has no out-of-line
// constant pool, so this variant must never be called.
void MacroAssembler::EnterFrame(StackFrame::Type type,
                                bool load_constant_pool_pointer_reg) {
  // Out-of-line constant pool not implemented on ia32.
  UNREACHABLE();
}
895
896
// Builds an internal frame of the given |type|: saved ebp, context, a Smi
// frame-type marker, and the code object. In debug mode, verifies that the
// pushed code-object slot has been patched away from the undefined
// placeholder.
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, esp);
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(CodeObject()));
  if (emit_debug_code()) {
    cmp(Operand(esp, 0), Immediate(isolate()->factory()->undefined_value()));
    Check(not_equal, kCodeObjectNotProperlyPatched);
  }
}
908
909
// Tears down a frame previously built by EnterFrame. In debug mode, checks
// that the frame marker slot matches the expected |type| before leaving.
void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (emit_debug_code()) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, kStackFrameTypesMustMatch);
  }
  // leave = mov esp, ebp; pop ebp.
  leave();
}
918
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100919
// Emits the common first half of an exit frame (the frame used when calling
// from JS into C++): builds the ebp chain, reserves the entry-sp slot, pushes
// the code object, and publishes fp/context/C-function into the isolate's
// top-of-stack external references. Expects the C function address in ebx
// (see the last store) — callers must have set it up.
void MacroAssembler::EnterExitFramePrologue() {
  // Set up the frame structure on the stack.
  DCHECK(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  DCHECK(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  DCHECK(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(ebp);
  mov(ebp, esp);

  // Reserve room for entry stack pointer and push the code object.
  DCHECK(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  push(Immediate(CodeObject()));  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress, isolate());
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  ExternalReference c_function_address(Isolate::kCFunctionAddress, isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);
  mov(Operand::StaticVariable(c_function_address), ebx);
}
Steve Blocka7e24c12009-10-30 11:49:00 +0000941
Steve Blocka7e24c12009-10-30 11:49:00 +0000942
// Emits the common second half of an exit frame: optionally spills all XMM
// registers below the frame, reserves |argc| pointer-sized argument slots,
// aligns esp to the OS frame alignment, and back-patches the saved entry-sp
// slot reserved by EnterExitFramePrologue.
void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) {
  // Optionally save all XMM registers.
  if (save_doubles) {
    int space = XMMRegister::kMaxNumRegisters * kDoubleSize +
                argc * kPointerSize;
    sub(esp, Immediate(space));
    // Doubles live below the code-object slot (two words below ebp).
    const int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movsd(Operand(ebp, offset - ((i + 1) * kDoubleSize)), reg);
    }
  } else {
    sub(esp, Immediate(argc * kPointerSize));
  }

  // Get the required frame alignment for the OS.
  const int kFrameAlignment = base::OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    DCHECK(base::bits::IsPowerOfTwo32(kFrameAlignment));
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}
968
969
// Builds a full exit frame for a runtime call. On entry eax holds the
// argument count (see the uses below); edi receives argc and esi the address
// of the first argument, both callee-saved across the C call.
void MacroAssembler::EnterExitFrame(bool save_doubles) {
  EnterExitFramePrologue();

  // Set up argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, eax);
  lea(esi, Operand(ebp, eax, times_4, offset));

  // Reserve space for argc, argv and isolate.
  EnterExitFrameEpilogue(3, save_doubles);
}
981
982
// Builds an exit frame for an API callback with |argc| argument slots and no
// XMM spilling (doubles are not saved on the API path).
void MacroAssembler::EnterApiExitFrame(int argc) {
  EnterExitFramePrologue();
  EnterExitFrameEpilogue(argc, false);
}
987
988
// Tears down an exit frame built by EnterExitFrame. Optionally restores the
// XMM registers spilled by EnterExitFrameEpilogue. When |pop_arguments| is
// set, also removes the JS arguments and receiver from the caller's stack
// (esi must still hold the argv pointer set up in EnterExitFrame); otherwise
// a plain `leave` restores esp/ebp. Always restores the context afterwards.
void MacroAssembler::LeaveExitFrame(bool save_doubles, bool pop_arguments) {
  // Optionally restore all XMM registers.
  if (save_doubles) {
    const int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movsd(reg, Operand(ebp, offset - ((i + 1) * kDoubleSize)));
    }
  }

  if (pop_arguments) {
    // Get the return address from the stack and restore the frame pointer.
    mov(ecx, Operand(ebp, 1 * kPointerSize));
    mov(ebp, Operand(ebp, 0 * kPointerSize));

    // Pop the arguments and the receiver from the caller stack.
    lea(esp, Operand(esi, 1 * kPointerSize));

    // Push the return address to get ready to return.
    push(ecx);
  } else {
    // Otherwise just leave the exit frame.
    leave();
  }

  LeaveExitFrameEpilogue(true);
}
1016
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001017
// Final exit-frame cleanup: optionally restores esi from the isolate's saved
// context, clears the saved context slot in debug builds, and zeroes the
// isolate's C-entry frame pointer to mark that we are back in JS.
void MacroAssembler::LeaveExitFrameEpilogue(bool restore_context) {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  if (restore_context) {
    mov(esi, Operand::StaticVariable(context_address));
  }
#ifdef DEBUG
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
                                       isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}
1033
1034
// Tears down an API exit frame built by EnterApiExitFrame: restores esp/ebp
// and runs the shared epilogue, optionally restoring the context into esi.
void MacroAssembler::LeaveApiExitFrame(bool restore_context) {
  mov(esp, ebp);
  pop(ebp);

  LeaveExitFrameEpilogue(restore_context);
}
1041
1042
// Pushes a new stack handler (a single-word link to the previous handler)
// and makes it the isolate's current handler.
void MacroAssembler::PushStackHandler() {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);

  // Link the current handler as the next handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  push(Operand::StaticVariable(handler_address));

  // Set this new handler as the current one.
  mov(Operand::StaticVariable(handler_address), esp);
}
1055
1056
// Unlinks the current stack handler: restores the previous handler from the
// top of the stack and drops the rest of the handler record (if any).
void MacroAssembler::PopStackHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  pop(Operand::StaticVariable(handler_address));
  // One word was popped above; discard the remainder of the handler.
  add(esp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}
1063
1064
// Security check for access to a global proxy object. Jumps to |miss| unless
// the current lexical (native) context either is the holder's native context
// or carries the same security token. Clobbers |scratch1| and |scratch2|;
// |holder_reg| is preserved.
void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss) {
  Label same_contexts;

  DCHECK(!holder_reg.is(scratch1));
  DCHECK(!holder_reg.is(scratch2));
  DCHECK(!scratch1.is(scratch2));

  // Load current lexical context from the stack frame.
  mov(scratch1, Operand(ebp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (emit_debug_code()) {
    cmp(scratch1, Immediate(0));
    Check(not_equal, kWeShouldNotHaveAnEmptyLexicalContext);
  }
  // Load the native context of the current context.
  mov(scratch1, ContextOperand(scratch1, Context::NATIVE_CONTEXT_INDEX));

  // Check the context is a native context.
  if (emit_debug_code()) {
    // Read the first word and compare to native_context_map.
    cmp(FieldOperand(scratch1, HeapObject::kMapOffset),
        isolate()->factory()->native_context_map());
    Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
  }

  // Check if both contexts are the same.
  cmp(scratch1, FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  j(equal, &same_contexts);

  // Compare security tokens of the calling context and the holder's context.
  //
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(scratch2,
      FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));

  // Check the context is a native context.
  if (emit_debug_code()) {
    cmp(scratch2, isolate()->factory()->null_value());
    Check(not_equal, kJSGlobalProxyContextShouldNotBeNull);

    // Read the first word and compare to native_context_map(),
    cmp(FieldOperand(scratch2, HeapObject::kMapOffset),
        isolate()->factory()->native_context_map());
    Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
  }

  // Compare the SECURITY_TOKEN_INDEX slots of the two native contexts.
  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch1, FieldOperand(scratch1, token_offset));
  cmp(scratch1, FieldOperand(scratch2, token_offset));
  j(not_equal, miss);

  bind(&same_contexts);
}
1126
1127
// Compute the hash code from the untagged key. This must be kept in sync with
// ComputeIntegerHash in utils.h and KeyedLoadGenericStub in
// code-stub-hydrogen.cc
//
// Note: r0 will contain hash code
void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
  // Xor original key with a seed. Under the serializer the seed must be read
  // from the roots array at runtime (the snapshot cannot bake it in);
  // otherwise the current heap's seed is embedded as an immediate.
  if (serializer_enabled()) {
    ExternalReference roots_array_start =
        ExternalReference::roots_array_start(isolate());
    mov(scratch, Immediate(Heap::kHashSeedRootIndex));
    mov(scratch,
        Operand::StaticArray(scratch, times_pointer_size, roots_array_start));
    SmiUntag(scratch);
    xor_(r0, scratch);
  } else {
    int32_t seed = isolate()->heap()->HashSeed();
    xor_(r0, Immediate(seed));
  }

  // The mixing steps below implement the integer hash referenced above.
  // hash = ~hash + (hash << 15);
  mov(scratch, r0);
  not_(r0);
  shl(scratch, 15);
  add(r0, scratch);
  // hash = hash ^ (hash >> 12);
  mov(scratch, r0);
  shr(scratch, 12);
  xor_(r0, scratch);
  // hash = hash + (hash << 2);
  lea(r0, Operand(r0, r0, times_4, 0));
  // hash = hash ^ (hash >> 4);
  mov(scratch, r0);
  shr(scratch, 4);
  xor_(r0, scratch);
  // hash = hash * 2057;
  imul(r0, r0, 2057);
  // hash = hash ^ (hash >> 16);
  mov(scratch, r0);
  shr(scratch, 16);
  xor_(r0, scratch);
  // Keep only the low 30 bits so the result fits in a Smi.
  and_(r0, 0x3fffffff);
}
1171
1172
1173
// Probes a SeededNumberDictionary for |key| using quadratic probing, jumping
// to |miss| if the key is absent (after kNumberDictionaryProbes attempts) or
// the found entry is not a plain data property. On fall-through, |result|
// holds the stored value.
void MacroAssembler::LoadFromNumberDictionary(Label* miss,
                                              Register elements,
                                              Register key,
                                              Register r0,
                                              Register r1,
                                              Register r2,
                                              Register result) {
  // Register use:
  //
  // elements - holds the slow-case elements of the receiver and is unchanged.
  //
  // key - holds the smi key on entry and is unchanged.
  //
  // Scratch registers:
  //
  // r0 - holds the untagged key on entry and holds the hash once computed.
  //
  // r1 - used to hold the capacity mask of the dictionary
  //
  // r2 - used for the index into the dictionary.
  //
  // result - holds the result on exit if the load succeeds and we fall through.

  Label done;

  GetNumberHash(r0, r1);

  // Compute capacity mask.
  mov(r1, FieldOperand(elements, SeededNumberDictionary::kCapacityOffset));
  shr(r1, kSmiTagSize);  // convert smi to int
  dec(r1);

  // Generate an unrolled loop that performs a few probes before giving up.
  for (int i = 0; i < kNumberDictionaryProbes; i++) {
    // Use r2 for index calculations and keep the hash intact in r0.
    mov(r2, r0);
    // Compute the masked index: (hash + i + i * i) & mask.
    if (i > 0) {
      add(r2, Immediate(SeededNumberDictionary::GetProbeOffset(i)));
    }
    and_(r2, r1);

    // Scale the index by multiplying by the entry size.
    DCHECK(SeededNumberDictionary::kEntrySize == 3);
    lea(r2, Operand(r2, r2, times_2, 0));  // r2 = r2 * 3

    // Check if the key matches.
    cmp(key, FieldOperand(elements,
                          r2,
                          times_pointer_size,
                          SeededNumberDictionary::kElementsStartOffset));
    // The last probe must miss; earlier probes fall through to retry.
    if (i != (kNumberDictionaryProbes - 1)) {
      j(equal, &done);
    } else {
      j(not_equal, miss);
    }
  }

  bind(&done);
  // Check that the value is a field property.
  const int kDetailsOffset =
      SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  DCHECK_EQ(DATA, 0);
  // A zero TypeField (== DATA) means a plain data property; anything else
  // (accessors etc.) takes the miss path.
  test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
       Immediate(PropertyDetails::TypeField::kMask << kSmiTagSize));
  j(not_zero, miss);

  // Get the value at the masked, scaled index.
  const int kValueOffset =
      SeededNumberDictionary::kElementsStartOffset + kPointerSize;
  mov(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
}
1246
1247
// Loads the current allocation top (for the space selected by |flags|) into
// |result|. If RESULT_CONTAINS_TOP is set, |result| already holds the top and
// only a debug check is emitted. When |scratch| is provided it is left
// holding the address of the top variable, which UpdateAllocationTopHelper
// can then reuse.
void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    DCHECK(scratch.is(no_reg));
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    cmp(result, Operand::StaticVariable(allocation_top));
    Check(equal, kUnexpectedAllocationTop);
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available.
  if (scratch.is(no_reg)) {
    mov(result, Operand::StaticVariable(allocation_top));
  } else {
    mov(scratch, Immediate(allocation_top));
    mov(result, Operand(scratch, 0));
  }
}
1274
1275
// Stores |result_end| as the new allocation top. If |scratch| is valid it is
// assumed to already hold the address of the top variable (as arranged by
// LoadAllocationTopHelper). Debug builds verify object alignment first.
void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch,
                                               AllocationFlags flags) {
  if (emit_debug_code()) {
    test(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, kUnalignedAllocationInNewSpace);
  }

  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Update new top. Use scratch if available.
  if (scratch.is(no_reg)) {
    mov(Operand::StaticVariable(allocation_top), result_end);
  } else {
    mov(Operand(scratch, 0), result_end);
  }
}
1294
1295
// Emits inline allocation of a fixed-size object of |object_size| bytes.
// On success |result| holds the new object (tagged if TAG_OBJECT is set) and
// control falls through; on exhaustion (or when inline allocation is
// disabled) control jumps to |gc_required|. |result_end| and |scratch| are
// optional scratch registers; DOUBLE_ALIGNMENT inserts a one-word filler
// to 8-byte-align the allocation.
void MacroAssembler::Allocate(int object_size,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
  DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      if (result_end.is_valid()) {
        mov(result_end, Immediate(0x7191));
      }
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Align the next allocation. Storing the filler map without checking top is
  // safe in new-space because the limit of the heap is aligned there.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    // In old space the limit is not alignment-safe, so check before writing
    // the filler.
    if ((flags & PRETENURE) != 0) {
      cmp(result, Operand::StaticVariable(allocation_limit));
      j(above_equal, gc_required);
    }
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted.
  Register top_reg = result_end.is_valid() ? result_end : result;
  if (!top_reg.is(result)) {
    mov(top_reg, result);
  }
  add(top_reg, Immediate(object_size));
  j(carry, gc_required);
  cmp(top_reg, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(top_reg, scratch, flags);

  // Tag result if requested.
  bool tag_result = (flags & TAG_OBJECT) != 0;
  if (top_reg.is(result)) {
    // result currently holds the new top; subtract back to the object start
    // (leaving the tag bit set when tagging was requested).
    if (tag_result) {
      sub(result, Immediate(object_size - kHeapObjectTag));
    } else {
      sub(result, Immediate(object_size));
    }
  } else if (tag_result) {
    DCHECK(kHeapObjectTag == 1);
    inc(result);
  }
}
1369
1370
// Emits inline allocation of a variable-size object of
// header_size + element_count * element_size bytes. |element_count| may hold
// a Smi or an int32 (per |element_count_type|); the Smi case is handled by
// reducing the scale factor by one instead of untagging. On success |result|
// holds the new object (tagged if TAG_OBJECT is set) and |result_end| the new
// top; on exhaustion control jumps to |gc_required|.
void MacroAssembler::Allocate(int header_size,
                              ScaleFactor element_size,
                              Register element_count,
                              RegisterValueType element_count_type,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & SIZE_IN_WORDS) == 0);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // Register element_count is not modified by the function.
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Align the next allocation. Storing the filler map without checking top is
  // safe in new-space because the limit of the heap is aligned there.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    // In old space the limit is not alignment-safe, so check before writing
    // the filler.
    if ((flags & PRETENURE) != 0) {
      cmp(result, Operand::StaticVariable(allocation_limit));
      j(above_equal, gc_required);
    }
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted.
  // We assume that element_count*element_size + header_size does not
  // overflow.
  if (element_count_type == REGISTER_VALUE_IS_SMI) {
    // A Smi is the value shifted left by kSmiTagSize, so halving the scale
    // factor yields the same product as untagging first.
    STATIC_ASSERT(static_cast<ScaleFactor>(times_2 - 1) == times_1);
    STATIC_ASSERT(static_cast<ScaleFactor>(times_4 - 1) == times_2);
    STATIC_ASSERT(static_cast<ScaleFactor>(times_8 - 1) == times_4);
    DCHECK(element_size >= times_2);
    DCHECK(kSmiTagSize == 1);
    element_size = static_cast<ScaleFactor>(element_size - 1);
  } else {
    DCHECK(element_count_type == REGISTER_VALUE_IS_INT32);
  }
  lea(result_end, Operand(element_count, element_size, header_size));
  add(result_end, result);
  j(carry, gc_required);
  cmp(result_end, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  if ((flags & TAG_OBJECT) != 0) {
    DCHECK(kHeapObjectTag == 1);
    inc(result);
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch, flags);
}
1446
1447
// Allocate an object of |object_size| bytes (size given in a register) via
// inline bump-pointer allocation.  On success |result| holds the object
// address (tagged with kHeapObjectTag when TAG_OBJECT is set) and
// |result_end| holds the new allocation top; |scratch| may be clobbered.
// On failure control transfers to |gc_required|.
void MacroAssembler::Allocate(Register object_size,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
  if (!FLAG_inline_new) {
    // Inline allocation is disabled: always take the slow path.
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // object_size is left unchanged by this function.
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Align the next allocation. Storing the filler map without checking top is
  // safe in new-space because the limit of the heap is aligned there.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    if ((flags & PRETENURE) != 0) {
      // For pretenured allocations the limit is checked before writing the
      // filler word, since the store-without-check shortcut above only
      // applies to new-space.
      cmp(result, Operand::StaticVariable(allocation_limit));
      j(above_equal, gc_required);
    }
    // Skip one pointer-sized slot by filling it with a one-pointer filler.
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted.
  if (!object_size.is(result_end)) {
    mov(result_end, object_size);
  }
  add(result_end, result);
  j(carry, gc_required);  // Address arithmetic overflowed.
  cmp(result_end, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    DCHECK(kHeapObjectTag == 1);
    inc(result);
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch, flags);
}
1511
1512
Steve Block3ce2e202009-11-05 08:53:23 +00001513void MacroAssembler::AllocateHeapNumber(Register result,
1514 Register scratch1,
1515 Register scratch2,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001516 Label* gc_required,
1517 MutableMode mode) {
Steve Block3ce2e202009-11-05 08:53:23 +00001518 // Allocate heap number in new space.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001519 Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required,
1520 TAG_OBJECT);
1521
1522 Handle<Map> map = mode == MUTABLE
1523 ? isolate()->factory()->mutable_heap_number_map()
1524 : isolate()->factory()->heap_number_map();
Steve Block3ce2e202009-11-05 08:53:23 +00001525
1526 // Set the map.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001527 mov(FieldOperand(result, HeapObject::kMapOffset), Immediate(map));
Steve Block3ce2e202009-11-05 08:53:23 +00001528}
1529
1530
// Allocate a sequential two-byte string of |length| characters (length given
// in a register).  scratch1 is used for the size computation and the smi
// length; scratch2/scratch3 are passed through to Allocate.  Jumps to
// |gc_required| on allocation failure.
void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  DCHECK((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  DCHECK(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
  and_(scratch1, Immediate(~kObjectAlignmentMask));

  // Allocate two byte string in new space.
  Allocate(SeqTwoByteString::kHeaderSize,
           times_1,
           scratch1,
           REGISTER_VALUE_IS_INT32,
           result,
           scratch2,
           scratch3,
           gc_required,
           TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);  // The length field stores a smi.
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
1565
1566
// Allocate a sequential one-byte string of |length| characters (length given
// in a register).  scratch1 holds the aligned byte count and later the smi
// length; jumps to |gc_required| on allocation failure.
void MacroAssembler::AllocateOneByteString(Register result, Register length,
                                           Register scratch1, Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  DCHECK((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, length);
  DCHECK(kCharSize == 1);
  // Round the character count up to the object alignment.
  add(scratch1, Immediate(kObjectAlignmentMask));
  and_(scratch1, Immediate(~kObjectAlignmentMask));

  // Allocate one-byte string in new space.
  Allocate(SeqOneByteString::kHeaderSize,
           times_1,
           scratch1,
           REGISTER_VALUE_IS_INT32,
           result,
           scratch2,
           scratch3,
           gc_required,
           TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->one_byte_string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);  // The length field stores a smi.
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
1599
1600
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001601void MacroAssembler::AllocateOneByteString(Register result, int length,
1602 Register scratch1, Register scratch2,
1603 Label* gc_required) {
1604 DCHECK(length > 0);
Iain Merrick9ac36c92010-09-13 15:29:50 +01001605
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001606 // Allocate one-byte string in new space.
1607 Allocate(SeqOneByteString::SizeFor(length), result, scratch1, scratch2,
1608 gc_required, TAG_OBJECT);
Iain Merrick9ac36c92010-09-13 15:29:50 +01001609
1610 // Set the map, length and hash field.
1611 mov(FieldOperand(result, HeapObject::kMapOffset),
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001612 Immediate(isolate()->factory()->one_byte_string_map()));
Iain Merrick9ac36c92010-09-13 15:29:50 +01001613 mov(FieldOperand(result, String::kLengthOffset),
1614 Immediate(Smi::FromInt(length)));
1615 mov(FieldOperand(result, String::kHashFieldOffset),
1616 Immediate(String::kEmptyHashField));
1617}
1618
1619
// Allocate a two-byte ConsString.  Only the map is initialized; the caller
// must fill in length, hash and the two child string fields.
void MacroAssembler::AllocateTwoByteConsString(Register result,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  // Allocate the cons string object in new space.
  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->cons_string_map()));
}
1632
1633
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001634void MacroAssembler::AllocateOneByteConsString(Register result,
1635 Register scratch1,
1636 Register scratch2,
1637 Label* gc_required) {
1638 Allocate(ConsString::kSize,
1639 result,
1640 scratch1,
1641 scratch2,
1642 gc_required,
1643 TAG_OBJECT);
Steve Blockd0582a62009-12-15 09:54:21 +00001644
1645 // Set the map. The other fields are left uninitialized.
1646 mov(FieldOperand(result, HeapObject::kMapOffset),
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001647 Immediate(isolate()->factory()->cons_one_byte_string_map()));
Steve Blockd0582a62009-12-15 09:54:21 +00001648}
1649
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08001650
// Allocate a two-byte SlicedString.  Only the map is initialized; the caller
// must fill in length, hash, parent and offset.
void MacroAssembler::AllocateTwoByteSlicedString(Register result,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  // Allocate the sliced string object in new space.
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->sliced_string_map()));
}
1663
1664
// Allocate a one-byte SlicedString.  Only the map is initialized; the caller
// must fill in length, hash, parent and offset.
void MacroAssembler::AllocateOneByteSlicedString(Register result,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  // Allocate the sliced string object in new space.
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->sliced_one_byte_string_map()));
}
1677
1678
// Allocate and fully initialize a JSValue wrapper object holding |value|.
// |constructor| supplies the initial map (and is clobbered by
// LoadGlobalFunctionInitialMap); |scratch| is clobbered.  Jumps to
// |gc_required| on allocation failure.
void MacroAssembler::AllocateJSValue(Register result, Register constructor,
                                     Register value, Register scratch,
                                     Label* gc_required) {
  DCHECK(!result.is(constructor));
  DCHECK(!result.is(scratch));
  DCHECK(!result.is(value));

  // Allocate JSValue in new space.
  Allocate(JSValue::kSize, result, scratch, no_reg, gc_required, TAG_OBJECT);

  // Initialize the JSValue: map from the constructor's initial map, empty
  // properties and elements, and the wrapped value.
  LoadGlobalFunctionInitialMap(constructor, scratch);
  mov(FieldOperand(result, HeapObject::kMapOffset), scratch);
  LoadRoot(scratch, Heap::kEmptyFixedArrayRootIndex);
  mov(FieldOperand(result, JSObject::kPropertiesOffset), scratch);
  mov(FieldOperand(result, JSObject::kElementsOffset), scratch);
  mov(FieldOperand(result, JSValue::kValueOffset), value);
  // The stores above must cover every field of the object.
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
}
1698
1699
// Copy memory, byte-by-byte, from source to destination. Not optimized for
// long or aligned copies. The contents of scratch and length are destroyed.
// Source and destination are incremented by length.
// Many variants of movsb, loop unrolling, word moves, and indexed operands
// have been tried here already, and this is fastest.
// A simpler loop is faster on small copies, but 30% slower on large ones.
// The cld() instruction must have been emitted, to set the direction flag(),
// before calling this function.
void MacroAssembler::CopyBytes(Register source,
                               Register destination,
                               Register length,
                               Register scratch) {
  Label short_loop, len4, len8, len12, done, short_string;
  // rep_movs below hard-codes esi/edi/ecx, so the register choice is fixed.
  DCHECK(source.is(esi));
  DCHECK(destination.is(edi));
  DCHECK(length.is(ecx));
  cmp(length, Immediate(4));
  j(below, &short_string, Label::kNear);

  // Because source is 4-byte aligned in our uses of this function,
  // we keep source aligned for the rep_movs call by copying the odd bytes
  // at the end of the ranges.
  mov(scratch, Operand(source, length, times_1, -4));
  mov(Operand(destination, length, times_1, -4), scratch);

  // Dispatch small sizes (<= 16 bytes) to unrolled dword copies; anything
  // larger falls through to rep_movs.
  cmp(length, Immediate(8));
  j(below_equal, &len4, Label::kNear);
  cmp(length, Immediate(12));
  j(below_equal, &len8, Label::kNear);
  cmp(length, Immediate(16));
  j(below_equal, &len12, Label::kNear);

  mov(scratch, ecx);
  shr(ecx, 2);   // Copy in dwords.
  rep_movs();
  and_(scratch, Immediate(0x3));
  add(destination, scratch);  // Account for the tail bytes copied up front.
  jmp(&done, Label::kNear);

  // Unrolled dword copies; the cases fall through into each other so that
  // len12 copies offsets 8, 4 and 0, len8 copies 4 and 0, etc.
  bind(&len12);
  mov(scratch, Operand(source, 8));
  mov(Operand(destination, 8), scratch);
  bind(&len8);
  mov(scratch, Operand(source, 4));
  mov(Operand(destination, 4), scratch);
  bind(&len4);
  mov(scratch, Operand(source, 0));
  mov(Operand(destination, 0), scratch);
  add(destination, length);
  jmp(&done, Label::kNear);

  // Fewer than 4 bytes: plain byte loop (also handles length == 0).
  bind(&short_string);
  test(length, length);
  j(zero, &done, Label::kNear);

  bind(&short_loop);
  mov_b(scratch, Operand(source, 0));
  mov_b(Operand(destination, 0), scratch);
  inc(source);
  inc(destination);
  dec(length);
  j(not_zero, &short_loop);

  bind(&done);
}
1765
Steve Blockd0582a62009-12-15 09:54:21 +00001766
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001767void MacroAssembler::InitializeFieldsWithFiller(Register current_address,
1768 Register end_address,
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001769 Register filler) {
1770 Label loop, entry;
1771 jmp(&entry);
1772 bind(&loop);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001773 mov(Operand(current_address, 0), filler);
1774 add(current_address, Immediate(kPointerSize));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001775 bind(&entry);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001776 cmp(current_address, end_address);
1777 j(below, &loop);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001778}
1779
1780
// Test bit |bit_index| of the smi-encoded field at |field_offset| in
// |object|, setting the processor flags for a following conditional jump
// (zero flag set iff the bit is clear).
void MacroAssembler::BooleanBitTest(Register object,
                                    int field_offset,
                                    int bit_index) {
  // Skip over the smi tag bits so the index refers to the untagged value.
  bit_index += kSmiTagSize + kSmiShiftSize;
  DCHECK(base::bits::IsPowerOfTwo32(kBitsPerByte));
  int byte_index = bit_index / kBitsPerByte;
  int byte_bit_index = bit_index & (kBitsPerByte - 1);
  // Single-byte test keeps the encoding short.
  test_b(FieldOperand(object, field_offset + byte_index),
         static_cast<byte>(1 << byte_bit_index));
}
1791
1792
1793
// Jump to |then_label| if |result| is zero while |op| is negative, i.e. the
// operation would have produced a negative zero.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  test(result, result);
  j(not_zero, &ok);  // Non-zero result: cannot be -0.
  test(op, op);
  j(sign, then_label);  // Zero result from a negative operand.
  bind(&ok);
}
1804
1805
// Jump to |then_label| if |result| is zero while either |op1| or |op2| is
// negative (detected via the sign bit of op1 | op2), i.e. the operation
// would have produced a negative zero.  |scratch| is clobbered.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  test(result, result);
  j(not_zero, &ok);  // Non-zero result: cannot be -0.
  mov(scratch, op1);
  or_(scratch, op2);  // Sign bit set iff at least one operand is negative.
  j(sign, then_label);
  bind(&ok);
}
1819
1820
// Load the constructor of |map| into |result|.  The constructor-or-back-
// pointer field may hold a chain of maps (back pointers); follow it until a
// non-map value (a smi or a non-map heap object) is found.  |temp| is
// clobbered by the type check.
void MacroAssembler::GetMapConstructor(Register result, Register map,
                                       Register temp) {
  Label done, loop;
  mov(result, FieldOperand(map, Map::kConstructorOrBackPointerOffset));
  bind(&loop);
  JumpIfSmi(result, &done, Label::kNear);
  CmpObjectType(result, MAP_TYPE, temp);
  j(not_equal, &done, Label::kNear);
  // Still a map: keep following the back-pointer chain.
  mov(result, FieldOperand(result, Map::kConstructorOrBackPointerOffset));
  jmp(&loop);
  bind(&done);
}
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001833
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001834
// Load the prototype of JSFunction |function| into |result|, or jump to
// |miss| if the prototype slot holds the hole.  |scratch| is clobbered by
// the type check.
void MacroAssembler::TryGetFunctionPrototype(Register function, Register result,
                                             Register scratch, Label* miss) {
  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(result, Immediate(isolate()->factory()->the_hole_value()));
  j(equal, miss);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done, Label::kNear);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));

  // All done.
  bind(&done);
}
1858
1859
// Emit a call to |stub|'s code object, recording |ast_id| with the reloc
// info.
void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) {
  DCHECK(AllowThisStubCall(stub));  // Calls are not allowed in some stubs.
  call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
}
1864
1865
// Tail-call |stub|: jump to its code object instead of calling it.
void MacroAssembler::TailCallStub(CodeStub* stub) {
  jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
}
1869
1870
// Return from a stub, popping |argc| - 1 arguments (the receiver stays with
// the return address handling).  Only valid while generating a stub.
void MacroAssembler::StubReturn(int argc) {
  DCHECK(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}
1875
1876
// A stub may be called if a frame has been set up, or if the stub never
// sets up a frame of its own.
bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
  return has_frame_ || !stub->SometimesSetsUpAFrame();
}
1880
1881
// Extract the cached array index from string hash-field value |hash| into
// |index| as a smi.
void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it does not conflict.
  DCHECK(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  if (!index.is(hash)) {
    mov(index, hash);
  }
  DecodeFieldToSmi<String::ArrayIndexValueBits>(index);
}
1893
1894
// Call the C++ runtime function |f| with |num_arguments| arguments already
// on the stack, going through the CEntry stub.  eax receives the argument
// count and ebx the runtime entry point.
void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments,
                                 SaveFPRegsMode save_doubles) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  CHECK(f->nargs < 0 || f->nargs == num_arguments);

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Move(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f, isolate())));
  CEntryStub ces(isolate(), 1, save_doubles);
  CallStub(&ces);
}
1912
1913
// Call the external (C++) function at |ref| with |num_arguments| stack
// arguments via the CEntry stub; eax = argument count, ebx = entry point.
void MacroAssembler::CallExternalReference(ExternalReference ref,
                                           int num_arguments) {
  mov(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ref));

  CEntryStub stub(isolate(), 1);
  CallStub(&stub);
}
1922
1923
// Tail-call the runtime function |fid|; arguments are already on the stack.
void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid) {
  // ----------- S t a t e -------------
  //  -- esp[0]                 : return address
  //  -- esp[4]                 : argument num_arguments - 1
  //  ...
  //  -- esp[4 * num_arguments] : argument 0 (receiver)
  //
  //  For runtime functions with variable arguments:
  //  -- eax                    : number of  arguments
  // -----------------------------------

  const Runtime::Function* function = Runtime::FunctionForId(fid);
  DCHECK_EQ(1, function->result_size);
  if (function->nargs >= 0) {
    // TODO(1236192): Most runtime routines don't need the number of
    // arguments passed in because it is constant. At some point we
    // should remove this need and make the runtime routine entry code
    // smarter.
    mov(eax, Immediate(function->nargs));
  }
  JumpToExternalReference(ExternalReference(fid, isolate()));
}
1946
1947
// Jump (tail-call) to the external reference |ext| through the CEntry stub;
// ebx carries the runtime entry point.
void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces(isolate(), 1);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}
1954
1955
// Emit the argument-count check that precedes a JSFunction invocation.
// Sets eax to the actual argument count and ebx to the expected count when
// they differ, and routes mismatches through the ArgumentsAdaptorTrampoline.
// On a (possible) match, falls through to the caller's code; |done| is
// jumped to after a call through the adaptor.  *definitely_mismatches is set
// when the counts are statically known to differ, in which case the caller
// emits no invoke sequence at all.
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Label* done,
                                    bool* definitely_mismatches,
                                    InvokeFlag flag,
                                    Label::Distance done_near,
                                    const CallWrapper& call_wrapper) {
  bool definitely_matches = false;
  *definitely_mismatches = false;
  Label invoke;
  if (expected.is_immediate()) {
    DCHECK(actual.is_immediate());
    mov(eax, actual.immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        *definitely_mismatches = true;
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      mov(eax, actual.immediate());
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      DCHECK(expected.reg().is(ebx));
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), actual.reg());
      j(equal, &invoke);
      DCHECK(actual.reg().is(eax));
      DCHECK(expected.reg().is(ebx));
    } else {
      // Same register holds both counts: trivially a match; just make sure
      // eax holds the actual count.
      Move(eax, actual.reg());
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET));
      call(adaptor, RelocInfo::CODE_TARGET);
      call_wrapper.AfterCall();
      if (!*definitely_mismatches) {
        // The adaptor performed the invocation; skip the caller's own
        // invoke sequence.
        jmp(done, done_near);
      }
    } else {
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
2021
2022
// If the debugger's step-in flag is set, call the runtime to flood |fun|
// with debug break slots before invoking it.  All live invocation registers
// (expected/actual counts, new.target, the function) are smi-tagged where
// needed and saved across the runtime call.
void MacroAssembler::FloodFunctionIfStepping(Register fun, Register new_target,
                                             const ParameterCount& expected,
                                             const ParameterCount& actual) {
  Label skip_flooding;
  ExternalReference step_in_enabled =
      ExternalReference::debug_step_in_enabled_address(isolate());
  cmpb(Operand::StaticVariable(step_in_enabled), 0);
  j(equal, &skip_flooding);
  {
    // Enter an internal frame only if we are not already inside one.
    FrameScope frame(this,
                     has_frame() ? StackFrame::NONE : StackFrame::INTERNAL);
    if (expected.is_reg()) {
      SmiTag(expected.reg());
      Push(expected.reg());
    }
    if (actual.is_reg()) {
      SmiTag(actual.reg());
      Push(actual.reg());
    }
    if (new_target.is_valid()) {
      Push(new_target);
    }
    // |fun| is pushed twice: once to be preserved across the call and once
    // as the argument to the runtime function.
    Push(fun);
    Push(fun);
    CallRuntime(Runtime::kDebugPrepareStepInIfStepping, 1);
    Pop(fun);
    if (new_target.is_valid()) {
      Pop(new_target);
    }
    if (actual.is_reg()) {
      Pop(actual.reg());
      SmiUntag(actual.reg());
    }
    if (expected.is_reg()) {
      Pop(expected.reg());
      SmiUntag(expected.reg());
    }
  }
  bind(&skip_flooding);
}
2063
2064
// Invoke (call or jump to) the code of |function| (must be edi), setting up
// new.target in edx and running the argument-count prologue first.  When the
// prologue detects a definite argument-count mismatch, the invocation is
// handled entirely by the arguments adaptor and no direct call is emitted.
void MacroAssembler::InvokeFunctionCode(Register function, Register new_target,
                                        const ParameterCount& expected,
                                        const ParameterCount& actual,
                                        InvokeFlag flag,
                                        const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());
  DCHECK(function.is(edi));
  DCHECK_IMPLIES(new_target.is_valid(), new_target.is(edx));

  if (call_wrapper.NeedsDebugStepCheck()) {
    FloodFunctionIfStepping(function, new_target, expected, actual);
  }

  // Clear the new.target register if not given.
  if (!new_target.is_valid()) {
    mov(edx, isolate()->factory()->undefined_value());
  }

  Label done;
  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, &done, &definitely_mismatches, flag,
                 Label::kNear, call_wrapper);
  if (!definitely_mismatches) {
    // We call indirectly through the code field in the function to
    // allow recompilation to take effect without changing any of the
    // call sites.
    Operand code = FieldOperand(function, JSFunction::kCodeEntryOffset);
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(code));
      call(code);
      call_wrapper.AfterCall();
    } else {
      DCHECK(flag == JUMP_FUNCTION);
      jmp(code);
    }
    bind(&done);
  }
}
2104
2105
// Invoke JSFunction |fun| (must be edi), reading the expected argument count
// from its SharedFunctionInfo.  Loads the function context into esi and the
// untagged formal parameter count into ebx before delegating to
// InvokeFunctionCode.
void MacroAssembler::InvokeFunction(Register fun,
                                    Register new_target,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  DCHECK(fun.is(edi));
  mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(ebx, SharedFunctionInfo::kFormalParameterCountOffset));
  SmiUntag(ebx);  // The count is stored as a smi.

  ParameterCount expected(ebx);
  InvokeFunctionCode(edi, new_target, expected, actual, flag, call_wrapper);
}
2123
2124
// Invokes the JSFunction in |fun| (must be edi) with an explicit, caller-
// supplied expected parameter count. Loads the callee's context into esi.
// No new.target is passed (no_reg).
void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  DCHECK(fun.is(edi));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  InvokeFunctionCode(edi, no_reg, expected, actual, flag, call_wrapper);
}
2138
2139
// Convenience overload: materializes the handle-referenced JSFunction into
// edi (via a cell if it lives in new space) and delegates to the register
// variant above. Clobbers edi and esi.
void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  LoadHeapObject(edi, function);
  InvokeFunction(edi, expected, actual, flag, call_wrapper);
}
2148
2149
// Invokes a JavaScript builtin identified by its native-context slot index.
// Loads the builtin function into edi and invokes it with a zero expected
// parameter count so no arity-adaption check is emitted.
void MacroAssembler::InvokeBuiltin(int native_context_index, InvokeFlag flag,
                                   const CallWrapper& call_wrapper) {
  // You can't call a builtin without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  // Fake a parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinFunction(edi, native_context_index);
  InvokeFunctionCode(edi, no_reg, expected, expected, flag, call_wrapper);
}
2160
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002161
// Loads the builtin JSFunction stored at |native_context_index| of the
// current native context into |target| (two dependent loads).
void MacroAssembler::GetBuiltinFunction(Register target,
                                        int native_context_index) {
  // Load the JavaScript builtin function from the builtins object.
  mov(target, NativeContextOperand());
  mov(target, ContextOperand(target, native_context_index));
}
2168
2169
// Walks |context_chain_length| links up the context chain starting from esi
// and leaves the resulting context in |dst|. With length 0 the current
// context is simply copied into |dst|.
void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    mov(dst, Operand(esi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    for (int i = 1; i < context_chain_length; i++) {
      mov(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    }
  } else {
    // Slot is in the current function context. Move it into the
    // destination register in case we store into it (the write barrier
    // cannot be allowed to destroy the context in esi).
    mov(dst, esi);
  }

  // We should not have found a with context by walking the context chain
  // (i.e., the static scope chain and runtime context chain do not agree).
  // A variable occurring in such a scope should have slot type LOOKUP and
  // not CONTEXT.
  if (emit_debug_code()) {
    cmp(FieldOperand(dst, HeapObject::kMapOffset),
        isolate()->factory()->with_context_map());
    Check(not_equal, kVariableResolvedToWithContext);
  }
}
2194
2195
// Loads the global proxy object of the current native context into |dst|.
void MacroAssembler::LoadGlobalProxy(Register dst) {
  mov(dst, NativeContextOperand());
  mov(dst, ContextOperand(dst, Context::GLOBAL_PROXY_INDEX));
}
2200
2201
// If |map_in_out| is the native context's cached array map for
// |expected_kind|, replaces it with the cached map for |transitioned_kind|;
// otherwise jumps to |no_map_match| leaving |map_in_out| unchanged.
// |scratch| receives the native context.
void MacroAssembler::LoadTransitionedArrayMapConditional(
    ElementsKind expected_kind,
    ElementsKind transitioned_kind,
    Register map_in_out,
    Register scratch,
    Label* no_map_match) {
  DCHECK(IsFastElementsKind(expected_kind));
  DCHECK(IsFastElementsKind(transitioned_kind));

  // Check that the function's map is the same as the expected cached map.
  mov(scratch, NativeContextOperand());
  cmp(map_in_out,
      ContextOperand(scratch, Context::ArrayMapIndex(expected_kind)));
  j(not_equal, no_map_match);

  // Use the transitioned cached map.
  mov(map_in_out,
      ContextOperand(scratch, Context::ArrayMapIndex(transitioned_kind)));
}
2221
2222
// Loads the global function stored at slot |index| of the current native
// context into |function|.
void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the native context from the current context.
  mov(function, NativeContextOperand());
  // Load the function from the native context.
  mov(function, ContextOperand(function, index));
}
2229
2230
// Loads the initial map of a global function into |map|. In debug mode,
// verifies the loaded value really is a Map (its own map is the meta map)
// and aborts otherwise.
void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map. The global functions all have initial maps.
  mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
    jmp(&ok);
    bind(&fail);
    Abort(kGlobalFunctionsMustHaveInitialMap);
    bind(&ok);
  }
}
2244
Steve Blockd0582a62009-12-15 09:54:21 +00002245
// Store the value in register src in the safepoint register stack
// slot for register dst.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
  mov(SafepointRegisterSlot(dst), src);
}
2251
2252
// Immediate variant: stores |src| into the safepoint stack slot that backs
// register |dst|.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Immediate src) {
  mov(SafepointRegisterSlot(dst), src);
}
2256
2257
// Loads into |dst| the value saved in the safepoint stack slot that backs
// register |src|.
void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  mov(dst, SafepointRegisterSlot(src));
}
2261
2262
// Returns the esp-relative operand addressing the safepoint stack slot for
// |reg|, based on the pushed-register layout below.
Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return Operand(esp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}
2266
2267
// Maps a register encoding to its index (in pointer-sized slots) from the
// stack pointer within the block of pushed safepoint registers.
int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
  // The registers are pushed starting with the lowest encoding,
  // which means that lowest encodings are furthest away from
  // the stack pointer.
  DCHECK(reg_code >= 0 && reg_code < kNumSafepointRegisters);
  return kNumSafepointRegisters - reg_code - 1;
}
2275
2276
// Loads |object| into |result|. Objects in new space may move, so they are
// referenced indirectly through a cell the GC can update; old-space objects
// are embedded directly in the instruction stream.
void MacroAssembler::LoadHeapObject(Register result,
                                    Handle<HeapObject> object) {
  AllowDeferredHandleDereference embedding_raw_address;
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<Cell> cell = isolate()->factory()->NewCell(object);
    mov(result, Operand::ForCell(cell));
  } else {
    mov(result, object);
  }
}
2287
2288
// Compares |reg| against |object|, using a cell indirection for movable
// new-space objects (same scheme as LoadHeapObject). Sets EFLAGS for a
// following conditional jump.
void MacroAssembler::CmpHeapObject(Register reg, Handle<HeapObject> object) {
  AllowDeferredHandleDereference using_raw_address;
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<Cell> cell = isolate()->factory()->NewCell(object);
    cmp(reg, Operand::ForCell(cell));
  } else {
    cmp(reg, object);
  }
}
2298
2299
// Pushes |object| onto the stack, indirecting through a cell for movable
// new-space objects (same scheme as LoadHeapObject).
void MacroAssembler::PushHeapObject(Handle<HeapObject> object) {
  AllowDeferredHandleDereference using_raw_address;
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<Cell> cell = isolate()->factory()->NewCell(object);
    push(Operand::ForCell(cell));
  } else {
    Push(object);
  }
}
2309
2310
// Compares |value| against the payload of weak cell |cell| (loaded via
// |scratch|). Sets EFLAGS; equal means the weak value is alive and matches.
void MacroAssembler::CmpWeakValue(Register value, Handle<WeakCell> cell,
                                  Register scratch) {
  mov(scratch, cell);
  cmp(value, FieldOperand(scratch, WeakCell::kValueOffset));
}
2316
2317
// Loads the payload of weak cell |cell| into |value|.
void MacroAssembler::GetWeakValue(Register value, Handle<WeakCell> cell) {
  mov(value, cell);
  mov(value, FieldOperand(value, WeakCell::kValueOffset));
}
2322
2323
// Loads the payload of weak cell |cell| into |value| and jumps to |miss| if
// the cell has been cleared (cleared weak cells hold a Smi).
void MacroAssembler::LoadWeakValue(Register value, Handle<WeakCell> cell,
                                   Label* miss) {
  GetWeakValue(value, cell);
  JumpIfSmi(value, miss);
}
2329
2330
// Emits a plain return that pops no arguments.
void MacroAssembler::Ret() {
  ret(0);
}
2334
2335
// Returns and drops |bytes_dropped| bytes of arguments. The x86 ret
// instruction takes only a 16-bit pop count, so for larger amounts the
// return address is popped into |scratch|, esp is adjusted manually, and the
// return address is pushed back before returning.
void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
  if (is_uint16(bytes_dropped)) {
    ret(bytes_dropped);
  } else {
    pop(scratch);
    add(esp, Immediate(bytes_dropped));
    push(scratch);
    ret(0);
  }
}
2346
2347
Leon Clarkee46be812010-01-19 14:06:41 +00002348void MacroAssembler::Drop(int stack_elements) {
2349 if (stack_elements > 0) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002350 add(esp, Immediate(stack_elements * kPointerSize));
Leon Clarkee46be812010-01-19 14:06:41 +00002351 }
2352}
2353
2354
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002355void MacroAssembler::Move(Register dst, Register src) {
2356 if (!dst.is(src)) {
2357 mov(dst, src);
2358 }
2359}
2360
2361
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002362void MacroAssembler::Move(Register dst, const Immediate& x) {
2363 if (x.is_zero()) {
2364 xor_(dst, dst); // Shorter than mov of 32-bit immediate 0.
2365 } else {
2366 mov(dst, x);
2367 }
2368}
2369
2370
// Stores immediate |x| to memory operand |dst|.
void MacroAssembler::Move(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}
2374
2375
// Materializes the 32-bit pattern |src| in XMM register |dst| without a
// memory constant. Zero uses pxor; a single contiguous run of 1-bits is
// synthesized from all-ones (pcmpeqd) with shifts; anything else goes
// through eax (saved and restored around the transfer).
void MacroAssembler::Move(XMMRegister dst, uint32_t src) {
  if (src == 0) {
    pxor(dst, dst);
  } else {
    unsigned cnt = base::bits::CountPopulation32(src);
    unsigned nlz = base::bits::CountLeadingZeros32(src);
    unsigned ntz = base::bits::CountTrailingZeros32(src);
    // nlz + cnt + ntz == 32 iff the set bits form one contiguous run.
    if (nlz + cnt + ntz == 32) {
      pcmpeqd(dst, dst);  // All ones.
      if (ntz == 0) {
        psrld(dst, 32 - cnt);  // Run is right-aligned: shift in zeros on top.
      } else {
        pslld(dst, 32 - cnt);  // Left-align the run...
        if (nlz != 0) psrld(dst, nlz);  // ...then shift down into place.
      }
    } else {
      // General case: route the constant through eax.
      push(eax);
      mov(eax, Immediate(src));
      movd(dst, Operand(eax));
      pop(eax);
    }
  }
}
2399
2400
// Materializes the 64-bit pattern |src| in XMM register |dst|. Zero uses
// pxor; one contiguous run of 1-bits is built from all-ones with 64-bit
// shifts; a pattern with a zero low half is built from the high half plus a
// shift; otherwise both halves are combined via pinsrd (SSE4.1) or by
// staging the value on the stack.
void MacroAssembler::Move(XMMRegister dst, uint64_t src) {
  if (src == 0) {
    pxor(dst, dst);
  } else {
    uint32_t lower = static_cast<uint32_t>(src);
    uint32_t upper = static_cast<uint32_t>(src >> 32);
    unsigned cnt = base::bits::CountPopulation64(src);
    unsigned nlz = base::bits::CountLeadingZeros64(src);
    unsigned ntz = base::bits::CountTrailingZeros64(src);
    // nlz + cnt + ntz == 64 iff the set bits form one contiguous run.
    if (nlz + cnt + ntz == 64) {
      pcmpeqd(dst, dst);  // All ones.
      if (ntz == 0) {
        psrlq(dst, 64 - cnt);
      } else {
        psllq(dst, 64 - cnt);
        if (nlz != 0) psrlq(dst, nlz);
      }
    } else if (lower == 0) {
      // Only the upper half is populated: build it as a 32-bit pattern,
      // then shift into the high dword.
      Move(dst, upper);
      psllq(dst, 32);
    } else if (CpuFeatures::IsSupported(SSE4_1)) {
      CpuFeatureScope scope(this, SSE4_1);
      // Insert both dwords via eax (saved/restored around the sequence).
      push(eax);
      Move(eax, Immediate(lower));
      movd(dst, Operand(eax));
      Move(eax, Immediate(upper));
      pinsrd(dst, Operand(eax), 1);
      pop(eax);
    } else {
      // No SSE4.1: stage the 64-bit value on the stack and load it.
      push(Immediate(upper));
      push(Immediate(lower));
      movsd(dst, Operand(esp, 0));
      add(esp, Immediate(kDoubleSize));
    }
  }
}
2437
2438
// Extracts dword |imm8| (0 or 1) of |src| into |dst|. Uses the real pextrd
// when SSE4.1 is available; otherwise falls back to a pshufd through xmm0
// (so xmm0 is clobbered on the fallback path for imm8 == 1).
void MacroAssembler::Pextrd(Register dst, XMMRegister src, int8_t imm8) {
  if (imm8 == 0) {
    movd(dst, src);
    return;
  }
  DCHECK_EQ(1, imm8);
  if (CpuFeatures::IsSupported(SSE4_1)) {
    CpuFeatureScope sse_scope(this, SSE4_1);
    pextrd(dst, src, imm8);
    return;
  }
  pshufd(xmm0, src, 1);
  movd(dst, xmm0);
}
2453
2454
// Inserts the dword at |src| into lane |imm8| (0 or 1) of |dst|, preserving
// the other lane. Uses pinsrd with SSE4.1; otherwise emulates it with
// punpckldq, clobbering xmm0 on the fallback path.
void MacroAssembler::Pinsrd(XMMRegister dst, const Operand& src, int8_t imm8) {
  DCHECK(imm8 == 0 || imm8 == 1);
  if (CpuFeatures::IsSupported(SSE4_1)) {
    CpuFeatureScope sse_scope(this, SSE4_1);
    pinsrd(dst, src, imm8);
    return;
  }
  movd(xmm0, src);
  if (imm8 == 1) {
    // New value becomes the high dword; low dword of dst is kept.
    punpckldq(dst, xmm0);
  } else {
    DCHECK_EQ(0, imm8);
    // Keep dst's high dword (shifted down), interleave under the new low.
    psrlq(dst, 32);
    punpckldq(xmm0, dst);
    movaps(dst, xmm0);
  }
}
2472
2473
// Count leading zeros of |src| into |dst|. Uses the lzcnt instruction when
// available; otherwise emulates it with bsr. bsr leaves dst undefined for a
// zero input, so that case is patched to 63 before the final xor, making
// the overall result 32 (63 ^ 31 == 32).
void MacroAssembler::Lzcnt(Register dst, const Operand& src) {
  if (CpuFeatures::IsSupported(LZCNT)) {
    CpuFeatureScope scope(this, LZCNT);
    lzcnt(dst, src);
    return;
  }
  Label not_zero_src;
  bsr(dst, src);
  j(not_zero, &not_zero_src, Label::kNear);
  Move(dst, Immediate(63));  // 63^31 == 32
  bind(&not_zero_src);
  xor_(dst, Immediate(31));  // for x in [0..31], 31^x == 31-x.
}
2487
2488
// Count trailing zeros of |src| into |dst|. Uses tzcnt (BMI1) when
// available; otherwise emulates it with bsf, which already yields the bit
// index for non-zero inputs but leaves dst undefined for zero — that case
// is patched to 32, matching tzcnt.
void MacroAssembler::Tzcnt(Register dst, const Operand& src) {
  if (CpuFeatures::IsSupported(BMI1)) {
    CpuFeatureScope scope(this, BMI1);
    tzcnt(dst, src);
    return;
  }
  Label not_zero_src;
  bsf(dst, src);
  j(not_zero, &not_zero_src, Label::kNear);
  Move(dst, Immediate(32));  // The result of tzcnt is 32 if src = 0.
  bind(&not_zero_src);
}
2501
2502
// Population count of |src| into |dst|. Only emitted when the POPCNT
// instruction is supported; there is deliberately no software fallback, so
// callers must guard on the CPU feature.
void MacroAssembler::Popcnt(Register dst, const Operand& src) {
  if (CpuFeatures::IsSupported(POPCNT)) {
    CpuFeatureScope scope(this, POPCNT);
    popcnt(dst, src);
    return;
  }
  UNREACHABLE();
}
2511
2512
// Stores |value| into the stats counter's cell, if counters are enabled
// both at compile time (flag) and for this particular counter.
void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
  }
}
2518
2519
2520void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002521 DCHECK(value > 0);
Steve Blocka7e24c12009-10-30 11:49:00 +00002522 if (FLAG_native_code_counters && counter->Enabled()) {
2523 Operand operand = Operand::StaticVariable(ExternalReference(counter));
2524 if (value == 1) {
2525 inc(operand);
2526 } else {
2527 add(operand, Immediate(value));
2528 }
2529 }
2530}
2531
2532
2533void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002534 DCHECK(value > 0);
Steve Blocka7e24c12009-10-30 11:49:00 +00002535 if (FLAG_native_code_counters && counter->Enabled()) {
2536 Operand operand = Operand::StaticVariable(ExternalReference(counter));
2537 if (value == 1) {
2538 dec(operand);
2539 } else {
2540 sub(operand, Immediate(value));
2541 }
2542 }
2543}
2544
2545
// Conditionally increments a stats counter: the update only runs when
// condition |cc| holds at this point in the generated code. EFLAGS are
// preserved across the update (pushfd/popfd) so the caller's condition
// survives.
void MacroAssembler::IncrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    IncrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}
2559
2560
// Conditionally decrements a stats counter when condition |cc| holds,
// preserving EFLAGS across the update (pushfd/popfd).
void MacroAssembler::DecrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    DecrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}
2574
2575
// Debug-only check: emits a Check (abort unless |cc| holds) only when
// debug code emission is enabled; otherwise emits nothing.
void MacroAssembler::Assert(Condition cc, BailoutReason reason) {
  if (emit_debug_code()) Check(cc, reason);
}
2579
2580
// Debug-only check that |elements| is a fast-elements backing store: its
// map must be the fixed-array, fixed-double-array, or fixed-COW-array map.
// Aborts otherwise. Emits nothing when debug code is disabled.
void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    Factory* factory = isolate()->factory();
    Label ok;
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_double_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_cow_array_map()));
    j(equal, &ok);
    Abort(kJSObjectWithFastElementsMapHasSlowElements);
    bind(&ok);
  }
}
2598
2599
// Emits code that aborts with |reason| unless condition |cc| holds.
// Unlike Assert, this is emitted in all build modes.
void MacroAssembler::Check(Condition cc, BailoutReason reason) {
  Label L;
  j(cc, &L);
  Abort(reason);
  // will not return here
  bind(&L);
}
2607
2608
// Emits a runtime check that esp satisfies the platform's activation frame
// alignment, executing int3 (breakpoint trap) on misalignment. Emits
// nothing on platforms whose required alignment is a single pointer.
void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = base::OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    Label alignment_as_expected;
    test(esp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}
2622
2623
// Emits code that terminates execution with bailout |reason|: pushes the
// reason (as a Smi) and calls Runtime::kAbort, faking a stack frame if the
// current code has none. In debug builds the reason text is recorded in the
// reloc comments, and --trap-on-abort replaces all of this with int3.
void MacroAssembler::Abort(BailoutReason reason) {
#ifdef DEBUG
  const char* msg = GetBailoutReason(reason);
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }

  if (FLAG_trap_on_abort) {
    int3();
    return;
  }
#endif

  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(reason))));
  // Disable stub call restrictions to always allow calls to abort.
  if (!has_frame_) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    CallRuntime(Runtime::kAbort, 1);
  } else {
    CallRuntime(Runtime::kAbort, 1);
  }
  // will not return here
  int3();
}
2651
2652
// Loads |map|'s descriptor array into |descriptors|.
void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  mov(descriptors, FieldOperand(map, Map::kDescriptorsOffset));
}
2657
2658
// Extracts the number of own descriptors from |map|'s bit field 3 into
// |dst|.
void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
  mov(dst, FieldOperand(map, Map::kBitField3Offset));
  DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
}
2663
2664
// Loads the getter or setter (per |accessor|) of the AccessorPair stored at
// |accessor_index| in |holder|'s descriptor array into |dst|. The chain is:
// holder -> map -> descriptors -> AccessorPair -> getter/setter.
void MacroAssembler::LoadAccessor(Register dst, Register holder,
                                  int accessor_index,
                                  AccessorComponent accessor) {
  mov(dst, FieldOperand(holder, HeapObject::kMapOffset));
  LoadInstanceDescriptors(dst, dst);
  mov(dst, FieldOperand(dst, DescriptorArray::GetValueOffset(accessor_index)));
  int offset = accessor == ACCESSOR_GETTER ? AccessorPair::kGetterOffset
                                           : AccessorPair::kSetterOffset;
  mov(dst, FieldOperand(dst, offset));
}
2675
2676
// Loads the double value 2^|power| into |dst| by constructing its IEEE 754
// bit pattern directly: the biased exponent is built in |scratch|, moved to
// the XMM register, and shifted up past the mantissa bits.
void MacroAssembler::LoadPowerOf2(XMMRegister dst,
                                  Register scratch,
                                  int power) {
  DCHECK(is_uintn(power + HeapNumber::kExponentBias,
                  HeapNumber::kExponentBits));
  mov(scratch, Immediate(power + HeapNumber::kExponentBias));
  movd(dst, scratch);
  psllq(dst, HeapNumber::kMantissaBits);
}
2686
2687
// Jumps to |failure| unless |instance_type| denotes a sequential one-byte
// string. |scratch| is clobbered (it may alias |instance_type|).
void MacroAssembler::JumpIfInstanceTypeIsNotSequentialOneByte(
    Register instance_type, Register scratch, Label* failure) {
  if (!scratch.is(instance_type)) {
    mov(scratch, instance_type);
  }
  // Mask to the bits that distinguish string-ness, representation and
  // encoding, then require exactly seq + one-byte + string.
  and_(scratch,
       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
  cmp(scratch, kStringTag | kSeqStringTag | kOneByteStringTag);
  j(not_equal, failure);
}
2698
2699
// Jumps to |failure| unless both |object1| and |object2| are sequential
// one-byte strings. Both scratch registers are clobbered. The two instance
// types are interleaved (via lea with times_8) so one compare checks both.
void MacroAssembler::JumpIfNotBothSequentialOneByteStrings(Register object1,
                                                           Register object2,
                                                           Register scratch1,
                                                           Register scratch2,
                                                           Label* failure) {
  // Check that both objects are not smis.
  STATIC_ASSERT(kSmiTag == 0);
  // ANDing the two values has a zero Smi tag iff either is a Smi.
  mov(scratch1, object1);
  and_(scratch1, object2);
  JumpIfSmi(scratch1, failure);

  // Load instance type for both strings.
  mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
  mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
  movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat one-byte strings.
  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatOneByteStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;
  // Interleave bits from both instance types and compare them in one check.
  DCHECK_EQ(0, kFlatOneByteStringMask & (kFlatOneByteStringMask << 3));
  and_(scratch1, kFlatOneByteStringMask);
  and_(scratch2, kFlatOneByteStringMask);
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmp(scratch1, kFlatOneByteStringTag | (kFlatOneByteStringTag << 3));
  j(not_equal, failure);
}
2730
2731
// Jumps to |not_unique_name| unless the instance type in |operand| is a
// unique name: either an internalized string (string + internalized bits
// both zero) or a Symbol.
void MacroAssembler::JumpIfNotUniqueNameInstanceType(Operand operand,
                                                     Label* not_unique_name,
                                                     Label::Distance distance) {
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  Label succeed;
  test(operand, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  j(zero, &succeed);
  cmpb(operand, static_cast<uint8_t>(SYMBOL_TYPE));
  j(not_equal, not_unique_name, distance);

  bind(&succeed);
}
2744
2745
// Debug checks before storing a character into a sequential string: verify
// |string| is a heap object, that its representation/encoding match
// |encoding_mask|, and that the untagged |index| is within [0, length).
// |value| is saved and restored around its use as a scratch register;
// |index| is temporarily Smi-tagged and untagged again before returning.
void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
                                               Register index,
                                               Register value,
                                               uint32_t encoding_mask) {
  Label is_object;
  JumpIfNotSmi(string, &is_object, Label::kNear);
  Abort(kNonObject);
  bind(&is_object);

  // Borrow |value| to check the string's instance type.
  push(value);
  mov(value, FieldOperand(string, HeapObject::kMapOffset));
  movzx_b(value, FieldOperand(value, Map::kInstanceTypeOffset));

  and_(value, Immediate(kStringRepresentationMask | kStringEncodingMask));
  cmp(value, Immediate(encoding_mask));
  pop(value);
  Check(equal, kUnexpectedStringType);

  // The index is assumed to be untagged coming in, tag it to compare with the
  // string length without using a temp register, it is restored at the end of
  // this function.
  SmiTag(index);
  Check(no_overflow, kIndexIsTooLarge);

  cmp(index, FieldOperand(string, String::kLengthOffset));
  Check(less, kIndexIsTooLarge);

  cmp(index, Immediate(Smi::FromInt(0)));
  Check(greater_equal, kIndexIsNegative);

  // Restore the index
  SmiUntag(index);
}
2779
2780
// Reserves stack space for |num_arguments| outgoing C arguments, aligning
// esp to the platform's activation frame alignment when required. The
// pre-alignment esp is saved in the extra slot beyond the arguments so
// CallCFunction can restore it. |scratch| is clobbered.
void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
  int frame_alignment = base::OS::ActivationFrameAlignment();
  if (frame_alignment != 0) {
    // Make stack end at alignment and make room for num_arguments words
    // and the original value of esp.
    mov(scratch, esp);
    sub(esp, Immediate((num_arguments + 1) * kPointerSize));
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    and_(esp, -frame_alignment);
    mov(Operand(esp, num_arguments * kPointerSize), scratch);
  } else {
    sub(esp, Immediate(num_arguments * kPointerSize));
  }
}
2795
2796
// Calls the external C function |function| with |num_arguments| stack
// arguments prepared by PrepareCallCFunction. The function address is
// loaded into eax, which is safe to trash as it holds the return value.
void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Trashing eax is ok as it will be the return value.
  mov(eax, Immediate(function));
  CallCFunction(eax, num_arguments);
}
2803
2804
// Calls the C function whose address is in |function| and then unwinds the
// argument area set up by PrepareCallCFunction: on aligned-stack platforms
// the saved pre-alignment esp is reloaded, otherwise esp is bumped past the
// arguments. In debug mode the stack alignment is verified first.
void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  DCHECK(has_frame());
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(function);
  if (base::OS::ActivationFrameAlignment() != 0) {
    mov(esp, Operand(esp, num_arguments * kPointerSize));
  } else {
    add(esp, Immediate(num_arguments * kPointerSize));
  }
}
2820
2821
#ifdef DEBUG
// Debug helper: returns true if any two of the (up to eight) valid
// registers refer to the same hardware register. Invalid registers are
// ignored. Aliasing is detected by comparing the count of valid arguments
// against the number of distinct bits in the combined register list.
bool AreAliased(Register reg1,
                Register reg2,
                Register reg3,
                Register reg4,
                Register reg5,
                Register reg6,
                Register reg7,
                Register reg8) {
  int n_of_valid_regs = reg1.is_valid() + reg2.is_valid() +
      reg3.is_valid() + reg4.is_valid() + reg5.is_valid() + reg6.is_valid() +
      reg7.is_valid() + reg8.is_valid();

  RegList regs = 0;
  if (reg1.is_valid()) regs |= reg1.bit();
  if (reg2.is_valid()) regs |= reg2.bit();
  if (reg3.is_valid()) regs |= reg3.bit();
  if (reg4.is_valid()) regs |= reg4.bit();
  if (reg5.is_valid()) regs |= reg5.bit();
  if (reg6.is_valid()) regs |= reg6.bit();
  if (reg7.is_valid()) regs |= reg7.bit();
  if (reg8.is_valid()) regs |= reg8.bit();
  int n_of_non_aliasing_regs = NumRegs(regs);

  // Duplicates collapse in the bitmask, so a mismatch means aliasing.
  return n_of_valid_regs != n_of_non_aliasing_regs;
}
#endif
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002849
2850
// Sets up a macro assembler that writes directly over the existing code at
// |address| (patching in place). The extra kGap bytes let the assembler
// emit |size| bytes without tripping its buffer-full checks.
CodePatcher::CodePatcher(Isolate* isolate, byte* address, int size)
    : address_(address),
      size_(size),
      masm_(isolate, address, size + Assembler::kGap, CodeObjectRequired::kNo) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap on order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
2860
2861
CodePatcher::~CodePatcher() {
  // Indicate that code has changed: flush the patched range from the
  // instruction cache.
  Assembler::FlushICache(masm_.isolate(), address_, size_);

  // Check that the code was patched as expected: exactly size_ bytes were
  // emitted, and no relocation information was produced along the way.
  DCHECK(masm_.pc_ == address_ + size_);
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
2870
2871
// Emits a test of the page-flag bits in |mask| for the page containing
// |object|, and jumps to |condition_met| when the test satisfies |cc|
// (zero = all masked bits clear, not_zero = some masked bit set).
// |scratch| is clobbered and may alias |object|.
void MacroAssembler::CheckPageFlag(
    Register object,
    Register scratch,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  DCHECK(cc == zero || cc == not_zero);
  // Compute the page start (the MemoryChunk header address) by masking off
  // the low bits of the object's address.
  if (scratch.is(object)) {
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    mov(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  if (mask < (1 << kBitsPerByte)) {
    // A mask that fits into one byte allows the shorter byte-sized test.
    test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
           static_cast<uint8_t>(mask));
  } else {
    test(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}
2894
2895
// Like CheckPageFlag, but for a map whose page is known at assembly time:
// the page's flag word is addressed directly through an external reference,
// so no scratch register is needed.
void MacroAssembler::CheckPageFlagForMap(
    Handle<Map> map,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  DCHECK(cc == zero || cc == not_zero);
  Page* page = Page::FromAddress(map->address());
  DCHECK(!serializer_enabled());  // Serializer cannot match page_flags.
  ExternalReference reference(ExternalReference::page_flags(page));
  // The inlined static address check of the page's flags relies
  // on maps never being compacted.
  DCHECK(!isolate()->heap()->mark_compact_collector()->
         IsOnEvacuationCandidate(*map));
  if (mask < (1 << kBitsPerByte)) {
    // A mask that fits into one byte allows the shorter byte-sized test.
    test_b(Operand::StaticVariable(reference), static_cast<uint8_t>(mask));
  } else {
    test(Operand::StaticVariable(reference), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}
2917
2918
// Jumps to |on_black| if the incremental-marking mark bits for |object| are
// "11" (black). Both scratch registers are clobbered; ecx is clobbered too
// (see GetMarkBits).
void MacroAssembler::JumpIfBlack(Register object,
                                 Register scratch0,
                                 Register scratch1,
                                 Label* on_black,
                                 Label::Distance on_black_near) {
  HasColor(object, scratch0, scratch1, on_black, on_black_near, 1,
           1);  // kBlackBitPattern.
  // The hard-coded (1, 1) above must stay in sync with the black pattern.
  DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
}
2928
2929
// Jumps to |has_color| if the two mark bits for |object| equal
// (first_bit, second_bit). Clobbers both scratch registers and ecx
// (via GetMarkBits).
void MacroAssembler::HasColor(Register object,
                              Register bitmap_scratch,
                              Register mask_scratch,
                              Label* has_color,
                              Label::Distance has_color_distance,
                              int first_bit,
                              int second_bit) {
  DCHECK(!AreAliased(object, bitmap_scratch, mask_scratch, ecx));

  GetMarkBits(object, bitmap_scratch, mask_scratch);

  Label other_color, word_boundary;
  // Check the first mark bit; bail out early if it does not match.
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(first_bit == 1 ? zero : not_zero, &other_color, Label::kNear);
  add(mask_scratch, mask_scratch);  // Shift left 1 by adding.
  // If the shift overflowed, the second bit lives in the next bitmap cell.
  j(zero, &word_boundary, Label::kNear);
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  jmp(&other_color, Label::kNear);

  // The second bit is bit 0 of the following bitmap cell.
  bind(&word_boundary);
  test_b(Operand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize), 1);

  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  bind(&other_color);
}
2956
2957
// Computes the location of the mark bits for the object at |addr_reg|:
// |bitmap_reg| receives the page start plus the byte offset of the bitmap
// cell (callers add MemoryChunk::kHeaderSize when dereferencing), and
// |mask_reg| receives a mask with a single bit set for the object's first
// mark bit. Clobbers ecx (used as the shift count for shl_cl).
void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  DCHECK(!AreAliased(addr_reg, mask_reg, bitmap_reg, ecx));
  // Page start: mask off the low bits of the address.
  mov(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
  and_(bitmap_reg, addr_reg);
  // Byte offset of the bitmap cell within the page's bitmap.
  mov(ecx, addr_reg);
  int shift =
      Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
  shr(ecx, shift);
  and_(ecx,
       (Page::kPageAlignmentMask >> shift) & ~(Bitmap::kBytesPerCell - 1));

  add(bitmap_reg, ecx);
  // Bit index within the cell: one mark-bit pair per pointer-sized word.
  mov(ecx, addr_reg);
  shr(ecx, kPointerSizeLog2);
  and_(ecx, (1 << Bitmap::kBitsPerCellLog2) - 1);
  mov(mask_reg, Immediate(1));
  shl_cl(mask_reg);
}
2978
2979
// Jumps to |value_is_white| if the mark bits for |value| are "00" (white).
// Clobbers both scratch registers and ecx (via GetMarkBits). Note the jump
// is emitted with Label::kNear regardless of |distance|.
void MacroAssembler::JumpIfWhite(Register value, Register bitmap_scratch,
                                 Register mask_scratch, Label* value_is_white,
                                 Label::Distance distance) {
  DCHECK(!AreAliased(value, bitmap_scratch, mask_scratch, ecx));
  GetMarkBits(value, bitmap_scratch, mask_scratch);

  // If the value is black or grey we don't need to do anything.
  DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0);
  DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
  DCHECK(strcmp(Marking::kGreyBitPattern, "10") == 0);
  DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

  // Since both black and grey have a 1 in the first position and white does
  // not have a 1 there we only need to check one bit.
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(zero, value_is_white, Label::kNear);
}
2997
2998
// Loads the enum-cache length from |map|'s bit field 3 into |dst| as a smi.
void MacroAssembler::EnumLength(Register dst, Register map) {
  // EnumLengthBits occupy the low bits of bit field 3, so masking suffices.
  STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
  mov(dst, FieldOperand(map, Map::kBitField3Offset));
  and_(dst, Immediate(Map::EnumLengthBits::kMask));
  SmiTag(dst);
}
3005
3006
// Walks the prototype chain of the receiver (expected in eax) and jumps to
// |call_runtime| unless every map has a usable enum cache and every object
// (including the receiver) has no elements. Clobbers ebx, ecx and edx.
void MacroAssembler::CheckEnumCache(Label* call_runtime) {
  Label next, start;
  mov(ecx, eax);

  // Check if the enum length field is properly initialized, indicating that
  // there is an enum cache.
  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));

  EnumLength(edx, ebx);
  cmp(edx, Immediate(Smi::FromInt(kInvalidEnumCacheSentinel)));
  j(equal, call_runtime);

  jmp(&start);

  bind(&next);
  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));

  // For all objects but the receiver, check that the cache is empty.
  EnumLength(edx, ebx);
  cmp(edx, Immediate(Smi::FromInt(0)));
  j(not_equal, call_runtime);

  bind(&start);

  // Check that there are no elements. Register ecx contains the current JS
  // object we've reached through the prototype chain.
  Label no_elements;
  mov(ecx, FieldOperand(ecx, JSObject::kElementsOffset));
  cmp(ecx, isolate()->factory()->empty_fixed_array());
  j(equal, &no_elements);

  // Second chance, the object may be using the empty slow element dictionary.
  cmp(ecx, isolate()->factory()->empty_slow_element_dictionary());
  j(not_equal, call_runtime);

  bind(&no_elements);
  // Advance to the prototype; stop once we reach null.
  mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
  cmp(ecx, isolate()->factory()->null_value());
  j(not_equal, &next);
}
3047
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003048
// Tests whether an AllocationMemento immediately follows the JSArray in
// |receiver_reg|. Jumps to |no_memento_found| if the candidate address is
// outside the active new-space region; otherwise leaves the comparison of
// the candidate's map against the allocation-memento map in the flags, so
// the caller must follow up with j(equal, ...) for "memento found".
// Clobbers |scratch_reg|.
void MacroAssembler::TestJSArrayForAllocationMemento(
    Register receiver_reg,
    Register scratch_reg,
    Label* no_memento_found) {
  ExternalReference new_space_start =
      ExternalReference::new_space_start(isolate());
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Address just past where a trailing memento would end.
  lea(scratch_reg, Operand(receiver_reg,
      JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag));
  cmp(scratch_reg, Immediate(new_space_start));
  j(less, no_memento_found);
  cmp(scratch_reg, Operand::StaticVariable(new_space_allocation_top));
  j(greater, no_memento_found);
  // Compare the map word of the would-be memento; result left in flags.
  cmp(MemOperand(scratch_reg, -AllocationMemento::kSize),
      Immediate(isolate()->factory()->allocation_memento_map()));
}
3067
3068
// Walks the prototype chain of |object| and jumps to |found| if any
// prototype is a special receiver (instance type below JS_OBJECT_TYPE) or
// has dictionary-mode elements. Falls through when the chain ends at null.
// Clobbers both scratch registers.
void MacroAssembler::JumpIfDictionaryInPrototypeChain(
    Register object,
    Register scratch0,
    Register scratch1,
    Label* found) {
  DCHECK(!scratch1.is(scratch0));
  Factory* factory = isolate()->factory();
  Register current = scratch0;
  Label loop_again, end;

  // Start with the first prototype: object -> map -> prototype.
  mov(current, object);
  mov(current, FieldOperand(current, HeapObject::kMapOffset));
  mov(current, FieldOperand(current, Map::kPrototypeOffset));
  cmp(current, Immediate(factory->null_value()));
  j(equal, &end);

  // Loop based on the map going up the prototype chain.
  bind(&loop_again);
  mov(current, FieldOperand(current, HeapObject::kMapOffset));
  // Proxies and value wrappers sort below JS_OBJECT_TYPE, so a single
  // unsigned comparison detects all special receiver types.
  STATIC_ASSERT(JS_PROXY_TYPE < JS_OBJECT_TYPE);
  STATIC_ASSERT(JS_VALUE_TYPE < JS_OBJECT_TYPE);
  CmpInstanceType(current, JS_OBJECT_TYPE);
  j(below, found);
  // Check the elements kind stored in the map's bit field 2.
  mov(scratch1, FieldOperand(current, Map::kBitField2Offset));
  DecodeField<Map::ElementsKindBits>(scratch1);
  cmp(scratch1, Immediate(DICTIONARY_ELEMENTS));
  j(equal, found);
  mov(current, FieldOperand(current, Map::kPrototypeOffset));
  cmp(current, Immediate(factory->null_value()));
  j(not_equal, &loop_again);

  bind(&end);
}
3103
3104
// Computes edx = dividend / divisor, truncated toward zero, via the
// multiply-by-magic-number technique for signed division by a constant
// (Granlund & Montgomery; see also Hacker's Delight ch. 10).
// Clobbers eax and edx; |dividend| must be neither and is left unchanged.
void MacroAssembler::TruncatingDiv(Register dividend, int32_t divisor) {
  DCHECK(!dividend.is(eax));
  DCHECK(!dividend.is(edx));
  base::MagicNumbersForDivision<uint32_t> mag =
      base::SignedDivisionByConstant(static_cast<uint32_t>(divisor));
  // edx:eax = dividend * multiplier; the high half (edx) approximates the
  // quotient.
  mov(eax, Immediate(mag.multiplier));
  imul(dividend);
  // Correction terms for when the magic multiplier's sign bit is set.
  bool neg = (mag.multiplier & (static_cast<uint32_t>(1) << 31)) != 0;
  if (divisor > 0 && neg) add(edx, dividend);
  if (divisor < 0 && !neg && mag.multiplier > 0) sub(edx, dividend);
  if (mag.shift > 0) sar(edx, mag.shift);
  // Add the dividend's sign bit to round the quotient toward zero for
  // negative dividends.
  mov(eax, dividend);
  shr(eax, 31);
  add(edx, eax);
}
3120
3121
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003122} // namespace internal
3123} // namespace v8
Leon Clarkef7060e22010-06-03 12:02:55 +01003124
3125#endif // V8_TARGET_ARCH_IA32