// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <limits.h>  // For LONG_MIN, LONG_MAX.

#if V8_TARGET_ARCH_MIPS

#include "src/base/bits.h"
#include "src/base/division-by-constant.h"
#include "src/bootstrapper.h"
#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/mips/macro-assembler-mips.h"
#include "src/register-configuration.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {

MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size,
                               CodeObjectRequired create_code_object)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      has_frame_(false),
      has_double_zero_reg_set_(false) {
  if (create_code_object == CodeObjectRequired::kYes) {
    code_object_ =
        Handle<Object>::New(isolate()->heap()->undefined_value(), isolate());
  }
}


void MacroAssembler::Load(Register dst,
                          const MemOperand& src,
                          Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8()) {
    lb(dst, src);
  } else if (r.IsUInteger8()) {
    lbu(dst, src);
  } else if (r.IsInteger16()) {
    lh(dst, src);
  } else if (r.IsUInteger16()) {
    lhu(dst, src);
  } else {
    lw(dst, src);
  }
}


void MacroAssembler::Store(Register src,
                           const MemOperand& dst,
                           Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8() || r.IsUInteger8()) {
    sb(src, dst);
  } else if (r.IsInteger16() || r.IsUInteger16()) {
    sh(src, dst);
  } else {
    if (r.IsHeapObject()) {
      AssertNotSmi(src);
    } else if (r.IsSmi()) {
      AssertSmi(src);
    }
    sw(src, dst);
  }
}


void MacroAssembler::LoadRoot(Register destination,
                              Heap::RootListIndex index) {
  lw(destination, MemOperand(s6, index << kPointerSizeLog2));
}


void MacroAssembler::LoadRoot(Register destination,
                              Heap::RootListIndex index,
                              Condition cond,
                              Register src1, const Operand& src2) {
  Branch(2, NegateCondition(cond), src1, src2);
  lw(destination, MemOperand(s6, index << kPointerSizeLog2));
}


void MacroAssembler::StoreRoot(Register source,
                               Heap::RootListIndex index) {
  DCHECK(Heap::RootCanBeWrittenAfterInitialization(index));
  sw(source, MemOperand(s6, index << kPointerSizeLog2));
}


void MacroAssembler::StoreRoot(Register source,
                               Heap::RootListIndex index,
                               Condition cond,
                               Register src1, const Operand& src2) {
  DCHECK(Heap::RootCanBeWrittenAfterInitialization(index));
  Branch(2, NegateCondition(cond), src1, src2);
  sw(source, MemOperand(s6, index << kPointerSizeLog2));
}

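// A sketch of the stack produced by PushCommonFrame with a valid marker,
// read off the Push order below (higher addresses first):
//
//   [sp + 2 * kPointerSize]  saved ra
//   [sp + 1 * kPointerSize]  saved fp   <-- fp points here afterwards
//   [sp + 0]                 marker_reg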
void MacroAssembler::PushCommonFrame(Register marker_reg) {
  if (marker_reg.is_valid()) {
    Push(ra, fp, marker_reg);
    Addu(fp, sp, Operand(kPointerSize));
  } else {
    Push(ra, fp);
    mov(fp, sp);
  }
}

void MacroAssembler::PopCommonFrame(Register marker_reg) {
  if (marker_reg.is_valid()) {
    Pop(ra, fp, marker_reg);
  } else {
    Pop(ra, fp);
  }
}

void MacroAssembler::PushStandardFrame(Register function_reg) {
  int offset = -StandardFrameConstants::kContextOffset;
  if (function_reg.is_valid()) {
    Push(ra, fp, cp, function_reg);
    offset += kPointerSize;
  } else {
    Push(ra, fp, cp);
  }
  Addu(fp, sp, Operand(offset));
}

// Push and pop all registers that can hold pointers.
void MacroAssembler::PushSafepointRegisters() {
  // Safepoints expect a block of kNumSafepointRegisters values on the
  // stack, so adjust the stack for unsaved registers.
  const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters;
  DCHECK(num_unsaved >= 0);
  if (num_unsaved > 0) {
    Subu(sp, sp, Operand(num_unsaved * kPointerSize));
  }
  MultiPush(kSafepointSavedRegisters);
}


void MacroAssembler::PopSafepointRegisters() {
  const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters;
  MultiPop(kSafepointSavedRegisters);
  if (num_unsaved > 0) {
    Addu(sp, sp, Operand(num_unsaved * kPointerSize));
  }
}


void MacroAssembler::StoreToSafepointRegisterSlot(Register src, Register dst) {
  sw(src, SafepointRegisterSlot(dst));
}


void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  lw(dst, SafepointRegisterSlot(src));
}


int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
  // The registers are pushed starting with the highest encoding,
  // which means that lowest encodings are closest to the stack pointer.
  return kSafepointRegisterStackIndexMap[reg_code];
}


MemOperand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return MemOperand(sp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}


MemOperand MacroAssembler::SafepointRegistersAndDoublesSlot(Register reg) {
  UNIMPLEMENTED_MIPS();
  // General purpose registers are pushed last on the stack.
  int doubles_size = DoubleRegister::kMaxNumRegisters * kDoubleSize;
  int register_offset = SafepointRegisterStackIndex(reg.code()) * kPointerSize;
  return MemOperand(sp, doubles_size + register_offset);
}


void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                Label* branch) {
  DCHECK(cc == eq || cc == ne);
  const int mask =
      1 << MemoryChunk::IN_FROM_SPACE | 1 << MemoryChunk::IN_TO_SPACE;
  CheckPageFlag(object, scratch, mask, cc, branch);
}

// Clobbers object, dst, value, and ra, if (ra_status == kRAHasBeenSaved)
// The register 'object' contains a heap object pointer.  The heap object
// tag is shifted away.
void MacroAssembler::RecordWriteField(
    Register object,
    int offset,
    Register value,
    Register dst,
    RAStatus ra_status,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  DCHECK(!AreAliased(value, dst, t8, object));
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done);
  }

  // Although the object register is tagged, the offset is relative to the
  // start of the object, so the offset must be a multiple of kPointerSize.
  DCHECK(IsAligned(offset, kPointerSize));

  Addu(dst, object, Operand(offset - kHeapObjectTag));
  if (emit_debug_code()) {
    Label ok;
    And(t8, dst, Operand((1 << kPointerSizeLog2) - 1));
    Branch(&ok, eq, t8, Operand(zero_reg));
    stop("Unaligned cell in write barrier");
    bind(&ok);
  }

  RecordWrite(object,
              dst,
              value,
              ra_status,
              save_fp,
              remembered_set_action,
              OMIT_SMI_CHECK,
              pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    li(value, Operand(bit_cast<int32_t>(kZapValue + 4)));
    li(dst, Operand(bit_cast<int32_t>(kZapValue + 8)));
  }
}

// Clobbers object, dst, map, and ra, if (ra_status == kRAHasBeenSaved)
void MacroAssembler::RecordWriteForMap(Register object,
                                       Register map,
                                       Register dst,
                                       RAStatus ra_status,
                                       SaveFPRegsMode fp_mode) {
  if (emit_debug_code()) {
    DCHECK(!dst.is(at));
    lw(dst, FieldMemOperand(map, HeapObject::kMapOffset));
    Check(eq,
          kWrongAddressOrValuePassedToRecordWrite,
          dst,
          Operand(isolate()->factory()->meta_map()));
  }

  if (!FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    lw(at, FieldMemOperand(object, HeapObject::kMapOffset));
    Check(eq,
          kWrongAddressOrValuePassedToRecordWrite,
          map,
          Operand(at));
  }

  Label done;

  // A single check of the map's pages interesting flag suffices, since it is
  // only set during incremental collection, and then it's also guaranteed that
  // the from object's page's interesting flag is also set.  This optimization
  // relies on the fact that maps can never be in new space.
  CheckPageFlag(map,
                map,  // Used as scratch.
                MemoryChunk::kPointersToHereAreInterestingMask,
                eq,
                &done);

  Addu(dst, object, Operand(HeapObject::kMapOffset - kHeapObjectTag));
  if (emit_debug_code()) {
    Label ok;
    And(at, dst, Operand((1 << kPointerSizeLog2) - 1));
    Branch(&ok, eq, at, Operand(zero_reg));
    stop("Unaligned cell in write barrier");
    bind(&ok);
  }

  // Record the actual write.
  if (ra_status == kRAHasNotBeenSaved) {
    push(ra);
  }
  RecordWriteStub stub(isolate(), object, map, dst, OMIT_REMEMBERED_SET,
                       fp_mode);
  CallStub(&stub);
  if (ra_status == kRAHasNotBeenSaved) {
    pop(ra);
  }

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1, at, dst);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    li(dst, Operand(bit_cast<int32_t>(kZapValue + 12)));
    li(map, Operand(bit_cast<int32_t>(kZapValue + 16)));
  }
}

// Clobbers object, address, value, and ra, if (ra_status == kRAHasBeenSaved)
// The register 'object' contains a heap object pointer.  The heap object
// tag is shifted away.
void MacroAssembler::RecordWrite(
    Register object,
    Register address,
    Register value,
    RAStatus ra_status,
    SaveFPRegsMode fp_mode,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  DCHECK(!AreAliased(object, address, value, t8));
  DCHECK(!AreAliased(object, address, value, t9));

  if (emit_debug_code()) {
    lw(at, MemOperand(address));
    Assert(
        eq, kWrongAddressOrValuePassedToRecordWrite, at, Operand(value));
  }

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    DCHECK_EQ(0, kSmiTag);
    JumpIfSmi(value, &done);
  }

  if (pointers_to_here_check_for_value != kPointersToHereAreAlwaysInteresting) {
    CheckPageFlag(value,
                  value,  // Used as scratch.
                  MemoryChunk::kPointersToHereAreInterestingMask,
                  eq,
                  &done);
  }
  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                eq,
                &done);

  // Record the actual write.
  if (ra_status == kRAHasNotBeenSaved) {
    push(ra);
  }
  RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
                       fp_mode);
  CallStub(&stub);
  if (ra_status == kRAHasNotBeenSaved) {
    pop(ra);
  }

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1, at,
                   value);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    li(address, Operand(bit_cast<int32_t>(kZapValue + 12)));
    li(value, Operand(bit_cast<int32_t>(kZapValue + 16)));
  }
}

void MacroAssembler::RecordWriteCodeEntryField(Register js_function,
                                               Register code_entry,
                                               Register scratch) {
  const int offset = JSFunction::kCodeEntryOffset;

  // Since a code entry (value) is always in old space, we don't need to update
  // the remembered set.  If incremental marking is off, there is nothing for
  // us to do.
  if (!FLAG_incremental_marking) return;

  DCHECK(js_function.is(a1));
  DCHECK(code_entry.is(t0));
  DCHECK(scratch.is(t1));
  AssertNotSmi(js_function);

  if (emit_debug_code()) {
    Addu(scratch, js_function, Operand(offset - kHeapObjectTag));
    lw(at, MemOperand(scratch));
    Assert(eq, kWrongAddressOrValuePassedToRecordWrite, at,
           Operand(code_entry));
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  CheckPageFlag(code_entry, scratch,
                MemoryChunk::kPointersToHereAreInterestingMask, eq, &done);
  CheckPageFlag(js_function, scratch,
                MemoryChunk::kPointersFromHereAreInterestingMask, eq, &done);

  const Register dst = scratch;
  Addu(dst, js_function, Operand(offset - kHeapObjectTag));

  // Save caller-saved registers. js_function and code_entry are in the
  // caller-saved register list.
  DCHECK(kJSCallerSaved & js_function.bit());
  DCHECK(kJSCallerSaved & code_entry.bit());
  MultiPush(kJSCallerSaved | ra.bit());

  int argument_count = 3;

  PrepareCallCFunction(argument_count, 0, code_entry);

  mov(a0, js_function);
  mov(a1, dst);
  li(a2, Operand(ExternalReference::isolate_address(isolate())));

  {
    AllowExternalCallThatCantCauseGC scope(this);
    CallCFunction(
        ExternalReference::incremental_marking_record_write_code_entry_function(
            isolate()),
        argument_count);
  }

  // Restore caller-saved registers.
  MultiPop(kJSCallerSaved | ra.bit());

  bind(&done);
}

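// A note on the buffer-full check in RememberedSetHelper below (a reading of
// the code, assuming StoreBuffer::kStoreBufferMask covers the buffer size):
// after the incremented top is written back, t8 = new_top & kStoreBufferMask
// is zero exactly when the top has reached a buffer boundary, i.e. the buffer
// is full; a non-zero result skips the StoreBufferOverflowStub call.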
void MacroAssembler::RememberedSetHelper(Register object,  // For debug tests.
                                         Register address,
                                         Register scratch,
                                         SaveFPRegsMode fp_mode,
                                         RememberedSetFinalAction and_then) {
  Label done;
  if (emit_debug_code()) {
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok);
    stop("Remembered set pointer is in new space");
    bind(&ok);
  }
  // Load store buffer top.
  ExternalReference store_buffer =
      ExternalReference::store_buffer_top(isolate());
  li(t8, Operand(store_buffer));
  lw(scratch, MemOperand(t8));
  // Store pointer to buffer and increment buffer top.
  sw(address, MemOperand(scratch));
  Addu(scratch, scratch, kPointerSize);
  // Write back new top of buffer.
  sw(scratch, MemOperand(t8));
  // Call stub on end of buffer.
  // Check for end of buffer.
  And(t8, scratch, Operand(StoreBuffer::kStoreBufferMask));
  if (and_then == kFallThroughAtEnd) {
    Branch(&done, ne, t8, Operand(zero_reg));
  } else {
    DCHECK(and_then == kReturnAtEnd);
    Ret(ne, t8, Operand(zero_reg));
  }
  push(ra);
  StoreBufferOverflowStub store_buffer_overflow(isolate(), fp_mode);
  CallStub(&store_buffer_overflow);
  pop(ra);
  bind(&done);
  if (and_then == kReturnAtEnd) {
    Ret();
  }
}


// -----------------------------------------------------------------------------
// Allocation support.


void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;
  Register temporary = t8;

  DCHECK(!holder_reg.is(scratch));
  DCHECK(!holder_reg.is(at));
  DCHECK(!scratch.is(at));

  // Load the current lexical context from the active StandardFrame, which
  // may require crawling past STUB frames.
  Label load_context;
  Label has_context;
  mov(at, fp);
  bind(&load_context);
  lw(scratch, MemOperand(at, CommonFrameConstants::kContextOrFrameTypeOffset));
  // Pass a temporary register; otherwise JumpIfNotSmi modifies register at.
  JumpIfNotSmi(scratch, &has_context, temporary);
  lw(at, MemOperand(at, CommonFrameConstants::kCallerFPOffset));
  Branch(&load_context);
  bind(&has_context);

  // In debug mode, make sure the lexical context is set.
#ifdef DEBUG
  Check(ne, kWeShouldNotHaveAnEmptyLexicalContext,
        scratch, Operand(zero_reg));
#endif

  // Load the native context of the current context.
  lw(scratch, ContextMemOperand(scratch, Context::NATIVE_CONTEXT_INDEX));

  // Check the context is a native context.
  if (emit_debug_code()) {
    push(holder_reg);  // Temporarily save holder on the stack.
    // Read the first word and compare to the native_context_map.
    lw(holder_reg, FieldMemOperand(scratch, HeapObject::kMapOffset));
    LoadRoot(at, Heap::kNativeContextMapRootIndex);
    Check(eq, kJSGlobalObjectNativeContextShouldBeANativeContext,
          holder_reg, Operand(at));
    pop(holder_reg);  // Restore holder.
  }

  // Check if both contexts are the same.
  lw(at, FieldMemOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  Branch(&same_contexts, eq, scratch, Operand(at));

  // Check the context is a native context.
  if (emit_debug_code()) {
    push(holder_reg);  // Temporarily save holder on the stack.
    mov(holder_reg, at);  // Move at to its holding place.
    LoadRoot(at, Heap::kNullValueRootIndex);
    Check(ne, kJSGlobalProxyContextShouldNotBeNull,
          holder_reg, Operand(at));

    lw(holder_reg, FieldMemOperand(holder_reg, HeapObject::kMapOffset));
    LoadRoot(at, Heap::kNativeContextMapRootIndex);
    Check(eq, kJSGlobalObjectNativeContextShouldBeANativeContext,
          holder_reg, Operand(at));
    // Restoring at is not needed. at is reloaded below.
    pop(holder_reg);  // Restore holder.
    // Restore at to holder's context.
    lw(at, FieldMemOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  }

  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;

  lw(scratch, FieldMemOperand(scratch, token_offset));
  lw(at, FieldMemOperand(at, token_offset));
  Branch(miss, ne, scratch, Operand(at));

  bind(&same_contexts);
}

// Compute the hash code from the untagged key.  This must be kept in sync
// with ComputeIntegerHash in utils.h and KeyedLoadGenericStub in
// code-stub-hydrogen.cc
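//
// A reference sketch of the whole computation in C, derived from the
// step-by-step comments in the body below (assumes uint32_t arithmetic):
//
//   uint32_t hash = seed ^ key;
//   hash = ~hash + (hash << 15);
//   hash = hash ^ (hash >> 12);
//   hash = hash + (hash << 2);
//   hash = hash ^ (hash >> 4);
//   hash = hash * 2057;  // == hash + (hash << 3) + (hash << 11)
//   hash = hash ^ (hash >> 16);
//   hash = hash & 0x3fffffff;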
void MacroAssembler::GetNumberHash(Register reg0, Register scratch) {
  // First of all we assign the hash seed to scratch.
  LoadRoot(scratch, Heap::kHashSeedRootIndex);
  SmiUntag(scratch);

  // Xor original key with a seed.
  xor_(reg0, reg0, scratch);

  // Compute the hash code from the untagged key.  This must be kept in sync
  // with ComputeIntegerHash in utils.h.
  //
  // hash = ~hash + (hash << 15);
  nor(scratch, reg0, zero_reg);
  Lsa(reg0, scratch, reg0, 15);

  // hash = hash ^ (hash >> 12);
  srl(at, reg0, 12);
  xor_(reg0, reg0, at);

  // hash = hash + (hash << 2);
  Lsa(reg0, reg0, reg0, 2);

  // hash = hash ^ (hash >> 4);
  srl(at, reg0, 4);
  xor_(reg0, reg0, at);

  // hash = hash * 2057;
  sll(scratch, reg0, 11);
  Lsa(reg0, reg0, reg0, 3);
  addu(reg0, reg0, scratch);

  // hash = hash ^ (hash >> 16);
  srl(at, reg0, 16);
  xor_(reg0, reg0, at);
  And(reg0, reg0, Operand(0x3fffffff));
}

void MacroAssembler::LoadFromNumberDictionary(Label* miss,
                                              Register elements,
                                              Register key,
                                              Register result,
                                              Register reg0,
                                              Register reg1,
                                              Register reg2) {
  // Register use:
  //
  // elements - holds the slow-case elements of the receiver on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // key      - holds the smi key on entry.
  //            Unchanged unless 'result' is the same register.
  //
  //
  // result   - holds the result on exit if the load succeeded.
  //            Allowed to be the same as 'key' or 'elements'.
  //            Unchanged on bailout so 'key' or 'elements' can be used
  //            in further computation.
  //
  // Scratch registers:
  //
  // reg0 - holds the untagged key on entry and holds the hash once computed.
  //
  // reg1 - Used to hold the capacity mask of the dictionary.
  //
  // reg2 - Used for the index into the dictionary.
  // at   - Temporary (avoid MacroAssembler instructions also using 'at').
  Label done;

  GetNumberHash(reg0, reg1);

  // Compute the capacity mask.
  lw(reg1, FieldMemOperand(elements, SeededNumberDictionary::kCapacityOffset));
  sra(reg1, reg1, kSmiTagSize);
  Subu(reg1, reg1, Operand(1));

  // Generate an unrolled loop that performs a few probes before giving up.
  for (int i = 0; i < kNumberDictionaryProbes; i++) {
    // Use reg2 for index calculations and keep the hash intact in reg0.
    mov(reg2, reg0);
    // Compute the masked index: (hash + i + i * i) & mask.
    if (i > 0) {
      Addu(reg2, reg2, Operand(SeededNumberDictionary::GetProbeOffset(i)));
    }
    and_(reg2, reg2, reg1);

    // Scale the index by multiplying by the element size.
    DCHECK(SeededNumberDictionary::kEntrySize == 3);
    Lsa(reg2, reg2, reg2, 1);  // reg2 = reg2 * 3.

    // Check if the key is identical to the name.
    Lsa(reg2, elements, reg2, kPointerSizeLog2);

    lw(at, FieldMemOperand(reg2, SeededNumberDictionary::kElementsStartOffset));
    if (i != kNumberDictionaryProbes - 1) {
      Branch(&done, eq, key, Operand(at));
    } else {
      Branch(miss, ne, key, Operand(at));
    }
  }

  bind(&done);
  // Check that the value is a field property.
  // reg2: elements + (index * kPointerSize).
  const int kDetailsOffset =
      SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  lw(reg1, FieldMemOperand(reg2, kDetailsOffset));
  DCHECK_EQ(DATA, 0);
  And(at, reg1, Operand(Smi::FromInt(PropertyDetails::TypeField::kMask)));
  Branch(miss, ne, at, Operand(zero_reg));

  // Get the value at the masked, scaled index and return.
  const int kValueOffset =
      SeededNumberDictionary::kElementsStartOffset + kPointerSize;
  lw(result, FieldMemOperand(reg2, kValueOffset));
}

// ---------------------------------------------------------------------------
// Instruction macros.

void MacroAssembler::Addu(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    addu(rd, rs, rt.rm());
  } else {
    if (is_int16(rt.imm32_) && !MustUseReg(rt.rmode_)) {
      addiu(rd, rs, rt.imm32_);
    } else {
      // li handles the relocation.
      DCHECK(!rs.is(at));
      li(at, rt);
      addu(rd, rs, at);
    }
  }
}


void MacroAssembler::Subu(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    subu(rd, rs, rt.rm());
  } else {
    if (is_int16(rt.imm32_) && !MustUseReg(rt.rmode_)) {
      addiu(rd, rs, -rt.imm32_);  // No subiu instr, use addiu(x, y, -imm).
    } else {
      // li handles the relocation.
      DCHECK(!rs.is(at));
      li(at, rt);
      subu(rd, rs, at);
    }
  }
}

void MacroAssembler::Mul(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    if (IsMipsArchVariant(kLoongson)) {
      mult(rs, rt.rm());
      mflo(rd);
    } else {
      mul(rd, rs, rt.rm());
    }
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    if (IsMipsArchVariant(kLoongson)) {
      mult(rs, at);
      mflo(rd);
    } else {
      mul(rd, rs, at);
    }
  }
}

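// A note on operand ordering in the r6 paths below: when rd_lo aliases rs,
// muh is emitted before mul so both results are computed from the original
// rs before rd_lo overwrites it; otherwise mul comes first.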
void MacroAssembler::Mul(Register rd_hi, Register rd_lo,
                         Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    if (!IsMipsArchVariant(kMips32r6)) {
      mult(rs, rt.rm());
      mflo(rd_lo);
      mfhi(rd_hi);
    } else {
      if (rd_lo.is(rs)) {
        DCHECK(!rd_hi.is(rs));
        DCHECK(!rd_hi.is(rt.rm()) && !rd_lo.is(rt.rm()));
        muh(rd_hi, rs, rt.rm());
        mul(rd_lo, rs, rt.rm());
      } else {
        DCHECK(!rd_hi.is(rt.rm()) && !rd_lo.is(rt.rm()));
        mul(rd_lo, rs, rt.rm());
        muh(rd_hi, rs, rt.rm());
      }
    }
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    if (!IsMipsArchVariant(kMips32r6)) {
      mult(rs, at);
      mflo(rd_lo);
      mfhi(rd_hi);
    } else {
      if (rd_lo.is(rs)) {
        DCHECK(!rd_hi.is(rs));
        DCHECK(!rd_hi.is(at) && !rd_lo.is(at));
        muh(rd_hi, rs, at);
        mul(rd_lo, rs, at);
      } else {
        DCHECK(!rd_hi.is(at) && !rd_lo.is(at));
        mul(rd_lo, rs, at);
        muh(rd_hi, rs, at);
      }
    }
  }
}

void MacroAssembler::Mulu(Register rd_hi, Register rd_lo, Register rs,
                          const Operand& rt) {
  Register reg;
  if (rt.is_reg()) {
    reg = rt.rm();
  } else {
    DCHECK(!rs.is(at));
    reg = at;
    li(reg, rt);
  }

  if (!IsMipsArchVariant(kMips32r6)) {
    multu(rs, reg);
    mflo(rd_lo);
    mfhi(rd_hi);
  } else {
    if (rd_lo.is(rs)) {
      DCHECK(!rd_hi.is(rs));
      DCHECK(!rd_hi.is(reg) && !rd_lo.is(reg));
      muhu(rd_hi, rs, reg);
      mulu(rd_lo, rs, reg);
    } else {
      DCHECK(!rd_hi.is(reg) && !rd_lo.is(reg));
      mulu(rd_lo, rs, reg);
      muhu(rd_hi, rs, reg);
    }
  }
}

void MacroAssembler::Mulh(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    if (!IsMipsArchVariant(kMips32r6)) {
      mult(rs, rt.rm());
      mfhi(rd);
    } else {
      muh(rd, rs, rt.rm());
    }
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    if (!IsMipsArchVariant(kMips32r6)) {
      mult(rs, at);
      mfhi(rd);
    } else {
      muh(rd, rs, at);
    }
  }
}

void MacroAssembler::Mult(Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    mult(rs, rt.rm());
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    mult(rs, at);
  }
}


void MacroAssembler::Mulhu(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    if (!IsMipsArchVariant(kMips32r6)) {
      multu(rs, rt.rm());
      mfhi(rd);
    } else {
      muhu(rd, rs, rt.rm());
    }
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    if (!IsMipsArchVariant(kMips32r6)) {
      multu(rs, at);
      mfhi(rd);
    } else {
      muhu(rd, rs, at);
    }
  }
}


void MacroAssembler::Multu(Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    multu(rs, rt.rm());
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    multu(rs, at);
  }
}


void MacroAssembler::Div(Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    div(rs, rt.rm());
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    div(rs, at);
  }
}


void MacroAssembler::Div(Register rem, Register res,
                         Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    if (!IsMipsArchVariant(kMips32r6)) {
      div(rs, rt.rm());
      mflo(res);
      mfhi(rem);
    } else {
      div(res, rs, rt.rm());
      mod(rem, rs, rt.rm());
    }
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    if (!IsMipsArchVariant(kMips32r6)) {
      div(rs, at);
      mflo(res);
      mfhi(rem);
    } else {
      div(res, rs, at);
      mod(rem, rs, at);
    }
  }
}


void MacroAssembler::Div(Register res, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    if (!IsMipsArchVariant(kMips32r6)) {
      div(rs, rt.rm());
      mflo(res);
    } else {
      div(res, rs, rt.rm());
    }
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    if (!IsMipsArchVariant(kMips32r6)) {
      div(rs, at);
      mflo(res);
    } else {
      div(res, rs, at);
    }
  }
}


void MacroAssembler::Mod(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    if (!IsMipsArchVariant(kMips32r6)) {
      div(rs, rt.rm());
      mfhi(rd);
    } else {
      mod(rd, rs, rt.rm());
    }
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    if (!IsMipsArchVariant(kMips32r6)) {
      div(rs, at);
      mfhi(rd);
    } else {
      mod(rd, rs, at);
    }
  }
}


void MacroAssembler::Modu(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    if (!IsMipsArchVariant(kMips32r6)) {
      divu(rs, rt.rm());
      mfhi(rd);
    } else {
      modu(rd, rs, rt.rm());
    }
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    if (!IsMipsArchVariant(kMips32r6)) {
      divu(rs, at);
      mfhi(rd);
    } else {
      modu(rd, rs, at);
    }
  }
}


void MacroAssembler::Divu(Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    divu(rs, rt.rm());
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    divu(rs, at);
  }
}


void MacroAssembler::Divu(Register res, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    if (!IsMipsArchVariant(kMips32r6)) {
      divu(rs, rt.rm());
      mflo(res);
    } else {
      divu(res, rs, rt.rm());
    }
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    if (!IsMipsArchVariant(kMips32r6)) {
      divu(rs, at);
      mflo(res);
    } else {
      divu(res, rs, at);
    }
  }
}


void MacroAssembler::And(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    and_(rd, rs, rt.rm());
  } else {
    if (is_uint16(rt.imm32_) && !MustUseReg(rt.rmode_)) {
      andi(rd, rs, rt.imm32_);
    } else {
      // li handles the relocation.
      DCHECK(!rs.is(at));
      li(at, rt);
      and_(rd, rs, at);
    }
  }
}


void MacroAssembler::Or(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    or_(rd, rs, rt.rm());
  } else {
    if (is_uint16(rt.imm32_) && !MustUseReg(rt.rmode_)) {
      ori(rd, rs, rt.imm32_);
    } else {
      // li handles the relocation.
      DCHECK(!rs.is(at));
      li(at, rt);
      or_(rd, rs, at);
    }
  }
}


void MacroAssembler::Xor(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    xor_(rd, rs, rt.rm());
  } else {
    if (is_uint16(rt.imm32_) && !MustUseReg(rt.rmode_)) {
      xori(rd, rs, rt.imm32_);
    } else {
      // li handles the relocation.
      DCHECK(!rs.is(at));
      li(at, rt);
      xor_(rd, rs, at);
    }
  }
}


void MacroAssembler::Nor(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    nor(rd, rs, rt.rm());
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    nor(rd, rs, at);
  }
}


void MacroAssembler::Neg(Register rs, const Operand& rt) {
  DCHECK(rt.is_reg());
  DCHECK(!at.is(rs));
  DCHECK(!at.is(rt.rm()));
  li(at, -1);
  xor_(rs, rt.rm(), at);
}


void MacroAssembler::Slt(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    slt(rd, rs, rt.rm());
  } else {
    if (is_int16(rt.imm32_) && !MustUseReg(rt.rmode_)) {
      slti(rd, rs, rt.imm32_);
    } else {
      // li handles the relocation.
      DCHECK(!rs.is(at));
      li(at, rt);
      slt(rd, rs, at);
    }
  }
}


void MacroAssembler::Sltu(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    sltu(rd, rs, rt.rm());
  } else {
    if (is_uint16(rt.imm32_) && !MustUseReg(rt.rmode_)) {
      sltiu(rd, rs, rt.imm32_);
    } else {
      // li handles the relocation.
      DCHECK(!rs.is(at));
      li(at, rt);
      sltu(rd, rs, at);
    }
  }
}

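// Ror implements rotate-right.  On r2/r6 it maps onto rotr/rotrv; elsewhere
// it is synthesized from shifts via the usual identity (a sketch, for 32-bit
// values and 0 < n < 32):
//
//   rotr(x, n) == (x >> n) | (x << (32 - n))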
void MacroAssembler::Ror(Register rd, Register rs, const Operand& rt) {
  if (IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r6)) {
    if (rt.is_reg()) {
      rotrv(rd, rs, rt.rm());
    } else {
      rotr(rd, rs, rt.imm32_ & 0x1f);
    }
  } else {
    if (rt.is_reg()) {
      subu(at, zero_reg, rt.rm());
      sllv(at, rs, at);
      srlv(rd, rs, rt.rm());
      or_(rd, rd, at);
    } else {
      if (rt.imm32_ == 0) {
        srl(rd, rs, 0);
      } else {
        srl(at, rs, rt.imm32_ & 0x1f);
        sll(rd, rs, (0x20 - (rt.imm32_ & 0x1f)) & 0x1f);
        or_(rd, rd, at);
      }
    }
  }
}


void MacroAssembler::Pref(int32_t hint, const MemOperand& rs) {
  if (IsMipsArchVariant(kLoongson)) {
    lw(zero_reg, rs);
  } else {
    pref(hint, rs);
  }
}

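// Lsa ("load scaled address") computes rd = rt + (rs << sa), as the fallback
// path below spells out; e.g. Lsa(reg2, reg2, reg2, 1) in
// LoadFromNumberDictionary above is reg2 + (reg2 << 1), i.e. reg2 * 3.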
void MacroAssembler::Lsa(Register rd, Register rt, Register rs, uint8_t sa,
                         Register scratch) {
  DCHECK(sa >= 1 && sa <= 31);
  if (IsMipsArchVariant(kMips32r6) && sa <= 4) {
    lsa(rd, rt, rs, sa - 1);
  } else {
    Register tmp = rd.is(rt) ? scratch : rd;
    DCHECK(!tmp.is(rt));
    sll(tmp, rs, sa);
    Addu(rd, rt, tmp);
  }
}


// ------------Pseudo-instructions-------------

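// Ulw/Usw below implement unaligned word accesses: the lwr/lwl (and swr/swl)
// pair together cover the four bytes at offsets 0..3 from the base address,
// each instruction handling the bytes on its side of the word boundary.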
void MacroAssembler::Ulw(Register rd, const MemOperand& rs) {
  lwr(rd, rs);
  lwl(rd, MemOperand(rs.rm(), rs.offset() + 3));
}


void MacroAssembler::Usw(Register rd, const MemOperand& rs) {
  swr(rd, rs);
  swl(rd, MemOperand(rs.rm(), rs.offset() + 3));
}


void MacroAssembler::li(Register dst, Handle<Object> value, LiFlags mode) {
  AllowDeferredHandleDereference smi_check;
  if (value->IsSmi()) {
    li(dst, Operand(value), mode);
  } else {
    DCHECK(value->IsHeapObject());
    if (isolate()->heap()->InNewSpace(*value)) {
      Handle<Cell> cell = isolate()->factory()->NewCell(value);
      li(dst, Operand(cell));
      lw(dst, FieldMemOperand(dst, Cell::kValueOffset));
    } else {
      li(dst, Operand(value));
    }
  }
}

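// A worked example of the OPTIMIZE_SIZE cases in li below (illustrative
// values, not from the source):
//   li(t0, Operand(0x00000002))  ->  addiu t0, zero_reg, 2
//   li(t0, Operand(0x00008000))  ->  ori   t0, zero_reg, 0x8000
//   li(t0, Operand(0x12340000))  ->  lui   t0, 0x1234
//   li(t0, Operand(0x12345678))  ->  lui   t0, 0x1234
//                                    ori   t0, t0, 0x5678
// Relocatable values, or any mode other than OPTIMIZE_SIZE, always take the
// two-instruction lui/ori form so the loaded value can be patched later.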
void MacroAssembler::li(Register rd, Operand j, LiFlags mode) {
  DCHECK(!j.is_reg());
  BlockTrampolinePoolScope block_trampoline_pool(this);
  if (!MustUseReg(j.rmode_) && mode == OPTIMIZE_SIZE) {
    // Normal load of an immediate value which does not need Relocation Info.
    if (is_int16(j.imm32_)) {
      addiu(rd, zero_reg, j.imm32_);
    } else if (!(j.imm32_ & kHiMask)) {
      ori(rd, zero_reg, j.imm32_);
    } else if (!(j.imm32_ & kImm16Mask)) {
      lui(rd, (j.imm32_ >> kLuiShift) & kImm16Mask);
    } else {
      lui(rd, (j.imm32_ >> kLuiShift) & kImm16Mask);
      ori(rd, rd, (j.imm32_ & kImm16Mask));
    }
  } else {
    if (MustUseReg(j.rmode_)) {
      RecordRelocInfo(j.rmode_, j.imm32_);
    }
    // We always need the same number of instructions as we may need to patch
    // this code to load another value which may need 2 instructions to load.
    lui(rd, (j.imm32_ >> kLuiShift) & kImm16Mask);
    ori(rd, rd, (j.imm32_ & kImm16Mask));
  }
}

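// MultiPush stores the highest-numbered register closest to the original sp
// (the loop walks register codes downwards as the offset shrinks), so e.g.
// MultiPush(a0.bit() | a1.bit()) leaves a1 above a0 on the stack; MultiPop
// is the matching inverse.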
void MacroAssembler::MultiPush(RegList regs) {
  int16_t num_to_push = NumberOfBitsSet(regs);
  int16_t stack_offset = num_to_push * kPointerSize;

  Subu(sp, sp, Operand(stack_offset));
  for (int16_t i = kNumRegisters - 1; i >= 0; i--) {
    if ((regs & (1 << i)) != 0) {
      stack_offset -= kPointerSize;
      sw(ToRegister(i), MemOperand(sp, stack_offset));
    }
  }
}


void MacroAssembler::MultiPushReversed(RegList regs) {
  int16_t num_to_push = NumberOfBitsSet(regs);
  int16_t stack_offset = num_to_push * kPointerSize;

  Subu(sp, sp, Operand(stack_offset));
  for (int16_t i = 0; i < kNumRegisters; i++) {
    if ((regs & (1 << i)) != 0) {
      stack_offset -= kPointerSize;
      sw(ToRegister(i), MemOperand(sp, stack_offset));
    }
  }
}


void MacroAssembler::MultiPop(RegList regs) {
  int16_t stack_offset = 0;

  for (int16_t i = 0; i < kNumRegisters; i++) {
    if ((regs & (1 << i)) != 0) {
      lw(ToRegister(i), MemOperand(sp, stack_offset));
      stack_offset += kPointerSize;
    }
  }
  addiu(sp, sp, stack_offset);
}


void MacroAssembler::MultiPopReversed(RegList regs) {
  int16_t stack_offset = 0;

  for (int16_t i = kNumRegisters - 1; i >= 0; i--) {
    if ((regs & (1 << i)) != 0) {
      lw(ToRegister(i), MemOperand(sp, stack_offset));
      stack_offset += kPointerSize;
    }
  }
  addiu(sp, sp, stack_offset);
}


void MacroAssembler::MultiPushFPU(RegList regs) {
  int16_t num_to_push = NumberOfBitsSet(regs);
  int16_t stack_offset = num_to_push * kDoubleSize;

  Subu(sp, sp, Operand(stack_offset));
  for (int16_t i = kNumRegisters - 1; i >= 0; i--) {
    if ((regs & (1 << i)) != 0) {
      stack_offset -= kDoubleSize;
      sdc1(FPURegister::from_code(i), MemOperand(sp, stack_offset));
    }
  }
}


void MacroAssembler::MultiPushReversedFPU(RegList regs) {
  int16_t num_to_push = NumberOfBitsSet(regs);
  int16_t stack_offset = num_to_push * kDoubleSize;

  Subu(sp, sp, Operand(stack_offset));
  for (int16_t i = 0; i < kNumRegisters; i++) {
    if ((regs & (1 << i)) != 0) {
      stack_offset -= kDoubleSize;
      sdc1(FPURegister::from_code(i), MemOperand(sp, stack_offset));
    }
  }
}


void MacroAssembler::MultiPopFPU(RegList regs) {
  int16_t stack_offset = 0;

  for (int16_t i = 0; i < kNumRegisters; i++) {
    if ((regs & (1 << i)) != 0) {
      ldc1(FPURegister::from_code(i), MemOperand(sp, stack_offset));
      stack_offset += kDoubleSize;
    }
  }
  addiu(sp, sp, stack_offset);
}


void MacroAssembler::MultiPopReversedFPU(RegList regs) {
  int16_t stack_offset = 0;

  for (int16_t i = kNumRegisters - 1; i >= 0; i--) {
    if ((regs & (1 << i)) != 0) {
      ldc1(FPURegister::from_code(i), MemOperand(sp, stack_offset));
      stack_offset += kDoubleSize;
    }
  }
  addiu(sp, sp, stack_offset);
}

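// Ext extracts a bitfield: rt = (rs >> pos) & ((1 << size) - 1), a sketch of
// what both paths below compute.  On r2/r6 it is a single ext_ instruction;
// the fallback shifts left to drop the high bits, then right to drop the
// low ones.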
void MacroAssembler::Ext(Register rt,
                         Register rs,
                         uint16_t pos,
                         uint16_t size) {
  DCHECK(pos < 32);
  DCHECK(pos + size < 33);

  if (IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r6)) {
    ext_(rt, rs, pos, size);
  } else {
    // Move rs to rt and shift it left then right to get the
    // desired bitfield on the right side and zeroes on the left.
    int shift_left = 32 - (pos + size);
    sll(rt, rs, shift_left);  // Acts as a move if shift_left == 0.

    int shift_right = 32 - size;
    if (shift_right > 0) {
      srl(rt, rt, shift_right);
    }
  }
}

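// Ins inserts the low 'size' bits of rs into rt at bit 'pos'.  A sketch of
// the fallback sequence below, with mask = (1 << size) - 1:
//
//   rt = (rt & ~(mask << pos)) | ((rs & mask) << pos)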
void MacroAssembler::Ins(Register rt,
                         Register rs,
                         uint16_t pos,
                         uint16_t size) {
  DCHECK(pos < 32);
  DCHECK(pos + size <= 32);
  DCHECK(size != 0);

  if (IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r6)) {
    ins_(rt, rs, pos, size);
  } else {
    DCHECK(!rt.is(t8) && !rs.is(t8));
    Subu(at, zero_reg, Operand(1));
    srl(at, at, 32 - size);
    and_(t8, rs, at);
    sll(t8, t8, pos);
    sll(at, at, pos);
    nor(at, at, zero_reg);
    and_(at, rt, at);
    or_(rt, t8, at);
  }
}

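// Cvt_d_uw converts an unsigned 32-bit integer to a double.  The constant
// 0x41F00000 loaded below is the high word of the IEEE double 2^32.  A
// worked example for the FP32 path: rs = 0xFFFFFFFF first converts, as a
// signed int32, to -1.0; adding 2^32 yields the intended 4294967295.0.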
void MacroAssembler::Cvt_d_uw(FPURegister fd, Register rs,
                              FPURegister scratch) {
  // In FP64 mode we do the conversion from long.
  if (IsFp64Mode()) {
    mtc1(rs, scratch);
    Mthc1(zero_reg, scratch);
    cvt_d_l(fd, scratch);
  } else {
    // Convert rs to a FP value in fd.
    DCHECK(!fd.is(scratch));
    DCHECK(!rs.is(at));

    Label msb_clear, conversion_done;
    // For a value which is < 2^31, regard it as a signed positive word.
    Branch(&msb_clear, ge, rs, Operand(zero_reg), USE_DELAY_SLOT);
    mtc1(rs, fd);

    li(at, 0x41F00000);  // FP value: 2^32.

    // For unsigned inputs > 2^31, we convert to double as a signed int32,
    // then add 2^32 to move it back to an unsigned value in the range
    // 2^31..2^32-1.
    mtc1(zero_reg, scratch);
    Mthc1(at, scratch);

    cvt_d_w(fd, fd);

    Branch(USE_DELAY_SLOT, &conversion_done);
    add_d(fd, fd, scratch);

    bind(&msb_clear);
    cvt_d_w(fd, fd);

    bind(&conversion_done);
  }
}

void MacroAssembler::Trunc_uw_d(FPURegister fd,
                                FPURegister fs,
                                FPURegister scratch) {
  Trunc_uw_d(fs, t8, scratch);
  mtc1(t8, fd);
}

void MacroAssembler::Trunc_uw_s(FPURegister fd, FPURegister fs,
                                FPURegister scratch) {
  Trunc_uw_s(fs, t8, scratch);
  mtc1(t8, fd);
}

void MacroAssembler::Trunc_w_d(FPURegister fd, FPURegister fs) {
  if (IsMipsArchVariant(kLoongson) && fd.is(fs)) {
    Mfhc1(t8, fs);
    trunc_w_d(fd, fs);
    Mthc1(t8, fs);
  } else {
    trunc_w_d(fd, fs);
  }
}


void MacroAssembler::Round_w_d(FPURegister fd, FPURegister fs) {
  if (IsMipsArchVariant(kLoongson) && fd.is(fs)) {
    Mfhc1(t8, fs);
    round_w_d(fd, fs);
    Mthc1(t8, fs);
  } else {
    round_w_d(fd, fs);
  }
}


void MacroAssembler::Floor_w_d(FPURegister fd, FPURegister fs) {
  if (IsMipsArchVariant(kLoongson) && fd.is(fs)) {
    Mfhc1(t8, fs);
    floor_w_d(fd, fs);
    Mthc1(t8, fs);
  } else {
    floor_w_d(fd, fs);
  }
}


void MacroAssembler::Ceil_w_d(FPURegister fd, FPURegister fs) {
  if (IsMipsArchVariant(kLoongson) && fd.is(fs)) {
    Mfhc1(t8, fs);
    ceil_w_d(fd, fs);
    Mthc1(t8, fs);
  } else {
    ceil_w_d(fd, fs);
  }
}

Ben Murdoch69a99ed2011-11-30 16:03:39 +00001499void MacroAssembler::Trunc_uw_d(FPURegister fd,
1500 Register rs,
1501 FPURegister scratch) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001502 DCHECK(!fd.is(scratch));
1503 DCHECK(!rs.is(at));
Steve Block44f0eee2011-05-26 01:26:41 +01001504
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001505 // Load 2^31 into scratch as its double representation.
1506 li(at, 0x41E00000);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001507 mtc1(zero_reg, scratch);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001508 Mthc1(at, scratch);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001509 // Test if scratch > fd.
Ben Murdoch85b71792012-04-11 18:30:58 +01001510 // If fd < 2^31 we can convert it normally.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001511 Label simple_convert;
1512 BranchF(&simple_convert, NULL, lt, fd, scratch);
Steve Block44f0eee2011-05-26 01:26:41 +01001513
1514 // First we subtract 2^31 from fd, then trunc it to rs
1515 // and add 2^31 to rs.
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001516 sub_d(scratch, fd, scratch);
1517 trunc_w_d(scratch, scratch);
1518 mfc1(rs, scratch);
1519 Or(rs, rs, 1 << 31);
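    // Illustrative example: fd = 3000000000.0 becomes 852516352.0 after the
    // subtraction; trunc gives 852516352, and OR-ing in bit 31 restores
    // 3000000000. The OR is safe because the truncated value is below 2^31.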
Steve Block44f0eee2011-05-26 01:26:41 +01001520
1521 Label done;
1522 Branch(&done);
1523 // Simple conversion.
1524 bind(&simple_convert);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001525 trunc_w_d(scratch, fd);
1526 mfc1(rs, scratch);
Steve Block44f0eee2011-05-26 01:26:41 +01001527
1528 bind(&done);
1529}
1530
Ben Murdoch097c5b22016-05-18 11:27:45 +01001531void MacroAssembler::Trunc_uw_s(FPURegister fd, Register rs,
1532 FPURegister scratch) {
1533 DCHECK(!fd.is(scratch));
1534 DCHECK(!rs.is(at));
1535
1536 // Load 2^31 into scratch as its float representation.
1537 li(at, 0x4F000000);
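    // 0x4F000000 is the IEEE-754 single-precision encoding of 2^31
    // (exponent 158 = 127 + 31, zero mantissa).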
1538 mtc1(at, scratch);
1539 // Test if scratch > fd.
1540 // If fd < 2^31 we can convert it normally.
1541 Label simple_convert;
1542 BranchF32(&simple_convert, NULL, lt, fd, scratch);
1543
1544 // First we subtract 2^31 from fd, then trunc it to rs
1545 // and add 2^31 to rs.
1546 sub_s(scratch, fd, scratch);
1547 trunc_w_s(scratch, scratch);
1548 mfc1(rs, scratch);
1549 Or(rs, rs, 1 << 31);
1550
1551 Label done;
1552 Branch(&done);
1553 // Simple conversion.
1554 bind(&simple_convert);
1555 trunc_w_s(scratch, fd);
1556 mfc1(rs, scratch);
1557
1558 bind(&done);
1559}
Steve Block44f0eee2011-05-26 01:26:41 +01001560
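// Mthc1/Mfhc1 move a GPR to/from the high 32 bits of a 64-bit FPU value.
// In FP32 mode a double occupies an even/odd register pair, so the high
// word is the odd register (fs.high()); in FP64/FPXX mode each FPU
// register is 64 bits wide and mthc1/mfhc1 access its upper half directly.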
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001561void MacroAssembler::Mthc1(Register rt, FPURegister fs) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001562 if (IsFp32Mode()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001563 mtc1(rt, fs.high());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001564 } else {
1565 DCHECK(IsFp64Mode() || IsFpxxMode());
1566 DCHECK(IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r6));
1567 mthc1(rt, fs);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001568 }
1569}
1570
1571
1572void MacroAssembler::Mfhc1(Register rt, FPURegister fs) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001573 if (IsFp32Mode()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001574 mfc1(rt, fs.high());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001575 } else {
1576 DCHECK(IsFp64Mode() || IsFpxxMode());
1577 DCHECK(IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r6));
1578 mfhc1(rt, fs);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001579 }
1580}
1581
1582
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001583void MacroAssembler::BranchFCommon(SecondaryField sizeField, Label* target,
1584 Label* nan, Condition cond, FPURegister cmp1,
1585 FPURegister cmp2, BranchDelaySlot bd) {
1586 {
1587 BlockTrampolinePoolScope block_trampoline_pool(this);
1588 if (cond == al) {
1589 Branch(bd, target);
1590 return;
1591 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001592
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001593 if (IsMipsArchVariant(kMips32r6)) {
1594 sizeField = sizeField == D ? L : W;
1595 }
1596 DCHECK(nan || target);
1597 // Check for unordered (NaN) cases.
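    // Pre-R6 compares (c.cond.fmt) set FCSR condition flag 0, tested with
    // bc1t/bc1f; r6 compares (cmp.cond.fmt) write all-ones or all-zeros
    // into an FPU register, tested with bc1nez/bc1eqz.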
1598 if (nan) {
1599 bool long_branch =
1600 nan->is_bound() ? is_near(nan) : is_trampoline_emitted();
1601 if (!IsMipsArchVariant(kMips32r6)) {
1602 if (long_branch) {
1603 Label skip;
1604 c(UN, sizeField, cmp1, cmp2);
1605 bc1f(&skip);
1606 nop();
1607 BranchLong(nan, bd);
1608 bind(&skip);
1609 } else {
1610 c(UN, sizeField, cmp1, cmp2);
1611 bc1t(nan);
1612 if (bd == PROTECT) {
1613 nop();
1614 }
1615 }
1616 } else {
1617 // Use kDoubleCompareReg for the comparison result. It has to be
1618 // unavailable to the Lithium register allocator.
1619 DCHECK(!cmp1.is(kDoubleCompareReg) && !cmp2.is(kDoubleCompareReg));
1620 if (long_branch) {
1621 Label skip;
1622 cmp(UN, sizeField, kDoubleCompareReg, cmp1, cmp2);
1623 bc1eqz(&skip, kDoubleCompareReg);
1624 nop();
1625 BranchLong(nan, bd);
1626 bind(&skip);
1627 } else {
1628 cmp(UN, sizeField, kDoubleCompareReg, cmp1, cmp2);
1629 bc1nez(nan, kDoubleCompareReg);
1630 if (bd == PROTECT) {
1631 nop();
1632 }
1633 }
1634 }
1635 }
1636
1637 if (target) {
1638 bool long_branch =
1639 target->is_bound() ? is_near(target) : is_trampoline_emitted();
1640 if (long_branch) {
1641 Label skip;
1642 Condition neg_cond = NegateFpuCondition(cond);
1643 BranchShortF(sizeField, &skip, neg_cond, cmp1, cmp2, bd);
1644 BranchLong(target, bd);
1645 bind(&skip);
1646 } else {
1647 BranchShortF(sizeField, target, cond, cmp1, cmp2, bd);
1648 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001649 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001650 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001651}
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001652
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001653void MacroAssembler::BranchShortF(SecondaryField sizeField, Label* target,
1654 Condition cc, FPURegister cmp1,
1655 FPURegister cmp2, BranchDelaySlot bd) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001656 if (!IsMipsArchVariant(kMips32r6)) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001657 BlockTrampolinePoolScope block_trampoline_pool(this);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001658 if (target) {
1659 // NaN cases have either been handled by this function or are assumed
1660 // to have been handled by the caller.
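        // Compares only test less-than/equal style predicates, so gt/ge
        // (and their unsigned variants) test the complementary
        // ULE/ULT/OLE/OLT predicate and branch on its false result (bc1f).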
1661 switch (cc) {
1662 case lt:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001663 c(OLT, sizeField, cmp1, cmp2);
1664 bc1t(target);
1665 break;
1666 case ult:
1667 c(ULT, sizeField, cmp1, cmp2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001668 bc1t(target);
1669 break;
1670 case gt:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001671 c(ULE, sizeField, cmp1, cmp2);
1672 bc1f(target);
1673 break;
1674 case ugt:
1675 c(OLE, sizeField, cmp1, cmp2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001676 bc1f(target);
1677 break;
1678 case ge:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001679 c(ULT, sizeField, cmp1, cmp2);
1680 bc1f(target);
1681 break;
1682 case uge:
1683 c(OLT, sizeField, cmp1, cmp2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001684 bc1f(target);
1685 break;
1686 case le:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001687 c(OLE, sizeField, cmp1, cmp2);
1688 bc1t(target);
1689 break;
1690 case ule:
1691 c(ULE, sizeField, cmp1, cmp2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001692 bc1t(target);
1693 break;
1694 case eq:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001695 c(EQ, sizeField, cmp1, cmp2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001696 bc1t(target);
1697 break;
1698 case ueq:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001699 c(UEQ, sizeField, cmp1, cmp2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001700 bc1t(target);
1701 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001702 case ne: // Unordered or not equal.
1703 c(EQ, sizeField, cmp1, cmp2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001704 bc1f(target);
1705 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001706 case ogl:
1707 c(UEQ, sizeField, cmp1, cmp2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001708 bc1f(target);
1709 break;
1710 default:
1711 CHECK(0);
1712 }
1713 }
1714 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001715 BlockTrampolinePoolScope block_trampoline_pool(this);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001716 if (target) {
1717 // NaN cases have either been handled by this function or are assumed
1718 // to have been handled by the caller.
1719 // Unsigned conditions are treated as their signed counterpart.
1720 // Use kDoubleCompareReg for the comparison result; it is
1721 // valid in FP64 (FR = 1) mode, which is implied for mips32r6.
1722 DCHECK(!cmp1.is(kDoubleCompareReg) && !cmp2.is(kDoubleCompareReg));
1723 switch (cc) {
1724 case lt:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001725 cmp(OLT, sizeField, kDoubleCompareReg, cmp1, cmp2);
1726 bc1nez(target, kDoubleCompareReg);
1727 break;
1728 case ult:
1729 cmp(ULT, sizeField, kDoubleCompareReg, cmp1, cmp2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001730 bc1nez(target, kDoubleCompareReg);
1731 break;
1732 case gt:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001733 cmp(ULE, sizeField, kDoubleCompareReg, cmp1, cmp2);
1734 bc1eqz(target, kDoubleCompareReg);
1735 break;
1736 case ugt:
1737 cmp(OLE, sizeField, kDoubleCompareReg, cmp1, cmp2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001738 bc1eqz(target, kDoubleCompareReg);
1739 break;
1740 case ge:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001741 cmp(ULT, sizeField, kDoubleCompareReg, cmp1, cmp2);
1742 bc1eqz(target, kDoubleCompareReg);
1743 break;
1744 case uge:
1745 cmp(OLT, sizeField, kDoubleCompareReg, cmp1, cmp2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001746 bc1eqz(target, kDoubleCompareReg);
1747 break;
1748 case le:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001749 cmp(OLE, sizeField, kDoubleCompareReg, cmp1, cmp2);
1750 bc1nez(target, kDoubleCompareReg);
1751 break;
1752 case ule:
1753 cmp(ULE, sizeField, kDoubleCompareReg, cmp1, cmp2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001754 bc1nez(target, kDoubleCompareReg);
1755 break;
1756 case eq:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001757 cmp(EQ, sizeField, kDoubleCompareReg, cmp1, cmp2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001758 bc1nez(target, kDoubleCompareReg);
1759 break;
1760 case ueq:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001761 cmp(UEQ, sizeField, kDoubleCompareReg, cmp1, cmp2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001762 bc1nez(target, kDoubleCompareReg);
1763 break;
1764 case ne:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001765 cmp(EQ, sizeField, kDoubleCompareReg, cmp1, cmp2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001766 bc1eqz(target, kDoubleCompareReg);
1767 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001768 case ogl:
1769 cmp(UEQ, sizeField, kDoubleCompareReg, cmp1, cmp2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001770 bc1eqz(target, kDoubleCompareReg);
1771 break;
1772 default:
1773 CHECK(0);
1774 }
1775 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001776 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001777 if (bd == PROTECT) {
1778 nop();
1779 }
1780}
1781
1782
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001783void MacroAssembler::FmoveLow(FPURegister dst, Register src_low) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001784 if (IsFp32Mode()) {
1785 mtc1(src_low, dst);
1786 } else {
1787 DCHECK(IsFp64Mode() || IsFpxxMode());
1788 DCHECK(IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r6));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001789 DCHECK(!src_low.is(at));
1790 mfhc1(at, dst);
1791 mtc1(src_low, dst);
1792 mthc1(at, dst);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001793 }
1794}
1795
1796
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001797void MacroAssembler::Move(FPURegister dst, float imm) {
1798 li(at, Operand(bit_cast<int32_t>(imm)));
1799 mtc1(at, dst);
1800}
1801
1802
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001803void MacroAssembler::Move(FPURegister dst, double imm) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001804 static const DoubleRepresentation minus_zero(-0.0);
1805 static const DoubleRepresentation zero(0.0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001806 DoubleRepresentation value_rep(imm);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001807 // Handle special values first.
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001808 if (value_rep == zero && has_double_zero_reg_set_) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001809 mov_d(dst, kDoubleRegZero);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001810 } else if (value_rep == minus_zero && has_double_zero_reg_set_) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001811 neg_d(dst, kDoubleRegZero);
1812 } else {
1813 uint32_t lo, hi;
1814 DoubleAsTwoUInt32(imm, &lo, &hi);
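    // Illustrative example: imm = 1.0 splits into lo = 0x00000000 and
    // hi = 0x3FF00000, the two words of its IEEE-754 encoding.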
1815 // Move the low part of the double into the lower of the corresponding FPU
1816 // register of FPU register pair.
1817 if (lo != 0) {
1818 li(at, Operand(lo));
1819 mtc1(at, dst);
1820 } else {
1821 mtc1(zero_reg, dst);
1822 }
1823 // Move the high part of the double into the higher of the corresponding FPU
1824 // register of FPU register pair.
1825 if (hi != 0) {
1826 li(at, Operand(hi));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001827 Mthc1(at, dst);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001828 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001829 Mthc1(zero_reg, dst);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001830 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001831 if (dst.is(kDoubleRegZero)) has_double_zero_reg_set_ = true;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001832 }
1833}
1834
1835
1836void MacroAssembler::Movz(Register rd, Register rs, Register rt) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001837 if (IsMipsArchVariant(kLoongson) || IsMipsArchVariant(kMips32r6)) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001838 Label done;
1839 Branch(&done, ne, rt, Operand(zero_reg));
1840 mov(rd, rs);
1841 bind(&done);
1842 } else {
1843 movz(rd, rs, rt);
1844 }
1845}
1846
1847
1848void MacroAssembler::Movn(Register rd, Register rs, Register rt) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001849 if (IsMipsArchVariant(kLoongson) || IsMipsArchVariant(kMips32r6)) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001850 Label done;
1851 Branch(&done, eq, rt, Operand(zero_reg));
1852 mov(rd, rs);
1853 bind(&done);
1854 } else {
1855 movn(rd, rs, rt);
1856 }
1857}
1858
1859
1860void MacroAssembler::Movt(Register rd, Register rs, uint16_t cc) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001861 if (IsMipsArchVariant(kLoongson)) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001862 // Tests an FP condition code and then conditionally move rs to rd.
1863 // We do not currently use any FPU cc bit other than bit 0.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001864 DCHECK(cc == 0);
1865 DCHECK(!(rs.is(t8) || rd.is(t8)));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001866 Label done;
1867 Register scratch = t8;
1868 // To test the condition we need to fetch the contents of the FCSR
1869 // register and then test its cc (floating-point condition code) bit
1870 // (for cc = 0 this is bit 23 of the FCSR).
1871 cfc1(scratch, FCSR);
1872 // For the MIPS I, II and III architectures, the contents of scratch are
1873 // UNPREDICTABLE for the instruction immediately following CFC1.
1874 nop();
1875 srl(scratch, scratch, 16);
1876 andi(scratch, scratch, 0x0080);
1877 Branch(&done, eq, scratch, Operand(zero_reg));
1878 mov(rd, rs);
1879 bind(&done);
1880 } else {
1881 movt(rd, rs, cc);
1882 }
1883}
1884
1885
1886void MacroAssembler::Movf(Register rd, Register rs, uint16_t cc) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001887 if (IsMipsArchVariant(kLoongson)) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001888 // Tests an FP condition code and then conditionally move rs to rd.
1889 // We do not currently use any FPU cc bit other than bit 0.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001890 DCHECK(cc == 0);
1891 DCHECK(!(rs.is(t8) || rd.is(t8)));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001892 Label done;
1893 Register scratch = t8;
1894 // To test the condition we need to fetch the contents of the FCSR
1895 // register and then test its cc (floating-point condition code) bit
1896 // (for cc = 0 this is bit 23 of the FCSR).
1897 cfc1(scratch, FCSR);
1898 // For the MIPS I, II and III architectures, the contents of scratch are
1899 // UNPREDICTABLE for the instruction immediately following CFC1.
1900 nop();
1901 srl(scratch, scratch, 16);
1902 andi(scratch, scratch, 0x0080);
1903 Branch(&done, ne, scratch, Operand(zero_reg));
1904 mov(rd, rs);
1905 bind(&done);
1906 } else {
1907 movf(rd, rs, cc);
1908 }
1909}
1910
Ben Murdochda12d292016-06-02 14:46:10 +01001911#define __ masm->
1912
1913static bool ZeroHelper_d(MacroAssembler* masm, MaxMinKind kind, FPURegister dst,
1914 FPURegister src1, FPURegister src2, Label* equal) {
1915 if (src1.is(src2)) {
1916 __ Move(dst, src1);
1917 return true;
1918 }
1919
1920 Label other, compare_not_equal;
1921 FPURegister left, right;
1922 if (kind == MaxMinKind::kMin) {
1923 left = src1;
1924 right = src2;
1925 } else {
1926 left = src2;
1927 right = src1;
1928 }
1929
1930 __ BranchF64(&compare_not_equal, nullptr, ne, src1, src2);
1931 // Left and right hand side are equal, check for -0 vs. +0.
1932 __ FmoveHigh(t8, src1);
1933 __ Branch(&other, eq, t8, Operand(0x80000000));
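  // A high word of 0x80000000 identifies -0.0. min(-0, +0) must be -0 and
  // max(-0, +0) must be +0; the left/right assignment above encodes this.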
1934 __ Move_d(dst, right);
1935 __ Branch(equal);
1936 __ bind(&other);
1937 __ Move_d(dst, left);
1938 __ Branch(equal);
1939 __ bind(&compare_not_equal);
1940 return false;
1941}
1942
1943static bool ZeroHelper_s(MacroAssembler* masm, MaxMinKind kind, FPURegister dst,
1944 FPURegister src1, FPURegister src2, Label* equal) {
1945 if (src1.is(src2)) {
1946 __ Move(dst, src1);
1947 return true;
1948 }
1949
1950 Label other, compare_not_equal;
1951 FPURegister left, right;
1952 if (kind == MaxMinKind::kMin) {
1953 left = src1;
1954 right = src2;
1955 } else {
1956 left = src2;
1957 right = src1;
1958 }
1959
1960 __ BranchF32(&compare_not_equal, nullptr, ne, src1, src2);
1961 // Left and right hand side are equal, check for -0 vs. +0.
1962 __ FmoveLow(t8, src1);
1963 __ Branch(&other, eq, t8, Operand(0x80000000));
1964 __ Move_s(dst, right);
1965 __ Branch(equal);
1966 __ bind(&other);
1967 __ Move_s(dst, left);
1968 __ Branch(equal);
1969 __ bind(&compare_not_equal);
1970 return false;
1971}
1972
1973#undef __
1974
1975void MacroAssembler::MinNaNCheck_d(FPURegister dst, FPURegister src1,
1976 FPURegister src2, Label* nan) {
1977 if (nan) {
1978 BranchF64(nullptr, nan, eq, src1, src2);
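    // The eq condition is used only for its unordered path here: with a
    // null target, BranchF64 emits just the NaN check.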
1979 }
1980 if (IsMipsArchVariant(kMips32r6)) {
1981 min_d(dst, src1, src2);
1982 } else {
1983 Label skip;
1984 if (!ZeroHelper_d(this, MaxMinKind::kMin, dst, src1, src2, &skip)) {
1985 if (dst.is(src1)) {
1986 BranchF64(&skip, nullptr, le, src1, src2);
1987 Move_d(dst, src2);
1988 } else if (dst.is(src2)) {
1989 BranchF64(&skip, nullptr, ge, src1, src2);
1990 Move_d(dst, src1);
1991 } else {
1992 Label right;
1993 BranchF64(&right, nullptr, gt, src1, src2);
1994 Move_d(dst, src1);
1995 Branch(&skip);
1996 bind(&right);
1997 Move_d(dst, src2);
1998 }
1999 }
2000 bind(&skip);
2001 }
2002}
2003
2004void MacroAssembler::MaxNaNCheck_d(FPURegister dst, FPURegister src1,
2005 FPURegister src2, Label* nan) {
2006 if (nan) {
2007 BranchF64(nullptr, nan, eq, src1, src2);
2008 }
2009 if (IsMipsArchVariant(kMips32r6)) {
2010 max_d(dst, src1, src2);
2011 } else {
2012 Label skip;
2013 if (!ZeroHelper_d(this, MaxMinKind::kMax, dst, src1, src2, &skip)) {
2014 if (dst.is(src1)) {
2015 BranchF64(&skip, nullptr, ge, src1, src2);
2016 Move_d(dst, src2);
2017 } else if (dst.is(src2)) {
2018 BranchF64(&skip, nullptr, le, src1, src2);
2019 Move_d(dst, src1);
2020 } else {
2021 Label right;
2022 BranchF64(&right, nullptr, lt, src1, src2);
2023 Move_d(dst, src1);
2024 Branch(&skip);
2025 bind(&right);
2026 Move_d(dst, src2);
2027 }
2028 }
2029 bind(&skip);
2030 }
2031}
2032
2033void MacroAssembler::MinNaNCheck_s(FPURegister dst, FPURegister src1,
2034 FPURegister src2, Label* nan) {
2035 if (nan) {
2036 BranchF32(nullptr, nan, eq, src1, src2);
2037 }
2038 if (IsMipsArchVariant(kMips32r6)) {
2039 min_s(dst, src1, src2);
2040 } else {
2041 Label skip;
2042 if (!ZeroHelper_s(this, MaxMinKind::kMin, dst, src1, src2, &skip)) {
2043 if (dst.is(src1)) {
2044 BranchF32(&skip, nullptr, le, src1, src2);
2045 Move_s(dst, src2);
2046 } else if (dst.is(src2)) {
2047 BranchF32(&skip, nullptr, ge, src1, src2);
2048 Move_s(dst, src1);
2049 } else {
2050 Label right;
2051 BranchF32(&right, nullptr, gt, src1, src2);
2052 Move_s(dst, src1);
2053 Branch(&skip);
2054 bind(&right);
2055 Move_s(dst, src2);
2056 }
2057 }
2058 bind(&skip);
2059 }
2060}
2061
2062void MacroAssembler::MaxNaNCheck_s(FPURegister dst, FPURegister src1,
2063 FPURegister src2, Label* nan) {
2064 if (nan) {
2065 BranchF32(nullptr, nan, eq, src1, src2);
2066 }
2067 if (IsMipsArchVariant(kMips32r6)) {
2068 max_s(dst, src1, src2);
2069 } else {
2070 Label skip;
2071 if (!ZeroHelper_s(this, MaxMinKind::kMax, dst, src1, src2, &skip)) {
2072 if (dst.is(src1)) {
2073 BranchF32(&skip, nullptr, ge, src1, src2);
2074 Move_s(dst, src2);
2075 } else if (dst.is(src2)) {
2076 BranchF32(&skip, nullptr, le, src1, src2);
2077 Move_s(dst, src1);
2078 } else {
2079 Label right;
2080 BranchF32(&right, nullptr, lt, src1, src2);
2081 Move_s(dst, src1);
2082 Branch(&skip);
2083 bind(&right);
2084 Move_s(dst, src2);
2085 }
2086 }
2087 bind(&skip);
2088 }
2089}
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002090
2091void MacroAssembler::Clz(Register rd, Register rs) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002092 if (IsMipsArchVariant(kLoongson)) {
2093 DCHECK(!(rd.is(t8) || rd.is(t9)) && !(rs.is(t8) || rs.is(t9)));
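    // Loongson has no clz instruction: scan from bit 31 with a single-bit
    // mask, incrementing rd for each clear bit until the first set bit is
    // found (the srl in the delay slot moves the mask down each iteration).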
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002094 Register mask = t8;
2095 Register scratch = t9;
2096 Label loop, end;
2097 mov(at, rs);
2098 mov(rd, zero_reg);
2099 lui(mask, 0x8000);
2100 bind(&loop);
2101 and_(scratch, at, mask);
2102 Branch(&end, ne, scratch, Operand(zero_reg));
2103 addiu(rd, rd, 1);
2104 Branch(&loop, ne, mask, Operand(zero_reg), USE_DELAY_SLOT);
2105 srl(mask, mask, 1);
2106 bind(&end);
2107 } else {
2108 clz(rd, rs);
2109 }
2110}
2111
2112
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002113void MacroAssembler::EmitFPUTruncate(FPURoundingMode rounding_mode,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002114 Register result,
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002115 DoubleRegister double_input,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002116 Register scratch,
2117 DoubleRegister double_scratch,
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002118 Register except_flag,
2119 CheckForInexactConversion check_inexact) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002120 DCHECK(!result.is(scratch));
2121 DCHECK(!double_input.is(double_scratch));
2122 DCHECK(!except_flag.is(scratch));
2123
2124 Label done;
2125
2126 // Clear the except flag (0 = no exception).
2127 mov(except_flag, zero_reg);
2128
2129 // Test for values that can be exactly represented as a signed 32-bit integer.
2130 cvt_w_d(double_scratch, double_input);
2131 mfc1(result, double_scratch);
2132 cvt_d_w(double_scratch, double_scratch);
2133 BranchF(&done, NULL, eq, double_input, double_scratch);
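  // If the value survives the d->w->d round trip unchanged it was exactly
  // representable, so no rounding occurred and except_flag stays 0.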
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002134
2135 int32_t except_mask = kFCSRFlagMask; // Assume interested in all exceptions.
2136
2137 if (check_inexact == kDontCheckForInexactConversion) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002138 // Ignore inexact exceptions.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002139 except_mask &= ~kFCSRInexactFlagMask;
2140 }
2141
2142 // Save FCSR.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002143 cfc1(scratch, FCSR);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002144 // Disable FPU exceptions.
2145 ctc1(zero_reg, FCSR);
2146
2147 // Do operation based on rounding mode.
2148 switch (rounding_mode) {
2149 case kRoundToNearest:
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002150 Round_w_d(double_scratch, double_input);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002151 break;
2152 case kRoundToZero:
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002153 Trunc_w_d(double_scratch, double_input);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002154 break;
2155 case kRoundToPlusInf:
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002156 Ceil_w_d(double_scratch, double_input);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002157 break;
2158 case kRoundToMinusInf:
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002159 Floor_w_d(double_scratch, double_input);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002160 break;
2161 } // End of switch-statement.
2162
2163 // Retrieve FCSR.
2164 cfc1(except_flag, FCSR);
2165 // Restore FCSR.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002166 ctc1(scratch, FCSR);
2167 // Move the converted value into the result register.
2168 mfc1(result, double_scratch);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002169
2170 // Check for fpu exceptions.
2171 And(except_flag, except_flag, Operand(except_mask));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002172
Ben Murdoch257744e2011-11-30 15:57:28 +00002173 bind(&done);
2174}
2175
2176
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002177void MacroAssembler::TryInlineTruncateDoubleToI(Register result,
2178 DoubleRegister double_input,
2179 Label* done) {
2180 DoubleRegister single_scratch = kLithiumScratchDouble.low();
2181 Register scratch = at;
2182 Register scratch2 = t9;
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002183
2184 // Save the FCSR, then clear the cumulative exception flags.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002185 cfc1(scratch2, FCSR);
2186 ctc1(zero_reg, FCSR);
2187 // Try a conversion to a signed integer.
2188 trunc_w_d(single_scratch, double_input);
2189 mfc1(result, single_scratch);
2190 // Retrieve and restore the FCSR.
2191 cfc1(scratch, FCSR);
2192 ctc1(scratch2, FCSR);
2193 // Check for overflow and NaNs.
2194 And(scratch,
2195 scratch,
2196 kFCSROverflowFlagMask | kFCSRUnderflowFlagMask | kFCSRInvalidOpFlagMask);
2197 // If we had no exceptions we are done.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002198 Branch(done, eq, scratch, Operand(zero_reg));
2199}
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002200
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002201
2202void MacroAssembler::TruncateDoubleToI(Register result,
2203 DoubleRegister double_input) {
2204 Label done;
2205
2206 TryInlineTruncateDoubleToI(result, double_input, &done);
2207
2208 // If we fell through, the inline version didn't succeed; call the stub.
2209 push(ra);
2210 Subu(sp, sp, Operand(kDoubleSize)); // Put input on stack.
2211 sdc1(double_input, MemOperand(sp, 0));
2212
2213 DoubleToIStub stub(isolate(), sp, result, 0, true, true);
2214 CallStub(&stub);
2215
2216 Addu(sp, sp, Operand(kDoubleSize));
2217 pop(ra);
2218
2219 bind(&done);
2220}
2221
2222
2223void MacroAssembler::TruncateHeapNumberToI(Register result, Register object) {
2224 Label done;
2225 DoubleRegister double_scratch = f12;
2226 DCHECK(!result.is(object));
2227
2228 ldc1(double_scratch,
2229 MemOperand(object, HeapNumber::kValueOffset - kHeapObjectTag));
2230 TryInlineTruncateDoubleToI(result, double_scratch, &done);
2231
2232 // If we fell through, the inline version didn't succeed; call the stub.
2233 push(ra);
2234 DoubleToIStub stub(isolate(),
2235 object,
2236 result,
2237 HeapNumber::kValueOffset - kHeapObjectTag,
2238 true,
2239 true);
2240 CallStub(&stub);
2241 pop(ra);
2242
2243 bind(&done);
2244}
2245
2246
2247void MacroAssembler::TruncateNumberToI(Register object,
2248 Register result,
2249 Register heap_number_map,
2250 Register scratch,
2251 Label* not_number) {
2252 Label done;
2253 DCHECK(!result.is(object));
2254
2255 UntagAndJumpIfSmi(result, object, &done);
2256 JumpIfNotHeapNumber(object, heap_number_map, scratch, not_number);
2257 TruncateHeapNumberToI(result, object);
2258
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002259 bind(&done);
2260}
2261
2262
Ben Murdoch257744e2011-11-30 15:57:28 +00002263void MacroAssembler::GetLeastBitsFromSmi(Register dst,
2264 Register src,
2265 int num_least_bits) {
2266 Ext(dst, src, kSmiTagSize, num_least_bits);
2267}
2268
2269
2270void MacroAssembler::GetLeastBitsFromInt32(Register dst,
2271 Register src,
2272 int num_least_bits) {
2273 And(dst, src, Operand((1 << num_least_bits) - 1));
2274}
2275
2276
Steve Block44f0eee2011-05-26 01:26:41 +01002277 // Emulated conditional branches do not emit a nop in the branch delay slot.
2278//
2279// BRANCH_ARGS_CHECK checks that conditional jump arguments are correct.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002280#define BRANCH_ARGS_CHECK(cond, rs, rt) DCHECK( \
Steve Block44f0eee2011-05-26 01:26:41 +01002281 (cond == cc_always && rs.is(zero_reg) && rt.rm().is(zero_reg)) || \
2282 (cond != cc_always && (!rs.is(zero_reg) || !rt.rm().is(zero_reg))))
2283
2284
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002285void MacroAssembler::Branch(int32_t offset, BranchDelaySlot bdslot) {
2286 DCHECK(IsMipsArchVariant(kMips32r6) ? is_int26(offset) : is_int16(offset));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002287 BranchShort(offset, bdslot);
2288}
2289
2290
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002291void MacroAssembler::Branch(int32_t offset, Condition cond, Register rs,
2292 const Operand& rt, BranchDelaySlot bdslot) {
2293 bool is_near = BranchShortCheck(offset, nullptr, cond, rs, rt, bdslot);
2294 DCHECK(is_near);
2295 USE(is_near);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002296}
2297
2298
2299void MacroAssembler::Branch(Label* L, BranchDelaySlot bdslot) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002300 if (L->is_bound()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002301 if (is_near_branch(L)) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002302 BranchShort(L, bdslot);
2303 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002304 BranchLong(L, bdslot);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002305 }
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002306 } else {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002307 if (is_trampoline_emitted()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002308 BranchLong(L, bdslot);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002309 } else {
2310 BranchShort(L, bdslot);
2311 }
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002312 }
2313}
2314
2315
2316void MacroAssembler::Branch(Label* L, Condition cond, Register rs,
2317 const Operand& rt,
2318 BranchDelaySlot bdslot) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002319 if (L->is_bound()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002320 if (!BranchShortCheck(0, L, cond, rs, rt, bdslot)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002321 if (cond != cc_always) {
2322 Label skip;
2323 Condition neg_cond = NegateCondition(cond);
2324 BranchShort(&skip, neg_cond, rs, rt);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002325 BranchLong(L, bdslot);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002326 bind(&skip);
2327 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002328 BranchLong(L, bdslot);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002329 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002330 }
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002331 } else {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002332 if (is_trampoline_emitted()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002333 if (cond != cc_always) {
2334 Label skip;
2335 Condition neg_cond = NegateCondition(cond);
2336 BranchShort(&skip, neg_cond, rs, rt);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002337 BranchLong(L, bdslot);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002338 bind(&skip);
2339 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002340 BranchLong(L, bdslot);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002341 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002342 } else {
2343 BranchShort(L, cond, rs, rt, bdslot);
2344 }
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002345 }
2346}
2347
2348
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002349void MacroAssembler::Branch(Label* L,
2350 Condition cond,
2351 Register rs,
2352 Heap::RootListIndex index,
2353 BranchDelaySlot bdslot) {
2354 LoadRoot(at, index);
2355 Branch(L, cond, rs, Operand(at), bdslot);
2356}
2357
2358
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002359void MacroAssembler::BranchShortHelper(int16_t offset, Label* L,
2360 BranchDelaySlot bdslot) {
2361 DCHECK(L == nullptr || offset == 0);
2362 offset = GetOffset(offset, L, OffsetSize::kOffset16);
Steve Block44f0eee2011-05-26 01:26:41 +01002363 b(offset);
2364
2365 // Emit a nop in the branch delay slot if required.
2366 if (bdslot == PROTECT)
2367 nop();
2368}
2369
2370
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002371void MacroAssembler::BranchShortHelperR6(int32_t offset, Label* L) {
2372 DCHECK(L == nullptr || offset == 0);
2373 offset = GetOffset(offset, L, OffsetSize::kOffset26);
2374 bc(offset);
2375}
Steve Block44f0eee2011-05-26 01:26:41 +01002376
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002377
2378void MacroAssembler::BranchShort(int32_t offset, BranchDelaySlot bdslot) {
2379 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) {
2380 DCHECK(is_int26(offset));
2381 BranchShortHelperR6(offset, nullptr);
Steve Block44f0eee2011-05-26 01:26:41 +01002382 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002383 DCHECK(is_int16(offset));
2384 BranchShortHelper(offset, nullptr, bdslot);
Andrei Popescu31002712010-02-23 13:46:05 +00002385 }
2386}
2387
2388
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002389void MacroAssembler::BranchShort(Label* L, BranchDelaySlot bdslot) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002390 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) {
2391 BranchShortHelperR6(0, L);
2392 } else {
2393 BranchShortHelper(0, L, bdslot);
2394 }
Andrei Popescu31002712010-02-23 13:46:05 +00002395}
2396
2397
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002398static inline bool IsZero(const Operand& rt) {
Steve Block44f0eee2011-05-26 01:26:41 +01002399 if (rt.is_reg()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002400 return rt.rm().is(zero_reg);
2401 } else {
2402 return rt.immediate() == 0;
2403 }
2404}
2405
2406
2407int32_t MacroAssembler::GetOffset(int32_t offset, Label* L, OffsetSize bits) {
2408 if (L) {
2409 offset = branch_offset_helper(L, bits) >> 2;
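    // Branch offsets are encoded in units of instructions, hence the >> 2.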
2410 } else {
2411 DCHECK(is_intn(offset, bits));
2412 }
2413 return offset;
2414}
2415
2416
2417Register MacroAssembler::GetRtAsRegisterHelper(const Operand& rt,
2418 Register scratch) {
2419 Register r2 = no_reg;
2420 if (rt.is_reg()) {
Steve Block44f0eee2011-05-26 01:26:41 +01002421 r2 = rt.rm_;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002422 } else {
2423 r2 = scratch;
2424 li(r2, rt);
2425 }
2426
2427 return r2;
2428}
2429
2430
2431bool MacroAssembler::BranchShortHelperR6(int32_t offset, Label* L,
2432 Condition cond, Register rs,
2433 const Operand& rt) {
2434 DCHECK(L == nullptr || offset == 0);
2435 Register scratch = rs.is(at) ? t8 : at;
2436 OffsetSize bits = OffsetSize::kOffset16;
2437
2438 // Be careful to always use shifted_branch_offset only just before the
2439 // branch instruction, as the location will be remembered for patching the
2440 // target.
2441 {
2442 BlockTrampolinePoolScope block_trampoline_pool(this);
Steve Block44f0eee2011-05-26 01:26:41 +01002443 switch (cond) {
2444 case cc_always:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002445 bits = OffsetSize::kOffset26;
2446 if (!is_near(L, bits)) return false;
2447 offset = GetOffset(offset, L, bits);
2448 bc(offset);
Steve Block44f0eee2011-05-26 01:26:41 +01002449 break;
2450 case eq:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002451 if (rs.code() == rt.rm_.reg_code) {
2452 // Pre-R6 beq is used here to make the code patchable. Otherwise bc
2453 // would be used; it has no condition field and so is not patchable.
2454 bits = OffsetSize::kOffset16;
2455 if (!is_near(L, bits)) return false;
2456 scratch = GetRtAsRegisterHelper(rt, scratch);
2457 offset = GetOffset(offset, L, bits);
2458 beq(rs, scratch, offset);
2459 nop();
2460 } else if (IsZero(rt)) {
2461 bits = OffsetSize::kOffset21;
2462 if (!is_near(L, bits)) return false;
2463 offset = GetOffset(offset, L, bits);
2464 beqzc(rs, offset);
2465 } else {
2466 // We don't want any other register but scratch clobbered.
2467 bits = OffsetSize::kOffset16;
2468 if (!is_near(L, bits)) return false;
2469 scratch = GetRtAsRegisterHelper(rt, scratch);
2470 offset = GetOffset(offset, L, bits);
2471 beqc(rs, scratch, offset);
2472 }
Steve Block44f0eee2011-05-26 01:26:41 +01002473 break;
2474 case ne:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002475 if (rs.code() == rt.rm_.reg_code) {
2476 // Pre-R6 bne is used here to make the code patchable. Otherwise we
2477 // would not generate any instruction.
2478 bits = OffsetSize::kOffset16;
2479 if (!is_near(L, bits)) return false;
2480 scratch = GetRtAsRegisterHelper(rt, scratch);
2481 offset = GetOffset(offset, L, bits);
2482 bne(rs, scratch, offset);
2483 nop();
2484 } else if (IsZero(rt)) {
2485 bits = OffsetSize::kOffset21;
2486 if (!is_near(L, bits)) return false;
2487 offset = GetOffset(offset, L, bits);
2488 bnezc(rs, offset);
2489 } else {
2490 // We don't want any other register but scratch clobbered.
2491 bits = OffsetSize::kOffset16;
2492 if (!is_near(L, bits)) return false;
2493 scratch = GetRtAsRegisterHelper(rt, scratch);
2494 offset = GetOffset(offset, L, bits);
2495 bnec(rs, scratch, offset);
2496 }
Steve Block44f0eee2011-05-26 01:26:41 +01002497 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002498
Ben Murdoch257744e2011-11-30 15:57:28 +00002499 // Signed comparison.
Steve Block44f0eee2011-05-26 01:26:41 +01002500 case greater:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002501 // rs > rt
2502 if (rs.code() == rt.rm_.reg_code) {
2503 break; // No code needs to be emitted.
2504 } else if (rs.is(zero_reg)) {
2505 bits = OffsetSize::kOffset16;
2506 if (!is_near(L, bits)) return false;
2507 scratch = GetRtAsRegisterHelper(rt, scratch);
2508 offset = GetOffset(offset, L, bits);
2509 bltzc(scratch, offset);
2510 } else if (IsZero(rt)) {
2511 bits = OffsetSize::kOffset16;
2512 if (!is_near(L, bits)) return false;
2513 offset = GetOffset(offset, L, bits);
2514 bgtzc(rs, offset);
Steve Block44f0eee2011-05-26 01:26:41 +01002515 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002516 bits = OffsetSize::kOffset16;
2517 if (!is_near(L, bits)) return false;
2518 scratch = GetRtAsRegisterHelper(rt, scratch);
2519 DCHECK(!rs.is(scratch));
2520 offset = GetOffset(offset, L, bits);
2521 bltc(scratch, rs, offset);
Steve Block44f0eee2011-05-26 01:26:41 +01002522 }
2523 break;
2524 case greater_equal:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002525 // rs >= rt
2526 if (rs.code() == rt.rm_.reg_code) {
2527 bits = OffsetSize::kOffset26;
2528 if (!is_near(L, bits)) return false;
2529 offset = GetOffset(offset, L, bits);
2530 bc(offset);
2531 } else if (rs.is(zero_reg)) {
2532 bits = OffsetSize::kOffset16;
2533 if (!is_near(L, bits)) return false;
2534 scratch = GetRtAsRegisterHelper(rt, scratch);
2535 offset = GetOffset(offset, L, bits);
2536 blezc(scratch, offset);
2537 } else if (IsZero(rt)) {
2538 bits = OffsetSize::kOffset16;
2539 if (!is_near(L, bits)) return false;
2540 offset = GetOffset(offset, L, bits);
2541 bgezc(rs, offset);
Steve Block44f0eee2011-05-26 01:26:41 +01002542 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002543 bits = OffsetSize::kOffset16;
2544 if (!is_near(L, bits)) return false;
2545 scratch = GetRtAsRegisterHelper(rt, scratch);
2546 DCHECK(!rs.is(scratch));
2547 offset = GetOffset(offset, L, bits);
2548 bgec(rs, scratch, offset);
Steve Block44f0eee2011-05-26 01:26:41 +01002549 }
2550 break;
2551 case less:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002552 // rs < rt
2553 if (rs.code() == rt.rm_.reg_code) {
2554 break; // No code needs to be emitted.
2555 } else if (rs.is(zero_reg)) {
2556 bits = OffsetSize::kOffset16;
2557 if (!is_near(L, bits)) return false;
2558 scratch = GetRtAsRegisterHelper(rt, scratch);
2559 offset = GetOffset(offset, L, bits);
2560 bgtzc(scratch, offset);
2561 } else if (IsZero(rt)) {
2562 bits = OffsetSize::kOffset16;
2563 if (!is_near(L, bits)) return false;
2564 offset = GetOffset(offset, L, bits);
2565 bltzc(rs, offset);
Steve Block44f0eee2011-05-26 01:26:41 +01002566 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002567 bits = OffsetSize::kOffset16;
2568 if (!is_near(L, bits)) return false;
2569 scratch = GetRtAsRegisterHelper(rt, scratch);
2570 DCHECK(!rs.is(scratch));
2571 offset = GetOffset(offset, L, bits);
2572 bltc(rs, scratch, offset);
Steve Block44f0eee2011-05-26 01:26:41 +01002573 }
2574 break;
2575 case less_equal:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002576 // rs <= rt
2577 if (rs.code() == rt.rm_.reg_code) {
2578 bits = OffsetSize::kOffset26;
2579 if (!is_near(L, bits)) return false;
2580 offset = GetOffset(offset, L, bits);
2581 bc(offset);
2582 } else if (rs.is(zero_reg)) {
2583 bits = OffsetSize::kOffset16;
2584 if (!is_near(L, bits)) return false;
2585 scratch = GetRtAsRegisterHelper(rt, scratch);
2586 offset = GetOffset(offset, L, bits);
2587 bgezc(scratch, offset);
2588 } else if (IsZero(rt)) {
2589 bits = OffsetSize::kOffset16;
2590 if (!is_near(L, bits)) return false;
2591 offset = GetOffset(offset, L, bits);
2592 blezc(rs, offset);
Steve Block44f0eee2011-05-26 01:26:41 +01002593 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002594 bits = OffsetSize::kOffset16;
2595 if (!is_near(L, bits)) return false;
2596 scratch = GetRtAsRegisterHelper(rt, scratch);
2597 DCHECK(!rs.is(scratch));
2598 offset = GetOffset(offset, L, bits);
2599 bgec(scratch, rs, offset);
Steve Block44f0eee2011-05-26 01:26:41 +01002600 }
2601 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002602
Steve Block44f0eee2011-05-26 01:26:41 +01002603 // Unsigned comparison.
2604 case Ugreater:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002605 // rs > rt
2606 if (rs.code() == rt.rm_.reg_code) {
2607 break; // No code needs to be emitted.
2608 } else if (rs.is(zero_reg)) {
2609 bits = OffsetSize::kOffset21;
2610 if (!is_near(L, bits)) return false;
2611 scratch = GetRtAsRegisterHelper(rt, scratch);
2612 offset = GetOffset(offset, L, bits);
2613 bnezc(scratch, offset);
2614 } else if (IsZero(rt)) {
2615 bits = OffsetSize::kOffset21;
2616 if (!is_near(L, bits)) return false;
2617 offset = GetOffset(offset, L, bits);
2618 bnezc(rs, offset);
Steve Block44f0eee2011-05-26 01:26:41 +01002619 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002620 bits = OffsetSize::kOffset16;
2621 if (!is_near(L, bits)) return false;
2622 scratch = GetRtAsRegisterHelper(rt, scratch);
2623 DCHECK(!rs.is(scratch));
2624 offset = GetOffset(offset, L, bits);
2625 bltuc(scratch, rs, offset);
Steve Block44f0eee2011-05-26 01:26:41 +01002626 }
2627 break;
2628 case Ugreater_equal:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002629 // rs >= rt
2630 if (rs.code() == rt.rm_.reg_code) {
2631 bits = OffsetSize::kOffset26;
2632 if (!is_near(L, bits)) return false;
2633 offset = GetOffset(offset, L, bits);
2634 bc(offset);
2635 } else if (rs.is(zero_reg)) {
2636 bits = OffsetSize::kOffset21;
2637 if (!is_near(L, bits)) return false;
2638 scratch = GetRtAsRegisterHelper(rt, scratch);
2639 offset = GetOffset(offset, L, bits);
2640 beqzc(scratch, offset);
2641 } else if (IsZero(rt)) {
2642 bits = OffsetSize::kOffset26;
2643 if (!is_near(L, bits)) return false;
2644 offset = GetOffset(offset, L, bits);
2645 bc(offset);
Steve Block44f0eee2011-05-26 01:26:41 +01002646 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002647 bits = OffsetSize::kOffset16;
2648 if (!is_near(L, bits)) return false;
2649 scratch = GetRtAsRegisterHelper(rt, scratch);
2650 DCHECK(!rs.is(scratch));
2651 offset = GetOffset(offset, L, bits);
2652 bgeuc(rs, scratch, offset);
Steve Block44f0eee2011-05-26 01:26:41 +01002653 }
2654 break;
2655 case Uless:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002656 // rs < rt
2657 if (rs.code() == rt.rm_.reg_code) {
2658 break; // No code needs to be emitted.
2659 } else if (rs.is(zero_reg)) {
2660 bits = OffsetSize::kOffset21;
2661 if (!is_near(L, bits)) return false;
2662 scratch = GetRtAsRegisterHelper(rt, scratch);
2663 offset = GetOffset(offset, L, bits);
2664 bnezc(scratch, offset);
2665 } else if (IsZero(rt)) {
2666 break; // No code needs to be emitted.
Steve Block44f0eee2011-05-26 01:26:41 +01002667 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002668 bits = OffsetSize::kOffset16;
2669 if (!is_near(L, bits)) return false;
2670 scratch = GetRtAsRegisterHelper(rt, scratch);
2671 DCHECK(!rs.is(scratch));
2672 offset = GetOffset(offset, L, bits);
2673 bltuc(rs, scratch, offset);
Steve Block44f0eee2011-05-26 01:26:41 +01002674 }
2675 break;
2676 case Uless_equal:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002677 // rs <= rt
2678 if (rs.code() == rt.rm_.reg_code) {
2679 bits = OffsetSize::kOffset26;
2680 if (!is_near(L, bits)) return false;
2681 offset = GetOffset(offset, L, bits);
2682 bc(offset);
2683 } else if (rs.is(zero_reg)) {
2684 bits = OffsetSize::kOffset26;
2685 if (!is_near(L, bits)) return false;
2686 scratch = GetRtAsRegisterHelper(rt, scratch);
2687 offset = GetOffset(offset, L, bits);
2688 bc(offset);
2689 } else if (IsZero(rt)) {
2690 bits = OffsetSize::kOffset21;
2691 if (!is_near(L, bits)) return false;
2692 offset = GetOffset(offset, L, bits);
2693 beqzc(rs, offset);
Steve Block44f0eee2011-05-26 01:26:41 +01002694 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002695 bits = OffsetSize::kOffset16;
2696 if (!is_near(L, bits)) return false;
2697 scratch = GetRtAsRegisterHelper(rt, scratch);
2698 DCHECK(!rs.is(scratch));
2699 offset = GetOffset(offset, L, bits);
2700 bgeuc(scratch, rs, offset);
Steve Block44f0eee2011-05-26 01:26:41 +01002701 }
2702 break;
2703 default:
2704 UNREACHABLE();
2705 }
2706 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002707 CheckTrampolinePoolQuick(1);
2708 return true;
Steve Block44f0eee2011-05-26 01:26:41 +01002709}
2710
2711
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002712bool MacroAssembler::BranchShortHelper(int16_t offset, Label* L, Condition cond,
2713 Register rs, const Operand& rt,
2714 BranchDelaySlot bdslot) {
2715 DCHECK(L == nullptr || offset == 0);
2716 if (!is_near(L, OffsetSize::kOffset16)) return false;
2717
2718 Register scratch = at;
2719 int32_t offset32;
2720
2721 // Be careful to always use shifted_branch_offset only just before the
2722 // branch instruction, as the location will be remembered for patching the
2723 // target.
2724 {
2725 BlockTrampolinePoolScope block_trampoline_pool(this);
2726 switch (cond) {
2727 case cc_always:
2728 offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
2729 b(offset32);
2730 break;
2731 case eq:
2732 if (IsZero(rt)) {
2733 offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
2734 beq(rs, zero_reg, offset32);
2735 } else {
2736 // We don't want any other register but scratch clobbered.
2737 scratch = GetRtAsRegisterHelper(rt, scratch);
2738 offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
2739 beq(rs, scratch, offset32);
2740 }
2741 break;
2742 case ne:
2743 if (IsZero(rt)) {
2744 offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
2745 bne(rs, zero_reg, offset32);
2746 } else {
2747 // We don't want any other register but scratch clobbered.
2748 scratch = GetRtAsRegisterHelper(rt, scratch);
2749 offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
2750 bne(rs, scratch, offset32);
2751 }
2752 break;
2753
2754 // Signed comparison.
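      // Conditions without a native compare-and-branch are synthesized with
      // Slt/Sltu into scratch, then tested with beq/bne against zero_reg.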
2755 case greater:
2756 if (IsZero(rt)) {
2757 offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
2758 bgtz(rs, offset32);
2759 } else {
2760 Slt(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
2761 offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
2762 bne(scratch, zero_reg, offset32);
2763 }
2764 break;
2765 case greater_equal:
2766 if (IsZero(rt)) {
2767 offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
2768 bgez(rs, offset32);
2769 } else {
2770 Slt(scratch, rs, rt);
2771 offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
2772 beq(scratch, zero_reg, offset32);
2773 }
2774 break;
2775 case less:
2776 if (IsZero(rt)) {
2777 offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
2778 bltz(rs, offset32);
2779 } else {
2780 Slt(scratch, rs, rt);
2781 offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
2782 bne(scratch, zero_reg, offset32);
2783 }
2784 break;
2785 case less_equal:
2786 if (IsZero(rt)) {
2787 offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
2788 blez(rs, offset32);
2789 } else {
2790 Slt(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
2791 offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
2792 beq(scratch, zero_reg, offset32);
2793 }
2794 break;
2795
2796 // Unsigned comparison.
2797 case Ugreater:
2798 if (IsZero(rt)) {
2799 offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
2800 bne(rs, zero_reg, offset32);
2801 } else {
2802 Sltu(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
2803 offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
2804 bne(scratch, zero_reg, offset32);
2805 }
2806 break;
2807 case Ugreater_equal:
2808 if (IsZero(rt)) {
2809 offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
2810 b(offset32);
2811 } else {
2812 Sltu(scratch, rs, rt);
2813 offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
2814 beq(scratch, zero_reg, offset32);
2815 }
2816 break;
2817 case Uless:
2818 if (IsZero(rt)) {
2819 return true; // No code needs to be emitted.
2820 } else {
2821 Sltu(scratch, rs, rt);
2822 offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
2823 bne(scratch, zero_reg, offset32);
2824 }
2825 break;
2826 case Uless_equal:
2827 if (IsZero(rt)) {
2828 offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
2829 beq(rs, zero_reg, offset32);
2830 } else {
2831 Sltu(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
2832 offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
2833 beq(scratch, zero_reg, offset32);
2834 }
2835 break;
2836 default:
2837 UNREACHABLE();
2838 }
2839 }
2840 // Emit a nop in the branch delay slot if required.
2841 if (bdslot == PROTECT)
2842 nop();
2843
2844 return true;
2845}
2846
2847
2848bool MacroAssembler::BranchShortCheck(int32_t offset, Label* L, Condition cond,
2849 Register rs, const Operand& rt,
2850 BranchDelaySlot bdslot) {
2851 BRANCH_ARGS_CHECK(cond, rs, rt);
2852
2853 if (!L) {
2854 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) {
2855 DCHECK(is_int26(offset));
2856 return BranchShortHelperR6(offset, nullptr, cond, rs, rt);
2857 } else {
2858 DCHECK(is_int16(offset));
2859 return BranchShortHelper(offset, nullptr, cond, rs, rt, bdslot);
2860 }
2861 } else {
2862 DCHECK(offset == 0);
2863 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) {
2864 return BranchShortHelperR6(0, L, cond, rs, rt);
2865 } else {
2866 return BranchShortHelper(0, L, cond, rs, rt, bdslot);
2867 }
2868 }
2869 return false;
2870}
2871
2872
2873void MacroAssembler::BranchShort(int32_t offset, Condition cond, Register rs,
2874 const Operand& rt, BranchDelaySlot bdslot) {
2875 BranchShortCheck(offset, nullptr, cond, rs, rt, bdslot);
2876}
2877
2878
2879void MacroAssembler::BranchShort(Label* L, Condition cond, Register rs,
2880 const Operand& rt, BranchDelaySlot bdslot) {
2881 BranchShortCheck(0, L, cond, rs, rt, bdslot);
2882}
2883
2884
2885void MacroAssembler::BranchAndLink(int32_t offset, BranchDelaySlot bdslot) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002886 BranchAndLinkShort(offset, bdslot);
2887}
2888
2889
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002890void MacroAssembler::BranchAndLink(int32_t offset, Condition cond, Register rs,
2891 const Operand& rt, BranchDelaySlot bdslot) {
2892 bool is_near = BranchAndLinkShortCheck(offset, nullptr, cond, rs, rt, bdslot);
2893 DCHECK(is_near);
2894 USE(is_near);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002895}
2896
2897
2898void MacroAssembler::BranchAndLink(Label* L, BranchDelaySlot bdslot) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002899 if (L->is_bound()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002900 if (is_near_branch(L)) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002901 BranchAndLinkShort(L, bdslot);
2902 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002903 BranchAndLinkLong(L, bdslot);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002904 }
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002905 } else {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002906 if (is_trampoline_emitted()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002907 BranchAndLinkLong(L, bdslot);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002908 } else {
2909 BranchAndLinkShort(L, bdslot);
2910 }
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002911 }
2912}
2913
2914
2915void MacroAssembler::BranchAndLink(Label* L, Condition cond, Register rs,
2916 const Operand& rt,
2917 BranchDelaySlot bdslot) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002918 if (L->is_bound()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002919 if (!BranchAndLinkShortCheck(0, L, cond, rs, rt, bdslot)) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002920 Label skip;
2921 Condition neg_cond = NegateCondition(cond);
2922 BranchShort(&skip, neg_cond, rs, rt);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002923 BranchAndLinkLong(L, bdslot);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002924 bind(&skip);
2925 }
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002926 } else {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002927 if (is_trampoline_emitted()) {
2928 Label skip;
2929 Condition neg_cond = NegateCondition(cond);
2930 BranchShort(&skip, neg_cond, rs, rt);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002931 BranchAndLinkLong(L, bdslot);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002932 bind(&skip);
2933 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002934 BranchAndLinkShortCheck(0, L, cond, rs, rt, bdslot);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002935 }
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002936 }
2937}
2938
2939
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002940void MacroAssembler::BranchAndLinkShortHelper(int16_t offset, Label* L,
2941 BranchDelaySlot bdslot) {
2942 DCHECK(L == nullptr || offset == 0);
2943 offset = GetOffset(offset, L, OffsetSize::kOffset16);
Steve Block44f0eee2011-05-26 01:26:41 +01002944 bal(offset);
Andrei Popescu31002712010-02-23 13:46:05 +00002945
Steve Block44f0eee2011-05-26 01:26:41 +01002946 // Emit a nop in the branch delay slot if required.
2947 if (bdslot == PROTECT)
2948 nop();
Andrei Popescu31002712010-02-23 13:46:05 +00002949}
2950
2951
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002952void MacroAssembler::BranchAndLinkShortHelperR6(int32_t offset, Label* L) {
2953 DCHECK(L == nullptr || offset == 0);
2954 offset = GetOffset(offset, L, OffsetSize::kOffset26);
2955 balc(offset);
2956}
2957
2958
2959void MacroAssembler::BranchAndLinkShort(int32_t offset,
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002960 BranchDelaySlot bdslot) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002961 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) {
2962 DCHECK(is_int26(offset));
2963 BranchAndLinkShortHelperR6(offset, nullptr);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002964 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002965 DCHECK(is_int16(offset));
2966 BranchAndLinkShortHelper(offset, nullptr, bdslot);
Andrei Popescu31002712010-02-23 13:46:05 +00002967 }
Steve Block44f0eee2011-05-26 01:26:41 +01002968}
2969
2970
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002971void MacroAssembler::BranchAndLinkShort(Label* L, BranchDelaySlot bdslot) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002972 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) {
2973 BranchAndLinkShortHelperR6(0, L);
2974 } else {
2975 BranchAndLinkShortHelper(0, L, bdslot);
2976 }
Steve Block44f0eee2011-05-26 01:26:41 +01002977}
2978
2979
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002980bool MacroAssembler::BranchAndLinkShortHelperR6(int32_t offset, Label* L,
2981 Condition cond, Register rs,
2982 const Operand& rt) {
2983 DCHECK(L == nullptr || offset == 0);
2984 Register scratch = rs.is(at) ? t8 : at;
2985 OffsetSize bits = OffsetSize::kOffset16;
Steve Block44f0eee2011-05-26 01:26:41 +01002986
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002987 BlockTrampolinePoolScope block_trampoline_pool(this);
2988 DCHECK((cond == cc_always && is_int26(offset)) || is_int16(offset));
2989 switch (cond) {
2990 case cc_always:
2991 bits = OffsetSize::kOffset26;
2992 if (!is_near(L, bits)) return false;
2993 offset = GetOffset(offset, L, bits);
2994 balc(offset);
2995 break;
2996 case eq:
2997 if (!is_near(L, bits)) return false;
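        // r6 has no two-register compact branch-and-link, so rs == rt is
        // tested as rs - rt == 0 with beqzalc.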
2998 Subu(scratch, rs, rt);
2999 offset = GetOffset(offset, L, bits);
3000 beqzalc(scratch, offset);
3001 break;
3002 case ne:
3003 if (!is_near(L, bits)) return false;
3004 Subu(scratch, rs, rt);
3005 offset = GetOffset(offset, L, bits);
3006 bnezalc(scratch, offset);
3007 break;
3008
3009 // Signed comparison.
3010 case greater:
3011 // rs > rt
3012 if (rs.code() == rt.rm_.reg_code) {
3013 break; // No code needs to be emitted.
3014 } else if (rs.is(zero_reg)) {
3015 if (!is_near(L, bits)) return false;
3016 scratch = GetRtAsRegisterHelper(rt, scratch);
3017 offset = GetOffset(offset, L, bits);
3018 bltzalc(scratch, offset);
3019 } else if (IsZero(rt)) {
3020 if (!is_near(L, bits)) return false;
3021 offset = GetOffset(offset, L, bits);
3022 bgtzalc(rs, offset);
3023 } else {
3024 if (!is_near(L, bits)) return false;
3025 Slt(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
3026 offset = GetOffset(offset, L, bits);
3027 bnezalc(scratch, offset);
3028 }
3029 break;
3030 case greater_equal:
3031 // rs >= rt
3032 if (rs.code() == rt.rm_.reg_code) {
3033 bits = OffsetSize::kOffset26;
3034 if (!is_near(L, bits)) return false;
3035 offset = GetOffset(offset, L, bits);
3036 balc(offset);
3037 } else if (rs.is(zero_reg)) {
3038 if (!is_near(L, bits)) return false;
3039 scratch = GetRtAsRegisterHelper(rt, scratch);
3040 offset = GetOffset(offset, L, bits);
3041 blezalc(scratch, offset);
3042 } else if (IsZero(rt)) {
3043 if (!is_near(L, bits)) return false;
3044 offset = GetOffset(offset, L, bits);
3045 bgezalc(rs, offset);
3046 } else {
3047 if (!is_near(L, bits)) return false;
3048 Slt(scratch, rs, rt);
3049 offset = GetOffset(offset, L, bits);
3050 beqzalc(scratch, offset);
3051 }
3052 break;
3053 case less:
3054 // rs < rt
3055 if (rs.code() == rt.rm_.reg_code) {
3056 break; // No code needs to be emitted.
3057 } else if (rs.is(zero_reg)) {
3058 if (!is_near(L, bits)) return false;
3059 scratch = GetRtAsRegisterHelper(rt, scratch);
3060 offset = GetOffset(offset, L, bits);
3061 bgtzalc(scratch, offset);
3062 } else if (IsZero(rt)) {
3063 if (!is_near(L, bits)) return false;
3064 offset = GetOffset(offset, L, bits);
3065 bltzalc(rs, offset);
3066 } else {
3067 if (!is_near(L, bits)) return false;
3068 Slt(scratch, rs, rt);
3069 offset = GetOffset(offset, L, bits);
3070 bnezalc(scratch, offset);
3071 }
3072 break;
3073 case less_equal:
3074 // rs <= rt
3075 if (rs.code() == rt.rm_.reg_code) {
3076 bits = OffsetSize::kOffset26;
3077 if (!is_near(L, bits)) return false;
3078 offset = GetOffset(offset, L, bits);
3079 balc(offset);
3080 } else if (rs.is(zero_reg)) {
3081 if (!is_near(L, bits)) return false;
3082 scratch = GetRtAsRegisterHelper(rt, scratch);
3083 offset = GetOffset(offset, L, bits);
3084 bgezalc(scratch, offset);
3085 } else if (IsZero(rt)) {
3086 if (!is_near(L, bits)) return false;
3087 offset = GetOffset(offset, L, bits);
3088 blezalc(rs, offset);
3089 } else {
3090 if (!is_near(L, bits)) return false;
3091 Slt(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
3092 offset = GetOffset(offset, L, bits);
3093 beqzalc(scratch, offset);
3094 }
3095 break;
3096
3098 // Unsigned comparison.
3099 case Ugreater:
3100 // rs > rt
3101 if (!is_near(L, bits)) return false;
3102 Sltu(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
3103 offset = GetOffset(offset, L, bits);
3104 bnezalc(scratch, offset);
3105 break;
3106 case Ugreater_equal:
3107 // rs >= rt
3108 if (!is_near(L, bits)) return false;
3109 Sltu(scratch, rs, rt);
3110 offset = GetOffset(offset, L, bits);
3111 beqzalc(scratch, offset);
3112 break;
3113 case Uless:
3114 // rs < rt
3115 if (!is_near(L, bits)) return false;
3116 Sltu(scratch, rs, rt);
3117 offset = GetOffset(offset, L, bits);
3118 bnezalc(scratch, offset);
3119 break;
3120 case Uless_equal:
3121 // rs <= rt
3122 if (!is_near(L, bits)) return false;
3123 Sltu(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
3124 offset = GetOffset(offset, L, bits);
3125 beqzalc(scratch, offset);
3126 break;
3127 default:
3128 UNREACHABLE();
Steve Block44f0eee2011-05-26 01:26:41 +01003129 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003130 return true;
3131}
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003132
3133
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003134// Pre-r6 we need to use bgezal or bltzal, but they can't be used directly
3135// with the slt instructions. We could use sub or add instead, but we would miss
3136// overflow cases, so we keep slt and add an intermediate third instruction.
3137bool MacroAssembler::BranchAndLinkShortHelper(int16_t offset, Label* L,
3138 Condition cond, Register rs,
3139 const Operand& rt,
3140 BranchDelaySlot bdslot) {
3141 DCHECK(L == nullptr || offset == 0);
3142 if (!is_near(L, OffsetSize::kOffset16)) return false;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003143
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003144 Register scratch = t8;
3145 BlockTrampolinePoolScope block_trampoline_pool(this);
3146
3147 switch (cond) {
3148 case cc_always:
3149 offset = GetOffset(offset, L, OffsetSize::kOffset16);
3150 bal(offset);
3151 break;
3152 case eq:
3153 bne(rs, GetRtAsRegisterHelper(rt, scratch), 2);
3154 nop();
3155 offset = GetOffset(offset, L, OffsetSize::kOffset16);
3156 bal(offset);
3157 break;
3158 case ne:
3159 beq(rs, GetRtAsRegisterHelper(rt, scratch), 2);
3160 nop();
3161 offset = GetOffset(offset, L, OffsetSize::kOffset16);
3162 bal(offset);
3163 break;
3164
3165 // Signed comparison.
3166 case greater:
3167 Slt(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
3168 addiu(scratch, scratch, -1);
3169 offset = GetOffset(offset, L, OffsetSize::kOffset16);
3170 bgezal(scratch, offset);
3171 break;
3172 case greater_equal:
3173 Slt(scratch, rs, rt);
3174 addiu(scratch, scratch, -1);
3175 offset = GetOffset(offset, L, OffsetSize::kOffset16);
3176 bltzal(scratch, offset);
3177 break;
3178 case less:
3179 Slt(scratch, rs, rt);
3180 addiu(scratch, scratch, -1);
3181 offset = GetOffset(offset, L, OffsetSize::kOffset16);
3182 bgezal(scratch, offset);
3183 break;
3184 case less_equal:
3185 Slt(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
3186 addiu(scratch, scratch, -1);
3187 offset = GetOffset(offset, L, OffsetSize::kOffset16);
3188 bltzal(scratch, offset);
3189 break;
3190
3191 // Unsigned comparison.
3192 case Ugreater:
3193 Sltu(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
3194 addiu(scratch, scratch, -1);
3195 offset = GetOffset(offset, L, OffsetSize::kOffset16);
3196 bgezal(scratch, offset);
3197 break;
3198 case Ugreater_equal:
3199 Sltu(scratch, rs, rt);
3200 addiu(scratch, scratch, -1);
3201 offset = GetOffset(offset, L, OffsetSize::kOffset16);
3202 bltzal(scratch, offset);
3203 break;
3204 case Uless:
3205 Sltu(scratch, rs, rt);
3206 addiu(scratch, scratch, -1);
3207 offset = GetOffset(offset, L, OffsetSize::kOffset16);
3208 bgezal(scratch, offset);
3209 break;
3210 case Uless_equal:
3211 Sltu(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
3212 addiu(scratch, scratch, -1);
3213 offset = GetOffset(offset, L, OffsetSize::kOffset16);
3214 bltzal(scratch, offset);
3215 break;
3216
3217 default:
3218 UNREACHABLE();
Steve Block44f0eee2011-05-26 01:26:41 +01003219 }
3220
Steve Block44f0eee2011-05-26 01:26:41 +01003221 // Emit a nop in the branch delay slot if required.
3222 if (bdslot == PROTECT)
3223 nop();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003224
3225 return true;
3226}
3227
3228
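// Dispatcher for the conditional branch-and-link helpers above. Exactly one
// of 'offset' and 'L' is meaningful: a label implies offset == 0 and a raw
// offset implies L == nullptr, mirroring the DCHECKs in the helpers. A false
// return means no short encoding was possible and the caller must emit a
// long branch instead.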
3229bool MacroAssembler::BranchAndLinkShortCheck(int32_t offset, Label* L,
3230 Condition cond, Register rs,
3231 const Operand& rt,
3232 BranchDelaySlot bdslot) {
3233 BRANCH_ARGS_CHECK(cond, rs, rt);
3234
3235 if (!L) {
3236 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) {
3237 DCHECK(is_int26(offset));
3238 return BranchAndLinkShortHelperR6(offset, nullptr, cond, rs, rt);
3239 } else {
3240 DCHECK(is_int16(offset));
3241 return BranchAndLinkShortHelper(offset, nullptr, cond, rs, rt, bdslot);
3242 }
3243 } else {
3244 DCHECK(offset == 0);
3245 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) {
3246 return BranchAndLinkShortHelperR6(0, L, cond, rs, rt);
3247 } else {
3248 return BranchAndLinkShortHelper(0, L, cond, rs, rt, bdslot);
3249 }
3250 }
3251 return false;
Steve Block44f0eee2011-05-26 01:26:41 +01003252}
3253
3254
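// Register-indirect jump. On r6 with a protected slot this uses the compact
// jic; otherwise a classic jr followed by a nop when the delay slot must be
// protected. Conditional jumps branch around the jump on the negated
// condition, schematically:
//
//   Branch(2, NegateCondition(cond), rs, rt);  // skip the jump if !cond
//   jr(target);                                // taken only when cond holds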
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003255void MacroAssembler::Jump(Register target,
Steve Block44f0eee2011-05-26 01:26:41 +01003256 Condition cond,
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003257 Register rs,
3258 const Operand& rt,
3259 BranchDelaySlot bd) {
3260 BlockTrampolinePoolScope block_trampoline_pool(this);
Ben Murdochda12d292016-06-02 14:46:10 +01003261 if (IsMipsArchVariant(kMips32r6) && bd == PROTECT) {
3262 if (cond == cc_always) {
3263 jic(target, 0);
3264 } else {
3265 BRANCH_ARGS_CHECK(cond, rs, rt);
3266 Branch(2, NegateCondition(cond), rs, rt);
3267 jic(target, 0);
3268 }
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003269 } else {
Ben Murdochda12d292016-06-02 14:46:10 +01003270 if (cond == cc_always) {
3271 jr(target);
3272 } else {
3273 BRANCH_ARGS_CHECK(cond, rs, rt);
3274 Branch(2, NegateCondition(cond), rs, rt);
3275 jr(target);
3276 }
3277 // Emit a nop in the branch delay slot if required.
3278 if (bd == PROTECT) nop();
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003279 }
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003280}
3281
3282
3283void MacroAssembler::Jump(intptr_t target,
3284 RelocInfo::Mode rmode,
3285 Condition cond,
3286 Register rs,
3287 const Operand& rt,
3288 BranchDelaySlot bd) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003289 Label skip;
3290 if (cond != cc_always) {
3291 Branch(USE_DELAY_SLOT, &skip, NegateCondition(cond), rs, rt);
3292 }
3293 // The first instruction of 'li' may be placed in the delay slot.
3294 // This is not an issue; t9 is expected to be clobbered anyway.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003295 li(t9, Operand(target, rmode));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003296 Jump(t9, al, zero_reg, Operand(zero_reg), bd);
3297 bind(&skip);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003298}
3299
3300
3301void MacroAssembler::Jump(Address target,
3302 RelocInfo::Mode rmode,
3303 Condition cond,
3304 Register rs,
3305 const Operand& rt,
3306 BranchDelaySlot bd) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003307 DCHECK(!RelocInfo::IsCodeTarget(rmode));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003308 Jump(reinterpret_cast<intptr_t>(target), rmode, cond, rs, rt, bd);
3309}
3310
3311
3312void MacroAssembler::Jump(Handle<Code> code,
3313 RelocInfo::Mode rmode,
3314 Condition cond,
3315 Register rs,
3316 const Operand& rt,
3317 BranchDelaySlot bd) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003318 DCHECK(RelocInfo::IsCodeTarget(rmode));
3319 AllowDeferredHandleDereference embedding_raw_address;
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003320 Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond, rs, rt, bd);
3321}
3322
3323
3324int MacroAssembler::CallSize(Register target,
3325 Condition cond,
3326 Register rs,
3327 const Operand& rt,
3328 BranchDelaySlot bd) {
3329 int size = 0;
3330
3331 if (cond == cc_always) {
3332 size += 1;
3333 } else {
3334 size += 3;
Steve Block44f0eee2011-05-26 01:26:41 +01003335 }
3336
Ben Murdochda12d292016-06-02 14:46:10 +01003337 if (bd == PROTECT && !IsMipsArchVariant(kMips32r6)) size += 1;
Steve Block44f0eee2011-05-26 01:26:41 +01003338
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003339 return size * kInstrSize;
3340}
Steve Block44f0eee2011-05-26 01:26:41 +01003341
Steve Block44f0eee2011-05-26 01:26:41 +01003342
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003343// Note: To call gcc-compiled C code on MIPS, you must call through t9.
3344void MacroAssembler::Call(Register target,
3345 Condition cond,
3346 Register rs,
3347 const Operand& rt,
3348 BranchDelaySlot bd) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003349#ifdef DEBUG
3350 int size = IsPrevInstrCompactBranch() ? kInstrSize : 0;
3351#endif
3352
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003353 BlockTrampolinePoolScope block_trampoline_pool(this);
3354 Label start;
3355 bind(&start);
Ben Murdochda12d292016-06-02 14:46:10 +01003356 if (IsMipsArchVariant(kMips32r6) && bd == PROTECT) {
3357 if (cond == cc_always) {
3358 jialc(target, 0);
3359 } else {
3360 BRANCH_ARGS_CHECK(cond, rs, rt);
3361 Branch(2, NegateCondition(cond), rs, rt);
3362 jialc(target, 0);
3363 }
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003364 } else {
Ben Murdochda12d292016-06-02 14:46:10 +01003365 if (cond == cc_always) {
3366 jalr(target);
3367 } else {
3368 BRANCH_ARGS_CHECK(cond, rs, rt);
3369 Branch(2, NegateCondition(cond), rs, rt);
3370 jalr(target);
3371 }
3372 // Emit a nop in the branch delay slot if required.
3373 if (bd == PROTECT) nop();
Steve Block44f0eee2011-05-26 01:26:41 +01003374 }
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003375
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003376#ifdef DEBUG
3377 CHECK_EQ(size + CallSize(target, cond, rs, rt, bd),
3378 SizeOfCodeGeneratedSince(&start));
3379#endif
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003380}
3381
3382
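// CallSize must stay in sync with Call: the DCHECK_EQ at the end of
// Call(Address, ...) compares it against SizeOfCodeGeneratedSince. The
// address is materialized by a fixed-size li (CONSTANT_SIZE), hence the
// extra 2 * kInstrSize. For example, an unconditional pre-r6 call with a
// protected slot is predicted as (1 + 1) + 2 = 4 instructions:
// lui, ori, jalr, nop.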
3383int MacroAssembler::CallSize(Address target,
3384 RelocInfo::Mode rmode,
3385 Condition cond,
3386 Register rs,
3387 const Operand& rt,
3388 BranchDelaySlot bd) {
3389 int size = CallSize(t9, cond, rs, rt, bd);
3390 return size + 2 * kInstrSize;
3391}
3392
3393
3394void MacroAssembler::Call(Address target,
3395 RelocInfo::Mode rmode,
3396 Condition cond,
3397 Register rs,
3398 const Operand& rt,
3399 BranchDelaySlot bd) {
3400 BlockTrampolinePoolScope block_trampoline_pool(this);
3401 Label start;
3402 bind(&start);
3403 int32_t target_int = reinterpret_cast<int32_t>(target);
3404 // Must record previous source positions before
3405 // li() generates a new code target.
3406 positions_recorder()->WriteRecordedPositions();
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003407 li(t9, Operand(target_int, rmode), CONSTANT_SIZE);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003408 Call(t9, cond, rs, rt, bd);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003409 DCHECK_EQ(CallSize(target, rmode, cond, rs, rt, bd),
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003410 SizeOfCodeGeneratedSince(&start));
3411}
3412
3413
3414int MacroAssembler::CallSize(Handle<Code> code,
3415 RelocInfo::Mode rmode,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003416 TypeFeedbackId ast_id,
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003417 Condition cond,
3418 Register rs,
3419 const Operand& rt,
3420 BranchDelaySlot bd) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003421 AllowDeferredHandleDereference using_raw_address;
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003422 return CallSize(reinterpret_cast<Address>(code.location()),
3423 rmode, cond, rs, rt, bd);
3424}
3425
3426
3427void MacroAssembler::Call(Handle<Code> code,
3428 RelocInfo::Mode rmode,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003429 TypeFeedbackId ast_id,
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003430 Condition cond,
3431 Register rs,
3432 const Operand& rt,
3433 BranchDelaySlot bd) {
3434 BlockTrampolinePoolScope block_trampoline_pool(this);
3435 Label start;
3436 bind(&start);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003437 DCHECK(RelocInfo::IsCodeTarget(rmode));
3438 if (rmode == RelocInfo::CODE_TARGET && !ast_id.IsNone()) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003439 SetRecordedAstId(ast_id);
3440 rmode = RelocInfo::CODE_TARGET_WITH_ID;
3441 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003442 AllowDeferredHandleDereference embedding_raw_address;
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003443 Call(reinterpret_cast<Address>(code.location()), rmode, cond, rs, rt, bd);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003444 DCHECK_EQ(CallSize(code, rmode, ast_id, cond, rs, rt, bd),
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003445 SizeOfCodeGeneratedSince(&start));
3446}
3447
3448
3449void MacroAssembler::Ret(Condition cond,
3450 Register rs,
3451 const Operand& rt,
3452 BranchDelaySlot bd) {
3453 Jump(ra, cond, rs, rt, bd);
3454}
3455
3456
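// Long branches reach targets outside the 16/26-bit short ranges by loading
// the absolute target into at and jumping through it. Buffer growth is
// blocked so the recorded internal reference stays adjacent to the
// instructions it will patch. The pre-r6 shape is roughly:
//
//   lui at, hi16(target)
//   ori at, at, lo16(target)
//   jr  at
//   nop                        // when bdslot == PROTECT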
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003457void MacroAssembler::BranchLong(Label* L, BranchDelaySlot bdslot) {
3458 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT &&
3459 (!L->is_bound() || is_near_r6(L))) {
3460 BranchShortHelperR6(0, L);
3461 } else {
3462 BlockTrampolinePoolScope block_trampoline_pool(this);
3463 uint32_t imm32;
3464 imm32 = jump_address(L);
Ben Murdochda12d292016-06-02 14:46:10 +01003465 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) {
3466 uint32_t lui_offset, jic_offset;
3467 UnpackTargetAddressUnsigned(imm32, lui_offset, jic_offset);
3468 {
3469 BlockGrowBufferScope block_buf_growth(this);
3470 // Buffer growth (and relocation) must be blocked for internal
3471 // references until associated instructions are emitted and
3472 // available to be patched.
3473 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED);
3474 lui(at, lui_offset);
3475 jic(at, jic_offset);
3476 }
3477 CheckBuffer();
3478 } else {
3479 {
3480 BlockGrowBufferScope block_buf_growth(this);
3481 // Buffer growth (and relocation) must be blocked for internal
3482 // references until associated instructions are emitted and
3483 // available to be patched.
3485 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED);
3486 lui(at, (imm32 & kHiMask) >> kLuiShift);
3487 ori(at, at, (imm32 & kImm16Mask));
3488 }
3489 CheckBuffer();
3490 jr(at);
3491 // Emit a nop in the branch delay slot if required.
3492 if (bdslot == PROTECT) nop();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003493 }
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003494 }
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003495}
3496
3497
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003498void MacroAssembler::BranchAndLinkLong(Label* L, BranchDelaySlot bdslot) {
3499 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT &&
3500 (!L->is_bound() || is_near_r6(L))) {
3501 BranchAndLinkShortHelperR6(0, L);
3502 } else {
3503 BlockTrampolinePoolScope block_trampoline_pool(this);
3504 uint32_t imm32;
3505 imm32 = jump_address(L);
Ben Murdochda12d292016-06-02 14:46:10 +01003506 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) {
3507 uint32_t lui_offset, jic_offset;
3508 UnpackTargetAddressUnsigned(imm32, lui_offset, jic_offset);
3509 {
3510 BlockGrowBufferScope block_buf_growth(this);
3511 // Buffer growth (and relocation) must be blocked for internal
3512 // references until associated instructions are emitted and
3513 // available to be patched.
3514 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED);
3515 lui(at, lui_offset);
3516 jialc(at, jic_offset);
3517 }
3518 CheckBuffer();
3519 } else {
3520 {
3521 BlockGrowBufferScope block_buf_growth(this);
3522 // Buffer growth (and relocation) must be blocked for internal
3523 // references
3524 // until associated instructions are emitted and available to be
3525 // patched.
3526 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED);
3527 lui(at, (imm32 & kHiMask) >> kLuiShift);
3528 ori(at, at, (imm32 & kImm16Mask));
3529 }
3530 CheckBuffer();
3531 jalr(at);
3532 // Emit a nop in the branch delay slot if required.
3533 if (bdslot == PROTECT) nop();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003534 }
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003535 }
Steve Block44f0eee2011-05-26 01:26:41 +01003536}
3537
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003538
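// Drops 'drop' stack slots and returns in just two instructions by
// scheduling the stack adjustment into the return's delay slot:
//
//   jr    ra
//   addiu sp, sp, drop * kPointerSize   // executes in the delay slot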
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003539void MacroAssembler::DropAndRet(int drop) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003540 DCHECK(is_int16(drop * kPointerSize));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003541 Ret(USE_DELAY_SLOT);
3542 addiu(sp, sp, drop * kPointerSize);
3543}
Steve Block44f0eee2011-05-26 01:26:41 +01003544
3545void MacroAssembler::DropAndRet(int drop,
3546 Condition cond,
3547 Register r1,
3548 const Operand& r2) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003549 // Both Drop and Ret need to be conditional.
Steve Block44f0eee2011-05-26 01:26:41 +01003550 Label skip;
3551 if (cond != cc_always) {
3552 Branch(&skip, NegateCondition(cond), r1, r2);
3553 }
3554
3555 Drop(drop);
3556 Ret();
3557
3558 if (cond != cc_always) {
3559 bind(&skip);
3560 }
3561}
3562
3563
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003564void MacroAssembler::Drop(int count,
3565 Condition cond,
3566 Register reg,
3567 const Operand& op) {
3568 if (count <= 0) {
3569 return;
3570 }
3571
3572 Label skip;
3573
3574 if (cond != al) {
3575 Branch(&skip, NegateCondition(cond), reg, op);
3576 }
3577
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003578 Addu(sp, sp, Operand(count * kPointerSize));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003579
3580 if (cond != al) {
3581 bind(&skip);
3582 }
3583}
3584
3585
3586
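// Swaps two registers; with no scratch register available this falls back on
// the classic three-XOR trick (reg1 ^= reg2; reg2 ^= reg1; reg1 ^= reg2;),
// which needs no temporary.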
Steve Block44f0eee2011-05-26 01:26:41 +01003587void MacroAssembler::Swap(Register reg1,
3588 Register reg2,
3589 Register scratch) {
3590 if (scratch.is(no_reg)) {
3591 Xor(reg1, reg1, Operand(reg2));
3592 Xor(reg2, reg2, Operand(reg1));
3593 Xor(reg1, reg1, Operand(reg2));
3594 } else {
3595 mov(scratch, reg1);
3596 mov(reg1, reg2);
3597 mov(reg2, scratch);
3598 }
Andrei Popescu31002712010-02-23 13:46:05 +00003599}
3600
3601
3602void MacroAssembler::Call(Label* target) {
Steve Block44f0eee2011-05-26 01:26:41 +01003603 BranchAndLink(target);
3604}
3605
3606
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003607void MacroAssembler::Push(Handle<Object> handle) {
3608 li(at, Operand(handle));
3609 push(at);
3610}
3611
3612
Steve Block44f0eee2011-05-26 01:26:41 +01003613void MacroAssembler::DebugBreak() {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003614 PrepareCEntryArgs(0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003615 PrepareCEntryFunction(
3616 ExternalReference(Runtime::kHandleDebuggerStatement, isolate()));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003617 CEntryStub ces(isolate(), 1);
3618 DCHECK(AllowThisStubCall(&ces));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003619 Call(ces.GetCode(), RelocInfo::DEBUGGER_STATEMENT);
Steve Block44f0eee2011-05-26 01:26:41 +01003620}
3621
Steve Block6ded16b2010-05-10 14:33:55 +01003622
Andrei Popescu31002712010-02-23 13:46:05 +00003623// ---------------------------------------------------------------------------
Ben Murdoch257744e2011-11-30 15:57:28 +00003624// Exception handling.
Andrei Popescu31002712010-02-23 13:46:05 +00003625
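// A stack handler is a single word: the 'next' pointer of an intrusive list
// threaded through the stack, whose head lives at Isolate::kHandlerAddress.
// Pushing saves the old head on the stack and stores sp as the new head;
// popping below reverses this.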
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003626void MacroAssembler::PushStackHandler() {
Steve Block6ded16b2010-05-10 14:33:55 +01003627 // Adjust this code if not the case.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003628 STATIC_ASSERT(StackHandlerConstants::kSize == 1 * kPointerSize);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003629 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003630
3631 // Link the current handler as the next handler.
3632 li(t2, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
3633 lw(t1, MemOperand(t2));
3634 push(t1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003635
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003636 // Set this new handler as the current one.
3637 sw(sp, MemOperand(t2));
Andrei Popescu31002712010-02-23 13:46:05 +00003638}
3639
3640
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003641void MacroAssembler::PopStackHandler() {
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003642 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
Steve Block44f0eee2011-05-26 01:26:41 +01003643 pop(a1);
3644 Addu(sp, sp, Operand(StackHandlerConstants::kSize - kPointerSize));
Ben Murdoch589d6972011-11-30 16:04:58 +00003645 li(at, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
Steve Block44f0eee2011-05-26 01:26:41 +01003646 sw(a1, MemOperand(at));
Andrei Popescu31002712010-02-23 13:46:05 +00003647}
3648
3649
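// Bump-pointer allocation against the top/limit words selected by 'flags';
// the DCHECK below relies on limit sitting one word after top. Schematically,
// for a request of 'size' bytes:
//
//   result     = *top_address;             // current allocation top
//   result_end = result + size;
//   if (result_end > alloc_limit) goto gc_required;
//   *top_address = result_end;             // bump the top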
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003650void MacroAssembler::Allocate(int object_size,
3651 Register result,
3652 Register scratch1,
3653 Register scratch2,
3654 Label* gc_required,
3655 AllocationFlags flags) {
3656 DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
Steve Block44f0eee2011-05-26 01:26:41 +01003657 if (!FLAG_inline_new) {
Ben Murdoch257744e2011-11-30 15:57:28 +00003658 if (emit_debug_code()) {
Steve Block44f0eee2011-05-26 01:26:41 +01003659 // Trash the registers to simulate an allocation failure.
3660 li(result, 0x7091);
3661 li(scratch1, 0x7191);
3662 li(scratch2, 0x7291);
3663 }
3664 jmp(gc_required);
3665 return;
Steve Block6ded16b2010-05-10 14:33:55 +01003666 }
3667
Ben Murdoch097c5b22016-05-18 11:27:45 +01003668 DCHECK(!AreAliased(result, scratch1, scratch2, t9, at));
Steve Block6ded16b2010-05-10 14:33:55 +01003669
Steve Block44f0eee2011-05-26 01:26:41 +01003670 // Make object size into bytes.
3671 if ((flags & SIZE_IN_WORDS) != 0) {
3672 object_size *= kPointerSize;
3673 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003674 DCHECK_EQ(0, object_size & kObjectAlignmentMask);
Steve Block6ded16b2010-05-10 14:33:55 +01003675
Steve Block44f0eee2011-05-26 01:26:41 +01003676 // Check relative positions of allocation top and limit addresses.
3677 // ARM adds additional checks to make sure the ldm instruction can be
3678 // used. On MIPS we don't have ldm, so we don't need additional checks either.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003679 ExternalReference allocation_top =
3680 AllocationUtils::GetAllocationTopReference(isolate(), flags);
3681 ExternalReference allocation_limit =
3682 AllocationUtils::GetAllocationLimitReference(isolate(), flags);
3683
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003684 intptr_t top = reinterpret_cast<intptr_t>(allocation_top.address());
3685 intptr_t limit = reinterpret_cast<intptr_t>(allocation_limit.address());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003686 DCHECK((limit - top) == kPointerSize);
Steve Block44f0eee2011-05-26 01:26:41 +01003687
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003688 // Set up allocation top address and allocation limit registers.
3689 Register top_address = scratch1;
Steve Block44f0eee2011-05-26 01:26:41 +01003690 // This code stores a temporary value in t9.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003691 Register alloc_limit = t9;
3692 Register result_end = scratch2;
3693 li(top_address, Operand(allocation_top));
3694
Steve Block44f0eee2011-05-26 01:26:41 +01003695 if ((flags & RESULT_CONTAINS_TOP) == 0) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003696 // Load allocation top into result and allocation limit into alloc_limit.
3697 lw(result, MemOperand(top_address));
3698 lw(alloc_limit, MemOperand(top_address, kPointerSize));
Steve Block44f0eee2011-05-26 01:26:41 +01003699 } else {
Ben Murdoch257744e2011-11-30 15:57:28 +00003700 if (emit_debug_code()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003701 // Assert that result actually contains top on entry.
3702 lw(alloc_limit, MemOperand(top_address));
3703 Check(eq, kUnexpectedAllocationTop, result, Operand(alloc_limit));
Steve Block44f0eee2011-05-26 01:26:41 +01003704 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003705 // Load allocation limit. Result already contains allocation top.
3706 lw(alloc_limit, MemOperand(top_address, limit - top));
Steve Block44f0eee2011-05-26 01:26:41 +01003707 }
3708
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003709 if ((flags & DOUBLE_ALIGNMENT) != 0) {
3710 // Align the next allocation. Storing the filler map without checking top is
3711 // safe in new-space because the limit of the heap is aligned there.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003712 DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003713 And(result_end, result, Operand(kDoubleAlignmentMask));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003714 Label aligned;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003715 Branch(&aligned, eq, result_end, Operand(zero_reg));
3716 if ((flags & PRETENURE) != 0) {
3717 Branch(gc_required, Ugreater_equal, result, Operand(alloc_limit));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003718 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003719 li(result_end, Operand(isolate()->factory()->one_pointer_filler_map()));
3720 sw(result_end, MemOperand(result));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003721 Addu(result, result, Operand(kDoubleSize / 2));
3722 bind(&aligned);
3723 }
3724
Steve Block44f0eee2011-05-26 01:26:41 +01003725 // Calculate new top and bail out if new space is exhausted. Use result
3726 // to calculate the new top.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003727 Addu(result_end, result, Operand(object_size));
3728 Branch(gc_required, Ugreater, result_end, Operand(alloc_limit));
3729 sw(result_end, MemOperand(top_address));
Steve Block44f0eee2011-05-26 01:26:41 +01003730
3731 // Tag object if requested.
3732 if ((flags & TAG_OBJECT) != 0) {
3733 Addu(result, result, Operand(kHeapObjectTag));
3734 }
Steve Block6ded16b2010-05-10 14:33:55 +01003735}
3736
3737
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003738void MacroAssembler::Allocate(Register object_size, Register result,
3739 Register result_end, Register scratch,
3740 Label* gc_required, AllocationFlags flags) {
Steve Block44f0eee2011-05-26 01:26:41 +01003741 if (!FLAG_inline_new) {
Ben Murdoch257744e2011-11-30 15:57:28 +00003742 if (emit_debug_code()) {
Steve Block44f0eee2011-05-26 01:26:41 +01003743 // Trash the registers to simulate an allocation failure.
3744 li(result, 0x7091);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003745 li(scratch, 0x7191);
3746 li(result_end, 0x7291);
Steve Block44f0eee2011-05-26 01:26:41 +01003747 }
3748 jmp(gc_required);
3749 return;
3750 }
3751
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003752 // |object_size| and |result_end| may overlap if the DOUBLE_ALIGNMENT flag
3753 // is not specified. Other registers must not overlap.
Ben Murdoch097c5b22016-05-18 11:27:45 +01003754 DCHECK(!AreAliased(object_size, result, scratch, t9, at));
3755 DCHECK(!AreAliased(result_end, result, scratch, t9, at));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003756 DCHECK((flags & DOUBLE_ALIGNMENT) == 0 || !object_size.is(result_end));
Steve Block44f0eee2011-05-26 01:26:41 +01003757
3758 // Check relative positions of allocation top and limit addresses.
3759 // ARM adds additional checks to make sure the ldm instruction can be
3760 // used. On MIPS we don't have ldm, so we don't need additional checks either.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003761 ExternalReference allocation_top =
3762 AllocationUtils::GetAllocationTopReference(isolate(), flags);
3763 ExternalReference allocation_limit =
3764 AllocationUtils::GetAllocationLimitReference(isolate(), flags);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003765 intptr_t top = reinterpret_cast<intptr_t>(allocation_top.address());
3766 intptr_t limit = reinterpret_cast<intptr_t>(allocation_limit.address());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003767 DCHECK((limit - top) == kPointerSize);
Steve Block44f0eee2011-05-26 01:26:41 +01003768
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003769 // Set up allocation top address and allocation limit registers.
3770 Register top_address = scratch;
Steve Block44f0eee2011-05-26 01:26:41 +01003771 // This code stores a temporary value in t9.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003772 Register alloc_limit = t9;
3773 li(top_address, Operand(allocation_top));
3774
Steve Block44f0eee2011-05-26 01:26:41 +01003775 if ((flags & RESULT_CONTAINS_TOP) == 0) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003776 // Load allocation top into result and allocation limit into alloc_limit.
3777 lw(result, MemOperand(top_address));
3778 lw(alloc_limit, MemOperand(top_address, kPointerSize));
Steve Block44f0eee2011-05-26 01:26:41 +01003779 } else {
Ben Murdoch257744e2011-11-30 15:57:28 +00003780 if (emit_debug_code()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003781 // Assert that result actually contains top on entry.
3782 lw(alloc_limit, MemOperand(top_address));
3783 Check(eq, kUnexpectedAllocationTop, result, Operand(alloc_limit));
Steve Block44f0eee2011-05-26 01:26:41 +01003784 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003785 // Load allocation limit. Result already contains allocation top.
3786 lw(alloc_limit, MemOperand(top_address, limit - top));
Steve Block44f0eee2011-05-26 01:26:41 +01003787 }
3788
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003789 if ((flags & DOUBLE_ALIGNMENT) != 0) {
3790 // Align the next allocation. Storing the filler map without checking top is
3791 // safe in new-space because the limit of the heap is aligned there.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003792 DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003793 And(result_end, result, Operand(kDoubleAlignmentMask));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003794 Label aligned;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003795 Branch(&aligned, eq, result_end, Operand(zero_reg));
3796 if ((flags & PRETENURE) != 0) {
3797 Branch(gc_required, Ugreater_equal, result, Operand(alloc_limit));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003798 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003799 li(result_end, Operand(isolate()->factory()->one_pointer_filler_map()));
3800 sw(result_end, MemOperand(result));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003801 Addu(result, result, Operand(kDoubleSize / 2));
3802 bind(&aligned);
3803 }
3804
Steve Block44f0eee2011-05-26 01:26:41 +01003805 // Calculate new top and bail out if new space is exhausted. Use result
3806 // to calculate the new top. Object size may be in words so a shift is
3807 // required to get the number of bytes.
3808 if ((flags & SIZE_IN_WORDS) != 0) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01003809 Lsa(result_end, result, object_size, kPointerSizeLog2);
Steve Block44f0eee2011-05-26 01:26:41 +01003810 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003811 Addu(result_end, result, Operand(object_size));
Steve Block44f0eee2011-05-26 01:26:41 +01003812 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003813 Branch(gc_required, Ugreater, result_end, Operand(alloc_limit));
Steve Block44f0eee2011-05-26 01:26:41 +01003814
3815 // Update allocation top. result temporarily holds the new top.
Ben Murdoch257744e2011-11-30 15:57:28 +00003816 if (emit_debug_code()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003817 And(alloc_limit, result_end, Operand(kObjectAlignmentMask));
3818 Check(eq, kUnalignedAllocationInNewSpace, alloc_limit, Operand(zero_reg));
Steve Block44f0eee2011-05-26 01:26:41 +01003819 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003820 sw(result_end, MemOperand(top_address));
Steve Block44f0eee2011-05-26 01:26:41 +01003821
3822 // Tag object if requested.
3823 if ((flags & TAG_OBJECT) != 0) {
3824 Addu(result, result, Operand(kHeapObjectTag));
3825 }
3826}
3827
3828
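// The string allocators below size the payload, round up to object
// alignment, allocate, and then install the map, length and hash field. For
// the two-byte case, e.g. length == 3 gives 2 * 3 = 6 payload bytes; adding
// SeqTwoByteString::kHeaderSize plus kObjectAlignmentMask and masking with
// ~kObjectAlignmentMask rounds the total up to the next aligned size.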
Steve Block44f0eee2011-05-26 01:26:41 +01003829void MacroAssembler::AllocateTwoByteString(Register result,
3830 Register length,
3831 Register scratch1,
3832 Register scratch2,
3833 Register scratch3,
3834 Label* gc_required) {
3835 // Calculate the number of bytes needed for the characters in the string
3836 // while observing object alignment.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003837 DCHECK((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
Steve Block44f0eee2011-05-26 01:26:41 +01003838 sll(scratch1, length, 1); // Length in bytes, not chars.
3839 addiu(scratch1, scratch1,
3840 kObjectAlignmentMask + SeqTwoByteString::kHeaderSize);
3841 And(scratch1, scratch1, Operand(~kObjectAlignmentMask));
3842
3843 // Allocate two-byte string in new space.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003844 Allocate(scratch1,
3845 result,
3846 scratch2,
3847 scratch3,
3848 gc_required,
3849 TAG_OBJECT);
Steve Block44f0eee2011-05-26 01:26:41 +01003850
3851 // Set the map, length and hash field.
3852 InitializeNewString(result,
3853 length,
3854 Heap::kStringMapRootIndex,
3855 scratch1,
3856 scratch2);
3857}
3858
3859
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003860void MacroAssembler::AllocateOneByteString(Register result, Register length,
3861 Register scratch1, Register scratch2,
3862 Register scratch3,
3863 Label* gc_required) {
Steve Block44f0eee2011-05-26 01:26:41 +01003864 // Calculate the number of bytes needed for the characters in the string
3865 // while observing object alignment.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003866 DCHECK((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
3867 DCHECK(kCharSize == 1);
3868 addiu(scratch1, length, kObjectAlignmentMask + SeqOneByteString::kHeaderSize);
Steve Block44f0eee2011-05-26 01:26:41 +01003869 And(scratch1, scratch1, Operand(~kObjectAlignmentMask));
3870
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003871 // Allocate one-byte string in new space.
3872 Allocate(scratch1,
3873 result,
3874 scratch2,
3875 scratch3,
3876 gc_required,
3877 TAG_OBJECT);
Steve Block44f0eee2011-05-26 01:26:41 +01003878
3879 // Set the map, length and hash field.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003880 InitializeNewString(result, length, Heap::kOneByteStringMapRootIndex,
3881 scratch1, scratch2);
Steve Block44f0eee2011-05-26 01:26:41 +01003882}
3883
3884
3885void MacroAssembler::AllocateTwoByteConsString(Register result,
3886 Register length,
3887 Register scratch1,
3888 Register scratch2,
3889 Label* gc_required) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003890 Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
3891 TAG_OBJECT);
Steve Block44f0eee2011-05-26 01:26:41 +01003892 InitializeNewString(result,
3893 length,
3894 Heap::kConsStringMapRootIndex,
3895 scratch1,
3896 scratch2);
3897}
3898
3899
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003900void MacroAssembler::AllocateOneByteConsString(Register result, Register length,
3901 Register scratch1,
3902 Register scratch2,
3903 Label* gc_required) {
3904 Allocate(ConsString::kSize,
3905 result,
3906 scratch1,
3907 scratch2,
3908 gc_required,
3909 TAG_OBJECT);
3910
3911 InitializeNewString(result, length, Heap::kConsOneByteStringMapRootIndex,
3912 scratch1, scratch2);
Steve Block44f0eee2011-05-26 01:26:41 +01003913}
3914
3915
Ben Murdoch589d6972011-11-30 16:04:58 +00003916void MacroAssembler::AllocateTwoByteSlicedString(Register result,
3917 Register length,
3918 Register scratch1,
3919 Register scratch2,
3920 Label* gc_required) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003921 Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
3922 TAG_OBJECT);
Ben Murdoch589d6972011-11-30 16:04:58 +00003923
3924 InitializeNewString(result,
3925 length,
3926 Heap::kSlicedStringMapRootIndex,
3927 scratch1,
3928 scratch2);
3929}
3930
3931
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003932void MacroAssembler::AllocateOneByteSlicedString(Register result,
3933 Register length,
3934 Register scratch1,
3935 Register scratch2,
3936 Label* gc_required) {
3937 Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
3938 TAG_OBJECT);
Ben Murdoch589d6972011-11-30 16:04:58 +00003939
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003940 InitializeNewString(result, length, Heap::kSlicedOneByteStringMapRootIndex,
3941 scratch1, scratch2);
3942}
3943
3944
3945void MacroAssembler::JumpIfNotUniqueNameInstanceType(Register reg,
3946 Label* not_unique_name) {
3947 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
3948 Label succeed;
3949 And(at, reg, Operand(kIsNotStringMask | kIsNotInternalizedMask));
3950 Branch(&succeed, eq, at, Operand(zero_reg));
3951 Branch(not_unique_name, ne, reg, Operand(SYMBOL_TYPE));
3952
3953 bind(&succeed);
Ben Murdoch589d6972011-11-30 16:04:58 +00003954}
3955
3956
Steve Block44f0eee2011-05-26 01:26:41 +01003957// Allocates a heap number or jumps to the label if the young space is full and
3958// a scavenge is needed.
3959void MacroAssembler::AllocateHeapNumber(Register result,
3960 Register scratch1,
3961 Register scratch2,
3962 Register heap_number_map,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003963 Label* need_gc,
3964 TaggingMode tagging_mode,
3965 MutableMode mode) {
Steve Block44f0eee2011-05-26 01:26:41 +01003966 // Allocate an object in the heap for the heap number and tag it as a heap
3967 // object.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003968 Allocate(HeapNumber::kSize, result, scratch1, scratch2, need_gc,
3969 tagging_mode == TAG_RESULT ? TAG_OBJECT : NO_ALLOCATION_FLAGS);
3970
3971 Heap::RootListIndex map_index = mode == MUTABLE
3972 ? Heap::kMutableHeapNumberMapRootIndex
3973 : Heap::kHeapNumberMapRootIndex;
3974 AssertIsRoot(heap_number_map, map_index);
Steve Block44f0eee2011-05-26 01:26:41 +01003975
3976 // Store heap number map in the allocated object.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003977 if (tagging_mode == TAG_RESULT) {
3978 sw(heap_number_map, FieldMemOperand(result, HeapObject::kMapOffset));
3979 } else {
3980 sw(heap_number_map, MemOperand(result, HeapObject::kMapOffset));
3981 }
Steve Block44f0eee2011-05-26 01:26:41 +01003982}
3983
3984
3985void MacroAssembler::AllocateHeapNumberWithValue(Register result,
3986 FPURegister value,
3987 Register scratch1,
3988 Register scratch2,
3989 Label* gc_required) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003990 LoadRoot(t8, Heap::kHeapNumberMapRootIndex);
3991 AllocateHeapNumber(result, scratch1, scratch2, t8, gc_required);
Steve Block44f0eee2011-05-26 01:26:41 +01003992 sdc1(value, FieldMemOperand(result, HeapNumber::kValueOffset));
3993}
3994
3995
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003996void MacroAssembler::AllocateJSValue(Register result, Register constructor,
3997 Register value, Register scratch1,
3998 Register scratch2, Label* gc_required) {
3999 DCHECK(!result.is(constructor));
4000 DCHECK(!result.is(scratch1));
4001 DCHECK(!result.is(scratch2));
4002 DCHECK(!result.is(value));
Steve Block44f0eee2011-05-26 01:26:41 +01004003
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004004 // Allocate JSValue in new space.
4005 Allocate(JSValue::kSize, result, scratch1, scratch2, gc_required, TAG_OBJECT);
Steve Block44f0eee2011-05-26 01:26:41 +01004006
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004007 // Initialize the JSValue.
4008 LoadGlobalFunctionInitialMap(constructor, scratch1, scratch2);
4009 sw(scratch1, FieldMemOperand(result, HeapObject::kMapOffset));
4010 LoadRoot(scratch1, Heap::kEmptyFixedArrayRootIndex);
4011 sw(scratch1, FieldMemOperand(result, JSObject::kPropertiesOffset));
4012 sw(scratch1, FieldMemOperand(result, JSObject::kElementsOffset));
4013 sw(value, FieldMemOperand(result, JSValue::kValueOffset));
4014 STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
Steve Block44f0eee2011-05-26 01:26:41 +01004015}
4016
4017
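// Copies 'length' bytes from src to dst. The source is first brought to word
// alignment byte by byte, then copied one word at a time; because dst may
// itself be unaligned, each loaded word is scattered with four sb stores in
// endian-dependent order rather than a single sw.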
Ben Murdoch257744e2011-11-30 15:57:28 +00004018void MacroAssembler::CopyBytes(Register src,
4019 Register dst,
4020 Register length,
4021 Register scratch) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004022 Label align_loop_1, word_loop, byte_loop, byte_loop_1, done;
Ben Murdoch257744e2011-11-30 15:57:28 +00004023
4024 // Align src before copying in word size chunks.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004025 Branch(&byte_loop, le, length, Operand(kPointerSize));
Ben Murdoch257744e2011-11-30 15:57:28 +00004026 bind(&align_loop_1);
4027 And(scratch, src, kPointerSize - 1);
4028 Branch(&word_loop, eq, scratch, Operand(zero_reg));
4029 lbu(scratch, MemOperand(src));
4030 Addu(src, src, 1);
4031 sb(scratch, MemOperand(dst));
4032 Addu(dst, dst, 1);
4033 Subu(length, length, Operand(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004034 Branch(&align_loop_1, ne, length, Operand(zero_reg));
Ben Murdoch257744e2011-11-30 15:57:28 +00004035
4036 // Copy bytes in word size chunks.
4037 bind(&word_loop);
4038 if (emit_debug_code()) {
4039 And(scratch, src, kPointerSize - 1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004040 Assert(eq, kExpectingAlignmentForCopyBytes,
Ben Murdoch257744e2011-11-30 15:57:28 +00004041 scratch, Operand(zero_reg));
4042 }
4043 Branch(&byte_loop, lt, length, Operand(kPointerSize));
4044 lw(scratch, MemOperand(src));
4045 Addu(src, src, kPointerSize);
4046
4047 // TODO(kalmard): check if this can be optimized to use sw in most cases.
4048 // Can't use unaligned access; copy byte by byte.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004049 if (kArchEndian == kLittle) {
4050 sb(scratch, MemOperand(dst, 0));
4051 srl(scratch, scratch, 8);
4052 sb(scratch, MemOperand(dst, 1));
4053 srl(scratch, scratch, 8);
4054 sb(scratch, MemOperand(dst, 2));
4055 srl(scratch, scratch, 8);
4056 sb(scratch, MemOperand(dst, 3));
4057 } else {
4058 sb(scratch, MemOperand(dst, 3));
4059 srl(scratch, scratch, 8);
4060 sb(scratch, MemOperand(dst, 2));
4061 srl(scratch, scratch, 8);
4062 sb(scratch, MemOperand(dst, 1));
4063 srl(scratch, scratch, 8);
4064 sb(scratch, MemOperand(dst, 0));
4065 }
4066
Ben Murdoch257744e2011-11-30 15:57:28 +00004067 Addu(dst, dst, 4);
4068
4069 Subu(length, length, Operand(kPointerSize));
4070 Branch(&word_loop);
4071
4072 // Copy the last bytes if any left.
4073 bind(&byte_loop);
4074 Branch(&done, eq, length, Operand(zero_reg));
4075 bind(&byte_loop_1);
4076 lbu(scratch, MemOperand(src));
4077 Addu(src, src, 1);
4078 sb(scratch, MemOperand(dst));
4079 Addu(dst, dst, 1);
4080 Subu(length, length, Operand(1));
4081 Branch(&byte_loop_1, ne, length, Operand(zero_reg));
4082 bind(&done);
4083}
4084
4085
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004086void MacroAssembler::InitializeFieldsWithFiller(Register current_address,
4087 Register end_address,
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004088 Register filler) {
4089 Label loop, entry;
4090 Branch(&entry);
4091 bind(&loop);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004092 sw(filler, MemOperand(current_address));
4093 Addu(current_address, current_address, kPointerSize);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004094 bind(&entry);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004095 Branch(&loop, ult, current_address, Operand(end_address));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004096}
4097
4098
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004099void MacroAssembler::CheckFastElements(Register map,
4100 Register scratch,
4101 Label* fail) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004102 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
4103 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
4104 STATIC_ASSERT(FAST_ELEMENTS == 2);
4105 STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004106 lbu(scratch, FieldMemOperand(map, Map::kBitField2Offset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004107 Branch(fail, hi, scratch,
4108 Operand(Map::kMaximumBitField2FastHoleyElementValue));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004109}
4110
4111
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004112void MacroAssembler::CheckFastObjectElements(Register map,
4113 Register scratch,
4114 Label* fail) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004115 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
4116 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
4117 STATIC_ASSERT(FAST_ELEMENTS == 2);
4118 STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004119 lbu(scratch, FieldMemOperand(map, Map::kBitField2Offset));
4120 Branch(fail, ls, scratch,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004121 Operand(Map::kMaximumBitField2FastHoleySmiElementValue));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004122 Branch(fail, hi, scratch,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004123 Operand(Map::kMaximumBitField2FastHoleyElementValue));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004124}
4125
4126
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004127void MacroAssembler::CheckFastSmiElements(Register map,
4128 Register scratch,
4129 Label* fail) {
4130 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
4131 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004132 lbu(scratch, FieldMemOperand(map, Map::kBitField2Offset));
4133 Branch(fail, hi, scratch,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004134 Operand(Map::kMaximumBitField2FastHoleySmiElementValue));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004135}
4136
4137
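// Stores a number (smi or heap number) into a FixedDoubleArray element. Heap
// numbers are copied as raw mantissa/exponent words, with NaNs replaced by
// the canonical NaN so the hole pattern stays reserved. Smis take the fast
// conversion path at the end:
//
//   SmiUntag(untagged_value, value_reg);
//   mtc1(untagged_value, f2);           // int32 -> FPU register
//   cvt_d_w(f0, f2);                    // int32 -> double
//   sdc1(f0, MemOperand(scratch1, 0));  // store into the element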
4138void MacroAssembler::StoreNumberToDoubleElements(Register value_reg,
4139 Register key_reg,
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004140 Register elements_reg,
4141 Register scratch1,
4142 Register scratch2,
4143 Register scratch3,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004144 Label* fail,
4145 int elements_offset) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004146 DCHECK(!AreAliased(value_reg, key_reg, elements_reg, scratch1, scratch2,
4147 scratch3));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004148 Label smi_value, maybe_nan, have_double_value, is_nan, done;
4149 Register mantissa_reg = scratch2;
4150 Register exponent_reg = scratch3;
4151
4152 // Handle smi values specially.
4153 JumpIfSmi(value_reg, &smi_value);
4154
4155 // Ensure that the object is a heap number.
4156 CheckMap(value_reg,
4157 scratch1,
4158 Heap::kHeapNumberMapRootIndex,
4159 fail,
4160 DONT_DO_SMI_CHECK);
4161
4162 // Check for NaN: all NaN values have a value greater (signed) than 0x7ff00000
4163 // in the exponent.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004164 li(scratch1, Operand(kHoleNanUpper32 & HeapNumber::kExponentMask));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004165 lw(exponent_reg, FieldMemOperand(value_reg, HeapNumber::kExponentOffset));
4166 Branch(&maybe_nan, ge, exponent_reg, Operand(scratch1));
4167
4168 lw(mantissa_reg, FieldMemOperand(value_reg, HeapNumber::kMantissaOffset));
4169
4170 bind(&have_double_value);
Ben Murdoch097c5b22016-05-18 11:27:45 +01004171 Lsa(scratch1, elements_reg, key_reg, kDoubleSizeLog2 - kSmiTagSize);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004172 sw(mantissa_reg,
4173 FieldMemOperand(scratch1, FixedDoubleArray::kHeaderSize - elements_offset
4174 + kHoleNanLower32Offset));
4175 sw(exponent_reg,
4176 FieldMemOperand(scratch1, FixedDoubleArray::kHeaderSize - elements_offset
4177 + kHoleNanUpper32Offset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004178 jmp(&done);
4179
4180 bind(&maybe_nan);
4181 // Could be NaN or Infinity. If fraction is not zero, it's NaN, otherwise
4182 // it's an Infinity, and the non-NaN code path applies.
4183 Branch(&is_nan, gt, exponent_reg, Operand(scratch1));
4184 lw(mantissa_reg, FieldMemOperand(value_reg, HeapNumber::kMantissaOffset));
4185 Branch(&have_double_value, eq, mantissa_reg, Operand(zero_reg));
4186 bind(&is_nan);
4187 // Load canonical NaN for storing into the double array.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004188 LoadRoot(at, Heap::kNanValueRootIndex);
4189 lw(mantissa_reg, FieldMemOperand(at, HeapNumber::kMantissaOffset));
4190 lw(exponent_reg, FieldMemOperand(at, HeapNumber::kExponentOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004191 jmp(&have_double_value);
4192
4193 bind(&smi_value);
4194 Addu(scratch1, elements_reg,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004195 Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag -
4196 elements_offset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01004197 Lsa(scratch1, scratch1, key_reg, kDoubleSizeLog2 - kSmiTagSize);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004198 // scratch1 is now effective address of the double element
4199
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004200 Register untagged_value = scratch2;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004201 SmiUntag(untagged_value, value_reg);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004202 mtc1(untagged_value, f2);
4203 cvt_d_w(f0, f2);
4204 sdc1(f0, MemOperand(scratch1, 0));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004205 bind(&done);
4206}
4207
4208
4209void MacroAssembler::CompareMapAndBranch(Register obj,
4210 Register scratch,
4211 Handle<Map> map,
4212 Label* early_success,
4213 Condition cond,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004214 Label* branch_to) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004215 lw(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004216 CompareMapAndBranch(scratch, map, early_success, cond, branch_to);
4217}
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004218
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004219
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004220void MacroAssembler::CompareMapAndBranch(Register obj_map,
4221 Handle<Map> map,
4222 Label* early_success,
4223 Condition cond,
4224 Label* branch_to) {
4225 Branch(branch_to, cond, obj_map, Operand(map));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004226}
4227
4228
Steve Block44f0eee2011-05-26 01:26:41 +01004229void MacroAssembler::CheckMap(Register obj,
4230 Register scratch,
4231 Handle<Map> map,
4232 Label* fail,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004233 SmiCheckType smi_check_type) {
Ben Murdoch257744e2011-11-30 15:57:28 +00004234 if (smi_check_type == DO_SMI_CHECK) {
Steve Block44f0eee2011-05-26 01:26:41 +01004235 JumpIfSmi(obj, fail);
4236 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004237 Label success;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004238 CompareMapAndBranch(obj, scratch, map, &success, ne, fail);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004239 bind(&success);
Steve Block44f0eee2011-05-26 01:26:41 +01004240}
4241
4242
Emily Bernierd0a1eb72015-03-24 16:35:39 -04004243void MacroAssembler::DispatchWeakMap(Register obj, Register scratch1,
4244 Register scratch2, Handle<WeakCell> cell,
4245 Handle<Code> success,
4246 SmiCheckType smi_check_type) {
Ben Murdoch257744e2011-11-30 15:57:28 +00004247 Label fail;
4248 if (smi_check_type == DO_SMI_CHECK) {
4249 JumpIfSmi(obj, &fail);
4250 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04004251 lw(scratch1, FieldMemOperand(obj, HeapObject::kMapOffset));
4252 GetWeakValue(scratch2, cell);
4253 Jump(success, RelocInfo::CODE_TARGET, eq, scratch1, Operand(scratch2));
Ben Murdoch257744e2011-11-30 15:57:28 +00004254 bind(&fail);
4255}
4256
4257
Steve Block44f0eee2011-05-26 01:26:41 +01004258void MacroAssembler::CheckMap(Register obj,
4259 Register scratch,
4260 Heap::RootListIndex index,
4261 Label* fail,
Ben Murdoch257744e2011-11-30 15:57:28 +00004262 SmiCheckType smi_check_type) {
4263 if (smi_check_type == DO_SMI_CHECK) {
Steve Block44f0eee2011-05-26 01:26:41 +01004264 JumpIfSmi(obj, fail);
4265 }
4266 lw(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
4267 LoadRoot(at, index);
4268 Branch(fail, ne, scratch, Operand(at));
Steve Block6ded16b2010-05-10 14:33:55 +01004269}
4270
4271
Emily Bernierd0a1eb72015-03-24 16:35:39 -04004272void MacroAssembler::GetWeakValue(Register value, Handle<WeakCell> cell) {
4273 li(value, Operand(cell));
4274 lw(value, FieldMemOperand(value, WeakCell::kValueOffset));
4275}
4276
4277
4278void MacroAssembler::LoadWeakValue(Register value, Handle<WeakCell> cell,
4279 Label* miss) {
4280 GetWeakValue(value, cell);
4281 JumpIfSmi(value, miss);
4282}
4283
4284
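// The Mov*Float* helpers abstract over the two o32 floating-point ABIs: with
// hardware FP, doubles are passed in f12/f14 and returned in f0; under the
// soft-float ABI they travel in GPR pairs (v0/v1, a0/a1, a2/a3) whose word
// order depends on endianness.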
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004285void MacroAssembler::MovFromFloatResult(DoubleRegister dst) {
Ben Murdoch257744e2011-11-30 15:57:28 +00004286 if (IsMipsSoftFloatABI) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004287 if (kArchEndian == kLittle) {
4288 Move(dst, v0, v1);
4289 } else {
4290 Move(dst, v1, v0);
4291 }
Ben Murdoch257744e2011-11-30 15:57:28 +00004292 } else {
4293 Move(dst, f0); // Reg f0 is o32 ABI FP return value.
4294 }
4295}
4296
4297
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004298void MacroAssembler::MovFromFloatParameter(DoubleRegister dst) {
4299 if (IsMipsSoftFloatABI) {
4300 if (kArchEndian == kLittle) {
4301 Move(dst, a0, a1);
Ben Murdoch257744e2011-11-30 15:57:28 +00004302 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004303 Move(dst, a1, a0);
Ben Murdoch257744e2011-11-30 15:57:28 +00004304 }
4305 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004306 Move(dst, f12); // Reg f12 is o32 ABI FP first argument value.
Ben Murdoch257744e2011-11-30 15:57:28 +00004307 }
4308}
4309
4310
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004311void MacroAssembler::MovToFloatParameter(DoubleRegister src) {
Ben Murdoch257744e2011-11-30 15:57:28 +00004312 if (!IsMipsSoftFloatABI) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004313 Move(f12, src);
Ben Murdoch257744e2011-11-30 15:57:28 +00004314 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004315 if (kArchEndian == kLittle) {
4316 Move(a0, a1, src);
4317 } else {
4318 Move(a1, a0, src);
4319 }
Ben Murdoch257744e2011-11-30 15:57:28 +00004320 }
4321}
4322
4323
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004324void MacroAssembler::MovToFloatResult(DoubleRegister src) {
4325 if (!IsMipsSoftFloatABI) {
4326 Move(f0, src);
Ben Murdoch257744e2011-11-30 15:57:28 +00004327 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004328 if (kArchEndian == kLittle) {
4329 Move(v0, v1, src);
4330 } else {
4331 Move(v1, v0, src);
4332 }
4333 }
4334}
4335
4336
4337void MacroAssembler::MovToFloatParameters(DoubleRegister src1,
4338 DoubleRegister src2) {
4339 if (!IsMipsSoftFloatABI) {
4340 if (src2.is(f12)) {
4341 DCHECK(!src1.is(f14));
4342 Move(f14, src2);
4343 Move(f12, src1);
4344 } else {
4345 Move(f12, src1);
4346 Move(f14, src2);
4347 }
4348 } else {
4349 if (kArchEndian == kLittle) {
4350 Move(a0, a1, src1);
4351 Move(a2, a3, src2);
4352 } else {
4353 Move(a1, a0, src1);
4354 Move(a3, a2, src2);
4355 }
Ben Murdoch257744e2011-11-30 15:57:28 +00004356 }
4357}
4358
4359
Steve Block6ded16b2010-05-10 14:33:55 +01004360// -----------------------------------------------------------------------------
Ben Murdoch257744e2011-11-30 15:57:28 +00004361// JavaScript invokes.
Steve Block6ded16b2010-05-10 14:33:55 +01004362
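// Replaces the current frame's arguments with the callee's before a tail
// call: compute the end of the caller's argument area (dst_reg) and of our
// own (src_reg), restore ra/fp from the current frame, then copy the callee
// arguments downwards and point sp at the new frame end. Copying backwards
// is what makes the potentially overlapping areas safe.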
Ben Murdochda12d292016-06-02 14:46:10 +01004363void MacroAssembler::PrepareForTailCall(const ParameterCount& callee_args_count,
4364 Register caller_args_count_reg,
4365 Register scratch0, Register scratch1) {
4366#if DEBUG
4367 if (callee_args_count.is_reg()) {
4368 DCHECK(!AreAliased(callee_args_count.reg(), caller_args_count_reg, scratch0,
4369 scratch1));
4370 } else {
4371 DCHECK(!AreAliased(caller_args_count_reg, scratch0, scratch1));
4372 }
4373#endif
4374
4375 // Calculate the end of destination area where we will put the arguments
4376 // after we drop current frame. We add kPointerSize to count the receiver
4377 // argument which is not included into formal parameters count.
4378 Register dst_reg = scratch0;
4379 Lsa(dst_reg, fp, caller_args_count_reg, kPointerSizeLog2);
4380 Addu(dst_reg, dst_reg,
4381 Operand(StandardFrameConstants::kCallerSPOffset + kPointerSize));
4382
4383 Register src_reg = caller_args_count_reg;
4384 // Calculate the end of source area. +kPointerSize is for the receiver.
4385 if (callee_args_count.is_reg()) {
4386 Lsa(src_reg, sp, callee_args_count.reg(), kPointerSizeLog2);
4387 Addu(src_reg, src_reg, Operand(kPointerSize));
4388 } else {
4389 Addu(src_reg, sp,
4390 Operand((callee_args_count.immediate() + 1) * kPointerSize));
4391 }
4392
4393 if (FLAG_debug_code) {
4394 Check(lo, kStackAccessBelowStackPointer, src_reg, Operand(dst_reg));
4395 }
4396
4397 // Restore caller's frame pointer and return address now as they will be
4398 // overwritten by the copying loop.
4399 lw(ra, MemOperand(fp, StandardFrameConstants::kCallerPCOffset));
4400 lw(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
4401
4402 // Now copy callee arguments to the caller frame going backwards to avoid
4403 // callee arguments corruption (source and destination areas could overlap).
4404
4405 // Both src_reg and dst_reg are pointing to the word after the one to copy,
4406 // so they must be pre-decremented in the loop.
4407 Register tmp_reg = scratch1;
4408 Label loop, entry;
4409 Branch(&entry);
4410 bind(&loop);
4411 Subu(src_reg, src_reg, Operand(kPointerSize));
4412 Subu(dst_reg, dst_reg, Operand(kPointerSize));
4413 lw(tmp_reg, MemOperand(src_reg));
4414 sw(tmp_reg, MemOperand(dst_reg));
4415 bind(&entry);
4416 Branch(&loop, ne, sp, Operand(src_reg));
4417
4418 // Leave current frame.
4419 mov(sp, dst_reg);
4420}
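
// A sketch of the effect, assuming immediate counts with three caller
// arguments and one callee argument: dst_reg starts one word past the
// caller's receiver slot, src_reg one word past the callee's receiver slot,
// and the loop above copies callee_args_count + 1 words (arguments plus
// receiver) downwards until src_reg meets sp. Repointing sp at dst_reg then
// drops the caller's frame in place, leaving the stack set up for the tail
// call.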

void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Label* done,
                                    bool* definitely_mismatches,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  bool definitely_matches = false;
  *definitely_mismatches = false;
  Label regular_invoke;

  // Check whether the expected and actual argument counts match. If not,
  // set up registers according to the contract with
  // ArgumentsAdaptorTrampoline:
  //  a0: actual arguments count
  //  a1: function (passed through to callee)
  //  a2: expected arguments count

  // The code below is made a lot easier because the calling code already sets
  // up actual and expected registers according to the contract if values are
  // passed in registers.
  DCHECK(actual.is_immediate() || actual.reg().is(a0));
  DCHECK(expected.is_immediate() || expected.reg().is(a2));

  if (expected.is_immediate()) {
    DCHECK(actual.is_immediate());
    li(a0, Operand(actual.immediate()));
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaptation code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        *definitely_mismatches = true;
        li(a2, Operand(expected.immediate()));
      }
    }
  } else if (actual.is_immediate()) {
    li(a0, Operand(actual.immediate()));
    Branch(&regular_invoke, eq, expected.reg(), Operand(a0));
  } else {
    Branch(&regular_invoke, eq, expected.reg(), Operand(actual.reg()));
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor));
      Call(adaptor);
      call_wrapper.AfterCall();
      if (!*definitely_mismatches) {
        Branch(done);
      }
    } else {
      Jump(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&regular_invoke);
  }
}
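
// For illustration, invoking a function that formally declares two
// parameters with three actual arguments falls through to the adaptor with
// the register contract above:
//
//   a0 = 3         // actual arguments count
//   a1 = function  // passed through to the callee
//   a2 = 2         // expected arguments count
//
// and the ArgumentsAdaptorTrampoline is then expected to build an adaptor
// frame that pads or drops arguments before entering the callee.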


void MacroAssembler::FloodFunctionIfStepping(Register fun, Register new_target,
                                             const ParameterCount& expected,
                                             const ParameterCount& actual) {
  Label skip_flooding;
  ExternalReference step_in_enabled =
      ExternalReference::debug_step_in_enabled_address(isolate());
  li(t0, Operand(step_in_enabled));
  lb(t0, MemOperand(t0));
  Branch(&skip_flooding, eq, t0, Operand(zero_reg));
  {
    FrameScope frame(this,
                     has_frame() ? StackFrame::NONE : StackFrame::INTERNAL);
    if (expected.is_reg()) {
      SmiTag(expected.reg());
      Push(expected.reg());
    }
    if (actual.is_reg()) {
      SmiTag(actual.reg());
      Push(actual.reg());
    }
    if (new_target.is_valid()) {
      Push(new_target);
    }
    Push(fun);
    Push(fun);
    CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
    Pop(fun);
    if (new_target.is_valid()) {
      Pop(new_target);
    }
    if (actual.is_reg()) {
      Pop(actual.reg());
      SmiUntag(actual.reg());
    }
    if (expected.is_reg()) {
      Pop(expected.reg());
      SmiUntag(expected.reg());
    }
  }
  bind(&skip_flooding);
}
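
// Note on the push/pop discipline above: the expected/actual counts are
// SmiTagged before being pushed so the GC never sees raw integers on the
// stack during the runtime call, and the function is pushed twice because
// the runtime call consumes one copy as its argument while the other copy
// survives to be restored by Pop(fun).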


void MacroAssembler::InvokeFunctionCode(Register function, Register new_target,
                                        const ParameterCount& expected,
                                        const ParameterCount& actual,
                                        InvokeFlag flag,
                                        const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());
  DCHECK(function.is(a1));
  DCHECK_IMPLIES(new_target.is_valid(), new_target.is(a3));

  if (call_wrapper.NeedsDebugStepCheck()) {
    FloodFunctionIfStepping(function, new_target, expected, actual);
  }

  // Clear the new.target register if not given.
  if (!new_target.is_valid()) {
    LoadRoot(a3, Heap::kUndefinedValueRootIndex);
  }

  Label done;
  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, &done, &definitely_mismatches, flag,
                 call_wrapper);
  if (!definitely_mismatches) {
    // We call indirectly through the code field in the function to
    // allow recompilation to take effect without changing any of the
    // call sites.
    Register code = t0;
    lw(code, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(code));
      Call(code);
      call_wrapper.AfterCall();
    } else {
      DCHECK(flag == JUMP_FUNCTION);
      Jump(code);
    }
    // Continue here if InvokePrologue does handle the invocation due to
    // mismatched parameter counts.
    bind(&done);
  }
}


void MacroAssembler::InvokeFunction(Register function,
                                    Register new_target,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  // Contract with called JS functions requires that function is passed in a1.
  DCHECK(function.is(a1));
  Register expected_reg = a2;
  Register temp_reg = t0;

  lw(temp_reg, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
  lw(expected_reg,
     FieldMemOperand(temp_reg,
                     SharedFunctionInfo::kFormalParameterCountOffset));
  sra(expected_reg, expected_reg, kSmiTagSize);

  ParameterCount expected(expected_reg);
  InvokeFunctionCode(function, new_target, expected, actual, flag,
                     call_wrapper);
}


void MacroAssembler::InvokeFunction(Register function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  // Contract with called JS functions requires that function is passed in a1.
  DCHECK(function.is(a1));

  // Get the function and set up the context.
  lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

  InvokeFunctionCode(a1, no_reg, expected, actual, flag, call_wrapper);
}


void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  li(a1, function);
  InvokeFunction(a1, expected, actual, flag, call_wrapper);
}


void MacroAssembler::IsObjectJSStringType(Register object,
                                          Register scratch,
                                          Label* fail) {
  DCHECK(kNotStringTag != 0);

  lw(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
  lbu(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  And(scratch, scratch, Operand(kIsNotStringMask));
  Branch(fail, ne, scratch, Operand(zero_reg));
}


void MacroAssembler::IsObjectNameType(Register object,
                                      Register scratch,
                                      Label* fail) {
  lw(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
  lbu(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  Branch(fail, hi, scratch, Operand(LAST_NAME_TYPE));
}


// ---------------------------------------------------------------------------
// Support functions.


void MacroAssembler::GetMapConstructor(Register result, Register map,
                                       Register temp, Register temp2) {
  Label done, loop;
  lw(result, FieldMemOperand(map, Map::kConstructorOrBackPointerOffset));
  bind(&loop);
  JumpIfSmi(result, &done);
  GetObjectType(result, temp, temp2);
  Branch(&done, ne, temp2, Operand(MAP_TYPE));
  lw(result, FieldMemOperand(result, Map::kConstructorOrBackPointerOffset));
  Branch(&loop);
  bind(&done);
}
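
// The constructor-or-back-pointer field is overloaded: a map that has been
// transitioned stores its back pointer (another map) there, so the loop
// above keeps following the field while it still sees a map and stops at
// the first non-map value, which is the constructor itself (or a smi
// placeholder, caught by the JumpIfSmi).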


void MacroAssembler::TryGetFunctionPrototype(Register function, Register result,
                                             Register scratch, Label* miss) {
  // Get the prototype or initial map from the function.
  lw(result,
     FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  LoadRoot(t8, Heap::kTheHoleValueRootIndex);
  Branch(miss, eq, result, Operand(t8));

  // If the function does not have an initial map, we're done.
  Label done;
  GetObjectType(result, scratch, scratch);
  Branch(&done, ne, scratch, Operand(MAP_TYPE));

  // Get the prototype from the initial map.
  lw(result, FieldMemOperand(result, Map::kPrototypeOffset));

  // All done.
  bind(&done);
}


void MacroAssembler::GetObjectType(Register object,
                                   Register map,
                                   Register type_reg) {
  lw(map, FieldMemOperand(object, HeapObject::kMapOffset));
  lbu(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
}


// -----------------------------------------------------------------------------
// Runtime calls.

void MacroAssembler::CallStub(CodeStub* stub,
                              TypeFeedbackId ast_id,
                              Condition cond,
                              Register r1,
                              const Operand& r2,
                              BranchDelaySlot bd) {
  DCHECK(AllowThisStubCall(stub));  // Stub calls are not allowed in some stubs.
  Call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id,
       cond, r1, r2, bd);
}


void MacroAssembler::TailCallStub(CodeStub* stub,
                                  Condition cond,
                                  Register r1,
                                  const Operand& r2,
                                  BranchDelaySlot bd) {
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond, r1, r2, bd);
}


bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
  return has_frame_ || !stub->SometimesSetsUpAFrame();
}


void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // If the hash field contains an array index, pick it out. The assert checks
  // that the constants for the maximum number of digits for an array index
  // cached in the hash field and the number of bits reserved for it do not
  // conflict.
  DCHECK(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  DecodeFieldToSmi<String::ArrayIndexValueBits>(index, hash);
}


void MacroAssembler::ObjectToDoubleFPURegister(Register object,
                                               FPURegister result,
                                               Register scratch1,
                                               Register scratch2,
                                               Register heap_number_map,
                                               Label* not_number,
                                               ObjectToDoubleFlags flags) {
  Label done;
  if ((flags & OBJECT_NOT_SMI) == 0) {
    Label not_smi;
    JumpIfNotSmi(object, &not_smi);
    // Remove smi tag and convert to double.
    sra(scratch1, object, kSmiTagSize);
    mtc1(scratch1, result);
    cvt_d_w(result, result);
    Branch(&done);
    bind(&not_smi);
  }
  // Check for heap number and load double value from it.
  lw(scratch1, FieldMemOperand(object, HeapObject::kMapOffset));
  Branch(not_number, ne, scratch1, Operand(heap_number_map));

  if ((flags & AVOID_NANS_AND_INFINITIES) != 0) {
    // If exponent is all ones the number is either a NaN or +/-Infinity.
    Register exponent = scratch1;
    Register mask_reg = scratch2;
    lw(exponent, FieldMemOperand(object, HeapNumber::kExponentOffset));
    li(mask_reg, HeapNumber::kExponentMask);

    And(exponent, exponent, mask_reg);
    Branch(not_number, eq, exponent, Operand(mask_reg));
  }
  ldc1(result, FieldMemOperand(object, HeapNumber::kValueOffset));
  bind(&done);
}
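
// IEEE-754 background for the NaN/Infinity filter above: in the high word
// of a double the exponent occupies bits 20..30, which is what
// HeapNumber::kExponentMask (0x7FF00000) selects, and an all-ones exponent
// encodes NaN or +/-Infinity. E.g. +Infinity has the high word 0x7FF00000,
// so (high_word & mask) == mask and the branch to not_number is taken.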


void MacroAssembler::SmiToDoubleFPURegister(Register smi,
                                            FPURegister value,
                                            Register scratch1) {
  sra(scratch1, smi, kSmiTagSize);
  mtc1(scratch1, value);
  cvt_d_w(value, value);
}


static inline void BranchOvfHelper(MacroAssembler* masm, Register overflow_dst,
                                   Label* overflow_label,
                                   Label* no_overflow_label) {
  DCHECK(overflow_label || no_overflow_label);
  if (!overflow_label) {
    DCHECK(no_overflow_label);
    masm->Branch(no_overflow_label, ge, overflow_dst, Operand(zero_reg));
  } else {
    masm->Branch(overflow_label, lt, overflow_dst, Operand(zero_reg));
    if (no_overflow_label) masm->Branch(no_overflow_label);
  }
}
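
// BranchOvfHelper encodes the convention used by the arithmetic helpers
// below: they leave the sign bit of overflow_dst set exactly when the
// operation overflowed, so a single signed comparison against zero is
// enough to dispatch to either label.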


void MacroAssembler::AddBranchOvf(Register dst, Register left,
                                  const Operand& right, Label* overflow_label,
                                  Label* no_overflow_label, Register scratch) {
  if (right.is_reg()) {
    AddBranchOvf(dst, left, right.rm(), overflow_label, no_overflow_label,
                 scratch);
  } else {
    if (IsMipsArchVariant(kMips32r6)) {
      Register right_reg = t9;
      DCHECK(!left.is(right_reg));
      li(right_reg, Operand(right));
      AddBranchOvf(dst, left, right_reg, overflow_label, no_overflow_label);
    } else {
      Register overflow_dst = t9;
      DCHECK(!dst.is(scratch));
      DCHECK(!dst.is(overflow_dst));
      DCHECK(!scratch.is(overflow_dst));
      DCHECK(!left.is(overflow_dst));
      if (dst.is(left)) {
        mov(scratch, left);                  // Preserve left.
        Addu(dst, left, right.immediate());  // Left is overwritten.
        xor_(scratch, dst, scratch);         // Original left.
        // Load right since xori takes uint16 as immediate.
        Addu(overflow_dst, zero_reg, right);
        xor_(overflow_dst, dst, overflow_dst);
        and_(overflow_dst, overflow_dst, scratch);
      } else {
        Addu(dst, left, right.immediate());
        xor_(overflow_dst, dst, left);
        // Load right since xori takes uint16 as immediate.
        Addu(scratch, zero_reg, right);
        xor_(scratch, dst, scratch);
        and_(overflow_dst, scratch, overflow_dst);
      }
      BranchOvfHelper(this, overflow_dst, overflow_label, no_overflow_label);
    }
  }
}


void MacroAssembler::AddBranchOvf(Register dst, Register left, Register right,
                                  Label* overflow_label,
                                  Label* no_overflow_label, Register scratch) {
  if (IsMipsArchVariant(kMips32r6)) {
    if (!overflow_label) {
      DCHECK(no_overflow_label);
      DCHECK(!dst.is(scratch));
      Register left_reg = left.is(dst) ? scratch : left;
      Register right_reg = right.is(dst) ? t9 : right;
      DCHECK(!dst.is(left_reg));
      DCHECK(!dst.is(right_reg));
      Move(left_reg, left);
      Move(right_reg, right);
      addu(dst, left, right);
      bnvc(left_reg, right_reg, no_overflow_label);
    } else {
      bovc(left, right, overflow_label);
      addu(dst, left, right);
      if (no_overflow_label) bc(no_overflow_label);
    }
  } else {
    Register overflow_dst = t9;
    DCHECK(!dst.is(scratch));
    DCHECK(!dst.is(overflow_dst));
    DCHECK(!scratch.is(overflow_dst));
    DCHECK(!left.is(overflow_dst));
    DCHECK(!right.is(overflow_dst));
    DCHECK(!left.is(scratch));
    DCHECK(!right.is(scratch));

    if (left.is(right) && dst.is(left)) {
      mov(overflow_dst, right);
      right = overflow_dst;
    }

    if (dst.is(left)) {
      mov(scratch, left);           // Preserve left.
      addu(dst, left, right);       // Left is overwritten.
      xor_(scratch, dst, scratch);  // Original left.
      xor_(overflow_dst, dst, right);
      and_(overflow_dst, overflow_dst, scratch);
    } else if (dst.is(right)) {
      mov(scratch, right);          // Preserve right.
      addu(dst, left, right);       // Right is overwritten.
      xor_(scratch, dst, scratch);  // Original right.
      xor_(overflow_dst, dst, left);
      and_(overflow_dst, overflow_dst, scratch);
    } else {
      addu(dst, left, right);
      xor_(overflow_dst, dst, left);
      xor_(scratch, dst, right);
      and_(overflow_dst, scratch, overflow_dst);
    }
    BranchOvfHelper(this, overflow_dst, overflow_label, no_overflow_label);
  }
}
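
// The non-r6 path above uses the classic two's-complement rule: signed
// addition overflows iff both operands have the same sign and the result's
// sign differs, i.e. ((dst ^ left) & (dst ^ right)) has its sign bit set.
// An 8-bit sketch: 100 + 100 = 200 wraps to -56, and (-56 ^ 100), being
// negative, makes the AND negative, flagging the overflow. On r6 the
// bovc/bnvc instructions perform the same test in hardware.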


void MacroAssembler::SubBranchOvf(Register dst, Register left,
                                  const Operand& right, Label* overflow_label,
                                  Label* no_overflow_label, Register scratch) {
  DCHECK(overflow_label || no_overflow_label);
  if (right.is_reg()) {
    SubBranchOvf(dst, left, right.rm(), overflow_label, no_overflow_label,
                 scratch);
  } else {
    Register overflow_dst = t9;
    DCHECK(!dst.is(scratch));
    DCHECK(!dst.is(overflow_dst));
    DCHECK(!scratch.is(overflow_dst));
    DCHECK(!left.is(overflow_dst));
    DCHECK(!left.is(scratch));
    if (dst.is(left)) {
      mov(scratch, left);                  // Preserve left.
      Subu(dst, left, right.immediate());  // Left is overwritten.
      // Load right since xori takes uint16 as immediate.
      Addu(overflow_dst, zero_reg, right);
      xor_(overflow_dst, scratch, overflow_dst);  // Original left ^ right.
      xor_(scratch, dst, scratch);                // Result ^ original left.
      and_(overflow_dst, scratch, overflow_dst);
    } else {
      Subu(dst, left, right);
      xor_(overflow_dst, dst, left);
      // Load right since xori takes uint16 as immediate.
      Addu(scratch, zero_reg, right);
      xor_(scratch, left, scratch);
      and_(overflow_dst, scratch, overflow_dst);
    }
    BranchOvfHelper(this, overflow_dst, overflow_label, no_overflow_label);
  }
}


void MacroAssembler::SubBranchOvf(Register dst, Register left, Register right,
                                  Label* overflow_label,
                                  Label* no_overflow_label, Register scratch) {
  DCHECK(overflow_label || no_overflow_label);
  Register overflow_dst = t9;
  DCHECK(!dst.is(scratch));
  DCHECK(!dst.is(overflow_dst));
  DCHECK(!scratch.is(overflow_dst));
  DCHECK(!overflow_dst.is(left));
  DCHECK(!overflow_dst.is(right));
  DCHECK(!scratch.is(left));
  DCHECK(!scratch.is(right));

  // This happens with some crankshaft code. Since Subu works fine if
  // left == right, let's not make that restriction here.
  if (left.is(right)) {
    mov(dst, zero_reg);
    if (no_overflow_label) {
      Branch(no_overflow_label);
    }
  }

  if (dst.is(left)) {
    mov(scratch, left);                // Preserve left.
    subu(dst, left, right);            // Left is overwritten.
    xor_(overflow_dst, dst, scratch);  // Result ^ original left.
    xor_(scratch, scratch, right);     // Original left ^ right.
    and_(overflow_dst, scratch, overflow_dst);
  } else if (dst.is(right)) {
    mov(scratch, right);           // Preserve right.
    subu(dst, left, right);        // Right is overwritten.
    xor_(overflow_dst, dst, left);
    xor_(scratch, left, scratch);  // Left ^ original right.
    and_(overflow_dst, scratch, overflow_dst);
  } else {
    subu(dst, left, right);
    xor_(overflow_dst, dst, left);
    xor_(scratch, left, right);
    and_(overflow_dst, scratch, overflow_dst);
  }
  BranchOvfHelper(this, overflow_dst, overflow_label, no_overflow_label);
}
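
// Subtraction uses the mirrored rule: left - right overflows iff the
// operands have different signs and the result's sign differs from left's,
// i.e. ((dst ^ left) & (left ^ right)) has its sign bit set. An 8-bit
// sketch: 100 - (-100) = 200 wraps to -56; both (-56 ^ 100) and
// (100 ^ -100) are negative, so their AND is negative and the overflow is
// flagged.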


void MacroAssembler::CallRuntime(const Runtime::Function* f, int num_arguments,
                                 SaveFPRegsMode save_doubles,
                                 BranchDelaySlot bd) {
  // All parameters are on the stack. v0 has the return value after call.

  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  CHECK(f->nargs < 0 || f->nargs == num_arguments);

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  PrepareCEntryArgs(num_arguments);
  PrepareCEntryFunction(ExternalReference(f, isolate()));
  CEntryStub stub(isolate(), 1, save_doubles);
  CallStub(&stub, TypeFeedbackId::None(), al, zero_reg, Operand(zero_reg), bd);
}


void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments,
                                           BranchDelaySlot bd) {
  PrepareCEntryArgs(num_arguments);
  PrepareCEntryFunction(ext);

  CEntryStub stub(isolate(), 1);
  CallStub(&stub, TypeFeedbackId::None(), al, zero_reg, Operand(zero_reg), bd);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid) {
  const Runtime::Function* function = Runtime::FunctionForId(fid);
  DCHECK_EQ(1, function->result_size);
  if (function->nargs >= 0) {
    PrepareCEntryArgs(function->nargs);
  }
  JumpToExternalReference(ExternalReference(fid, isolate()));
}


void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin,
                                             BranchDelaySlot bd) {
  PrepareCEntryFunction(builtin);
  CEntryStub stub(isolate(), 1);
  Jump(stub.GetCode(),
       RelocInfo::CODE_TARGET,
       al,
       zero_reg,
       Operand(zero_reg),
       bd);
}


void MacroAssembler::SetCounter(StatsCounter* counter, int value,
                                Register scratch1, Register scratch2) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    li(scratch1, Operand(value));
    li(scratch2, Operand(ExternalReference(counter)));
    sw(scratch1, MemOperand(scratch2));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value,
                                      Register scratch1, Register scratch2) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    li(scratch2, Operand(ExternalReference(counter)));
    lw(scratch1, MemOperand(scratch2));
    Addu(scratch1, scratch1, Operand(value));
    sw(scratch1, MemOperand(scratch2));
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value,
                                      Register scratch1, Register scratch2) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    li(scratch2, Operand(ExternalReference(counter)));
    lw(scratch1, MemOperand(scratch2));
    Subu(scratch1, scratch1, Operand(value));
    sw(scratch1, MemOperand(scratch2));
  }
}


// -----------------------------------------------------------------------------
// Debugging.

void MacroAssembler::Assert(Condition cc, BailoutReason reason,
                            Register rs, Operand rt) {
  if (emit_debug_code())
    Check(cc, reason, rs, rt);
}


void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    DCHECK(!elements.is(at));
    Label ok;
    push(elements);
    lw(elements, FieldMemOperand(elements, HeapObject::kMapOffset));
    LoadRoot(at, Heap::kFixedArrayMapRootIndex);
    Branch(&ok, eq, elements, Operand(at));
    LoadRoot(at, Heap::kFixedDoubleArrayMapRootIndex);
    Branch(&ok, eq, elements, Operand(at));
    LoadRoot(at, Heap::kFixedCOWArrayMapRootIndex);
    Branch(&ok, eq, elements, Operand(at));
    Abort(kJSObjectWithFastElementsMapHasSlowElements);
    bind(&ok);
    pop(elements);
  }
}


void MacroAssembler::Check(Condition cc, BailoutReason reason,
                           Register rs, Operand rt) {
  Label L;
  Branch(&L, cc, rs, rt);
  Abort(reason);
  // Will not return here.
  bind(&L);
}


void MacroAssembler::Abort(BailoutReason reason) {
  Label abort_start;
  bind(&abort_start);
#ifdef DEBUG
  const char* msg = GetBailoutReason(reason);
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }

  if (FLAG_trap_on_abort) {
    stop(msg);
    return;
  }
#endif

  li(a0, Operand(Smi::FromInt(reason)));
  push(a0);
  // Disable stub call restrictions to always allow calls to abort.
  if (!has_frame_) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    CallRuntime(Runtime::kAbort);
  } else {
    CallRuntime(Runtime::kAbort);
  }
  // Will not return here.
  if (is_trampoline_pool_blocked()) {
    // If the calling code cares about the exact number of
    // instructions generated, we insert padding here to keep the size
    // of the Abort macro constant.
    // Currently in debug mode with debug_code enabled the number of
    // generated instructions is 10, so we use this as a maximum value.
    static const int kExpectedAbortInstructions = 10;
    int abort_instructions = InstructionsGeneratedSince(&abort_start);
    DCHECK(abort_instructions <= kExpectedAbortInstructions);
    while (abort_instructions++ < kExpectedAbortInstructions) {
      nop();
    }
  }
}


void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    lw(dst, MemOperand(cp, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    for (int i = 1; i < context_chain_length; i++) {
      lw(dst, MemOperand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    }
  } else {
    // Slot is in the current function context. Move it into the
    // destination register in case we store into it (the write barrier
    // cannot be allowed to destroy the context in cp).
    Move(dst, cp);
  }
}
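
// For example, LoadContext(t0, 2) emits two chained loads of the PREVIOUS
// slot, roughly:
//
//   lw(t0, MemOperand(cp, Context::SlotOffset(Context::PREVIOUS_INDEX)));
//   lw(t0, MemOperand(t0, Context::SlotOffset(Context::PREVIOUS_INDEX)));
//
// walking from the current context to its grandparent.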


void MacroAssembler::LoadTransitionedArrayMapConditional(
    ElementsKind expected_kind,
    ElementsKind transitioned_kind,
    Register map_in_out,
    Register scratch,
    Label* no_map_match) {
  DCHECK(IsFastElementsKind(expected_kind));
  DCHECK(IsFastElementsKind(transitioned_kind));

  // Check that the function's map is the same as the expected cached map.
  lw(scratch, NativeContextMemOperand());
  lw(at, ContextMemOperand(scratch, Context::ArrayMapIndex(expected_kind)));
  Branch(no_map_match, ne, map_in_out, Operand(at));

  // Use the transitioned cached map.
  lw(map_in_out,
     ContextMemOperand(scratch, Context::ArrayMapIndex(transitioned_kind)));
}


void MacroAssembler::LoadNativeContextSlot(int index, Register dst) {
  lw(dst, NativeContextMemOperand());
  lw(dst, ContextMemOperand(dst, index));
}


void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map,
                                                  Register scratch) {
  // Load the initial map. The global functions all have initial maps.
  lw(map, FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    CheckMap(map, scratch, Heap::kMetaMapRootIndex, &fail, DO_SMI_CHECK);
    Branch(&ok);
    bind(&fail);
    Abort(kGlobalFunctionsMustHaveInitialMap);
    bind(&ok);
  }
}

void MacroAssembler::StubPrologue(StackFrame::Type type) {
  li(at, Operand(Smi::FromInt(type)));
  PushCommonFrame(at);
}


void MacroAssembler::Prologue(bool code_pre_aging) {
  PredictableCodeSizeScope predictible_code_size_scope(
      this, kNoCodeAgeSequenceLength);
  // The following three instructions must remain together and unmodified
  // for code aging to work properly.
  if (code_pre_aging) {
    // Pre-age the code.
    Code* stub = Code::GetPreAgedCodeAgeStub(isolate());
    nop(Assembler::CODE_AGE_MARKER_NOP);
    // Load the stub address to t9 and call it,
    // GetCodeAgeAndParity() extracts the stub address from this instruction.
    li(t9,
       Operand(reinterpret_cast<uint32_t>(stub->instruction_start())),
       CONSTANT_SIZE);
    nop();  // Prevent jalr to jal optimization.
    jalr(t9, a0);
    nop();  // Branch delay slot nop.
    nop();  // Pad the empty space.
  } else {
    PushStandardFrame(a1);
    nop(Assembler::CODE_AGE_SEQUENCE_NOP);
  }
}


void MacroAssembler::EmitLoadTypeFeedbackVector(Register vector) {
  lw(vector, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  lw(vector, FieldMemOperand(vector, JSFunction::kSharedFunctionInfoOffset));
  lw(vector,
     FieldMemOperand(vector, SharedFunctionInfo::kFeedbackVectorOffset));
}


void MacroAssembler::EnterFrame(StackFrame::Type type,
                                bool load_constant_pool_pointer_reg) {
  // Out-of-line constant pool not implemented on mips.
  UNREACHABLE();
}


void MacroAssembler::EnterFrame(StackFrame::Type type) {
  int stack_offset, fp_offset;
  if (type == StackFrame::INTERNAL) {
    stack_offset = -4 * kPointerSize;
    fp_offset = 2 * kPointerSize;
  } else {
    stack_offset = -3 * kPointerSize;
    fp_offset = 1 * kPointerSize;
  }
  addiu(sp, sp, stack_offset);
  stack_offset = -stack_offset - kPointerSize;
  sw(ra, MemOperand(sp, stack_offset));
  stack_offset -= kPointerSize;
  sw(fp, MemOperand(sp, stack_offset));
  stack_offset -= kPointerSize;
  li(t9, Operand(Smi::FromInt(type)));
  sw(t9, MemOperand(sp, stack_offset));
  if (type == StackFrame::INTERNAL) {
    DCHECK_EQ(stack_offset, kPointerSize);
    li(t9, Operand(CodeObject()));
    sw(t9, MemOperand(sp, 0));
  } else {
    DCHECK_EQ(stack_offset, 0);
  }
  // Adjust FP to point to saved FP.
  Addu(fp, sp, Operand(fp_offset));
}
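
// Resulting layout (word offsets relative to the new fp):
//
//   [fp + 1]  saved ra
//   [fp + 0]  saved caller fp    <- fp
//   [fp - 1]  frame type (Smi)
//   [fp - 2]  CodeObject()       <- sp for INTERNAL frames; other frame
//                                   types end at the type slot.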


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  addiu(sp, fp, 2 * kPointerSize);
  lw(ra, MemOperand(fp, 1 * kPointerSize));
  lw(fp, MemOperand(fp, 0 * kPointerSize));
}

void MacroAssembler::EnterExitFrame(bool save_doubles, int stack_space) {
  // Set up the frame structure on the stack.
  STATIC_ASSERT(2 * kPointerSize == ExitFrameConstants::kCallerSPDisplacement);
  STATIC_ASSERT(1 * kPointerSize == ExitFrameConstants::kCallerPCOffset);
  STATIC_ASSERT(0 * kPointerSize == ExitFrameConstants::kCallerFPOffset);

  // This is how the stack will look:
  // fp + 2 (==kCallerSPDisplacement) - old stack's end
  // [fp + 1 (==kCallerPCOffset)] - saved old ra
  // [fp + 0 (==kCallerFPOffset)] - saved old fp
  // [fp - 1] - StackFrame::EXIT Smi
  // [fp - 2 (==kSPOffset)] - sp of the called function
  // [fp - 3 (==kCodeOffset)] - CodeObject
  // fp - (2 + stack_space + alignment) == sp == [fp - kSPOffset] - top of the
  //   new stack (will contain saved ra)

  // Save registers and reserve room for saved entry sp and code object.
  addiu(sp, sp, -2 * kPointerSize - ExitFrameConstants::kFixedFrameSizeFromFp);
  sw(ra, MemOperand(sp, 4 * kPointerSize));
  sw(fp, MemOperand(sp, 3 * kPointerSize));
  li(at, Operand(Smi::FromInt(StackFrame::EXIT)));
  sw(at, MemOperand(sp, 2 * kPointerSize));
  // Set up new frame pointer.
  addiu(fp, sp, ExitFrameConstants::kFixedFrameSizeFromFp);

  if (emit_debug_code()) {
    sw(zero_reg, MemOperand(fp, ExitFrameConstants::kSPOffset));
  }

  // Accessed from ExitFrame::code_slot.
  li(t8, Operand(CodeObject()), CONSTANT_SIZE);
  sw(t8, MemOperand(fp, ExitFrameConstants::kCodeOffset));

  // Save the frame pointer and the context in top.
  li(t8, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
  sw(fp, MemOperand(t8));
  li(t8, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
  sw(cp, MemOperand(t8));

  const int frame_alignment = MacroAssembler::ActivationFrameAlignment();
  if (save_doubles) {
    // The stack must be aligned to 0 modulo 8 for stores with sdc1.
    DCHECK(kDoubleSize == frame_alignment);
    if (frame_alignment > 0) {
      DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
      And(sp, sp, Operand(-frame_alignment));  // Align stack.
    }
    int space = FPURegister::kMaxNumRegisters * kDoubleSize;
    Subu(sp, sp, Operand(space));
    // Remember: we only need to save every 2nd double FPU value.
    for (int i = 0; i < FPURegister::kMaxNumRegisters; i += 2) {
      FPURegister reg = FPURegister::from_code(i);
      sdc1(reg, MemOperand(sp, i * kDoubleSize));
    }
  }

  // Reserve place for the return address, stack space and an optional slot
  // (used by the DirectCEntryStub to hold the return value if a struct is
  // returned) and align the frame preparing for calling the runtime function.
  DCHECK(stack_space >= 0);
  Subu(sp, sp, Operand((stack_space + 2) * kPointerSize));
  if (frame_alignment > 0) {
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    And(sp, sp, Operand(-frame_alignment));  // Align stack.
  }

  // Set the exit frame sp value to point just before the return address
  // location.
  addiu(at, sp, kPointerSize);
  sw(at, MemOperand(fp, ExitFrameConstants::kSPOffset));
}


void MacroAssembler::LeaveExitFrame(bool save_doubles, Register argument_count,
                                    bool restore_context, bool do_return,
                                    bool argument_count_is_length) {
  // Optionally restore all double registers.
  if (save_doubles) {
    // Remember: we only need to restore every 2nd double FPU value.
    lw(t8, MemOperand(fp, ExitFrameConstants::kSPOffset));
    for (int i = 0; i < FPURegister::kMaxNumRegisters; i += 2) {
      FPURegister reg = FPURegister::from_code(i);
      ldc1(reg, MemOperand(t8, i * kDoubleSize + kPointerSize));
    }
  }

  // Clear top frame.
  li(t8, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
  sw(zero_reg, MemOperand(t8));

  // Restore current context from top and clear it in debug mode.
  if (restore_context) {
    li(t8, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
    lw(cp, MemOperand(t8));
  }
#ifdef DEBUG
  li(t8, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
  sw(a3, MemOperand(t8));
#endif

  // Pop the arguments, restore registers, and return.
  mov(sp, fp);  // Respect ABI stack constraint.
  lw(fp, MemOperand(sp, ExitFrameConstants::kCallerFPOffset));
  lw(ra, MemOperand(sp, ExitFrameConstants::kCallerPCOffset));

  if (argument_count.is_valid()) {
    if (argument_count_is_length) {
      addu(sp, sp, argument_count);
    } else {
      Lsa(sp, sp, argument_count, kPointerSizeLog2, t8);
    }
  }

  if (do_return) {
    Ret(USE_DELAY_SLOT);
    // If returning, the instruction in the delay slot will be the addiu below.
  }
  addiu(sp, sp, 8);
}


void MacroAssembler::InitializeNewString(Register string,
                                         Register length,
                                         Heap::RootListIndex map_index,
                                         Register scratch1,
                                         Register scratch2) {
  sll(scratch1, length, kSmiTagSize);
  LoadRoot(scratch2, map_index);
  sw(scratch1, FieldMemOperand(string, String::kLengthOffset));
  li(scratch1, Operand(String::kEmptyHashField));
  sw(scratch2, FieldMemOperand(string, HeapObject::kMapOffset));
  sw(scratch1, FieldMemOperand(string, String::kHashFieldOffset));
}


int MacroAssembler::ActivationFrameAlignment() {
#if V8_HOST_ARCH_MIPS
  // Running on the real platform. Use the alignment as mandated by the local
  // environment.
  // Note: This will break if we ever start generating snapshots on one Mips
  // platform for another Mips platform with a different alignment.
  return base::OS::ActivationFrameAlignment();
#else  // V8_HOST_ARCH_MIPS
  // If we are using the simulator then we should always align to the expected
  // alignment. As the simulator is used to generate snapshots we do not know
  // if the target platform will need alignment, so this is controlled from a
  // flag.
  return FLAG_sim_stack_alignment;
#endif  // V8_HOST_ARCH_MIPS
}


void MacroAssembler::AssertStackIsAligned() {
  if (emit_debug_code()) {
    const int frame_alignment = ActivationFrameAlignment();
    const int frame_alignment_mask = frame_alignment - 1;

    if (frame_alignment > kPointerSize) {
      Label alignment_as_expected;
      DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
      andi(at, sp, frame_alignment_mask);
      Branch(&alignment_as_expected, eq, at, Operand(zero_reg));
      // Don't use Check here, as it will call Runtime_Abort re-entering here.
      stop("Unexpected stack alignment");
      bind(&alignment_as_expected);
    }
  }
}


void MacroAssembler::JumpIfNotPowerOfTwoOrZero(
    Register reg,
    Register scratch,
    Label* not_power_of_two_or_zero) {
  Subu(scratch, reg, Operand(1));
  Branch(USE_DELAY_SLOT, not_power_of_two_or_zero, lt,
         scratch, Operand(zero_reg));
  and_(at, scratch, reg);  // In the delay slot.
  Branch(not_power_of_two_or_zero, ne, at, Operand(zero_reg));
}


void MacroAssembler::SmiTagCheckOverflow(Register reg, Register overflow) {
  DCHECK(!reg.is(overflow));
  mov(overflow, reg);  // Save original value.
  SmiTag(reg);
  xor_(overflow, overflow, reg);  // Overflow if (value ^ 2 * value) < 0.
}


void MacroAssembler::SmiTagCheckOverflow(Register dst,
                                         Register src,
                                         Register overflow) {
  if (dst.is(src)) {
    // Fall back to slower case.
    SmiTagCheckOverflow(dst, overflow);
  } else {
    DCHECK(!dst.is(src));
    DCHECK(!dst.is(overflow));
    DCHECK(!src.is(overflow));
    SmiTag(dst, src);
    xor_(overflow, dst, src);  // Overflow if (value ^ 2 * value) < 0.
  }
}
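
// Why (value ^ 2 * value) < 0 detects overflow: SmiTag is a left shift by
// one, so the tagged value is 2 * value, and the shift changes the sign bit
// exactly when the value does not fit in 31 signed bits. XOR-ing the
// original with the shifted value exposes that flipped sign bit. E.g. for
// value = 0x40000000: 2 * value = 0x80000000 and
// 0x40000000 ^ 0x80000000 = 0xC0000000, which is negative.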
5491
5492
5493void MacroAssembler::UntagAndJumpIfSmi(Register dst,
5494 Register src,
5495 Label* smi_case) {
5496 JumpIfSmi(src, smi_case, at, USE_DELAY_SLOT);
5497 SmiUntag(dst, src);
5498}
5499
5500
5501void MacroAssembler::UntagAndJumpIfNotSmi(Register dst,
5502 Register src,
5503 Label* non_smi_case) {
5504 JumpIfNotSmi(src, non_smi_case, at, USE_DELAY_SLOT);
5505 SmiUntag(dst, src);
5506}
5507
5508void MacroAssembler::JumpIfSmi(Register value,
5509 Label* smi_label,
5510 Register scratch,
5511 BranchDelaySlot bd) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005512 DCHECK_EQ(0, kSmiTag);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01005513 andi(scratch, value, kSmiTagMask);
5514 Branch(bd, smi_label, eq, scratch, Operand(zero_reg));
5515}
5516
5517void MacroAssembler::JumpIfNotSmi(Register value,
5518 Label* not_smi_label,
5519 Register scratch,
5520 BranchDelaySlot bd) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005521 DCHECK_EQ(0, kSmiTag);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01005522 andi(scratch, value, kSmiTagMask);
5523 Branch(bd, not_smi_label, ne, scratch, Operand(zero_reg));
5524}


void MacroAssembler::JumpIfNotBothSmi(Register reg1,
                                      Register reg2,
                                      Label* on_not_both_smi) {
  STATIC_ASSERT(kSmiTag == 0);
  DCHECK_EQ(1, kSmiTagMask);
  or_(at, reg1, reg2);
  JumpIfNotSmi(at, on_not_both_smi);
}


void MacroAssembler::JumpIfEitherSmi(Register reg1,
                                     Register reg2,
                                     Label* on_either_smi) {
  STATIC_ASSERT(kSmiTag == 0);
  DCHECK_EQ(1, kSmiTagMask);
  // Both tag bits must be 1 (i.e. neither operand is a smi) for the AND to
  // have its tag bit set.
  and_(at, reg1, reg2);
  JumpIfSmi(at, on_either_smi);
}
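
// A sketch of the tag algebra behind the two helpers above (illustrative
// pseudo-C, assuming the usual kSmiTag == 0 and kSmiTagMask == 1):
//
//   // Both are smis  <=>  both tag bits are 0  <=>  ((r1 | r2) & 1) == 0.
//   bool NotBothSmi(uint32_t r1, uint32_t r2) { return ((r1 | r2) & 1) != 0; }
//   // Either is a smi  <=>  some tag bit is 0  <=>  ((r1 & r2) & 1) == 0.
//   bool EitherSmi(uint32_t r1, uint32_t r2)  { return ((r1 & r2) & 1) == 0; }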
5546
Ben Murdochda12d292016-06-02 14:46:10 +01005547void MacroAssembler::AssertNotNumber(Register object) {
5548 if (emit_debug_code()) {
5549 STATIC_ASSERT(kSmiTag == 0);
5550 andi(at, object, kSmiTagMask);
5551 Check(ne, kOperandIsANumber, at, Operand(zero_reg));
5552 GetObjectType(object, t8, t8);
5553 Check(ne, kOperandIsNotANumber, t8, Operand(HEAP_NUMBER_TYPE));
5554 }
5555}
Steve Block44f0eee2011-05-26 01:26:41 +01005556
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005557void MacroAssembler::AssertNotSmi(Register object) {
5558 if (emit_debug_code()) {
5559 STATIC_ASSERT(kSmiTag == 0);
5560 andi(at, object, kSmiTagMask);
5561 Check(ne, kOperandIsASmi, at, Operand(zero_reg));
5562 }
Steve Block44f0eee2011-05-26 01:26:41 +01005563}
5564
5565
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005566void MacroAssembler::AssertSmi(Register object) {
5567 if (emit_debug_code()) {
5568 STATIC_ASSERT(kSmiTag == 0);
5569 andi(at, object, kSmiTagMask);
5570 Check(eq, kOperandIsASmi, at, Operand(zero_reg));
5571 }
Steve Block44f0eee2011-05-26 01:26:41 +01005572}


void MacroAssembler::AssertString(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    SmiTst(object, t8);
    Check(ne, kOperandIsASmiAndNotAString, t8, Operand(zero_reg));
    GetObjectType(object, t8, t8);
    Check(lo, kOperandIsNotAString, t8, Operand(FIRST_NONSTRING_TYPE));
  }
}


void MacroAssembler::AssertName(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    SmiTst(object, t8);
    Check(ne, kOperandIsASmiAndNotAName, t8, Operand(zero_reg));
    GetObjectType(object, t8, t8);
    Check(le, kOperandIsNotAName, t8, Operand(LAST_NAME_TYPE));
  }
}


void MacroAssembler::AssertFunction(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    SmiTst(object, t8);
    Check(ne, kOperandIsASmiAndNotAFunction, t8, Operand(zero_reg));
    GetObjectType(object, t8, t8);
    Check(eq, kOperandIsNotAFunction, t8, Operand(JS_FUNCTION_TYPE));
  }
}


void MacroAssembler::AssertBoundFunction(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    SmiTst(object, t8);
    Check(ne, kOperandIsASmiAndNotABoundFunction, t8, Operand(zero_reg));
    GetObjectType(object, t8, t8);
    Check(eq, kOperandIsNotABoundFunction, t8, Operand(JS_BOUND_FUNCTION_TYPE));
  }
}


void MacroAssembler::AssertReceiver(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    SmiTst(object, t8);
    Check(ne, kOperandIsASmiAndNotAReceiver, t8, Operand(zero_reg));
    GetObjectType(object, t8, t8);
    Check(ge, kOperandIsNotAReceiver, t8, Operand(FIRST_JS_RECEIVER_TYPE));
  }
}


void MacroAssembler::AssertUndefinedOrAllocationSite(Register object,
                                                     Register scratch) {
  if (emit_debug_code()) {
    Label done_checking;
    AssertNotSmi(object);
    LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
    Branch(&done_checking, eq, object, Operand(scratch));
    lw(t8, FieldMemOperand(object, HeapObject::kMapOffset));
    LoadRoot(scratch, Heap::kAllocationSiteMapRootIndex);
    Assert(eq, kExpectedUndefinedOrCell, t8, Operand(scratch));
    bind(&done_checking);
  }
}


void MacroAssembler::AssertIsRoot(Register reg, Heap::RootListIndex index) {
  if (emit_debug_code()) {
    DCHECK(!reg.is(at));
    LoadRoot(at, index);
    Check(eq, kHeapNumberMapRegisterClobbered, reg, Operand(at));
  }
}


void MacroAssembler::JumpIfNotHeapNumber(Register object,
                                         Register heap_number_map,
                                         Register scratch,
                                         Label* on_not_heap_number) {
  lw(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
  AssertIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
  Branch(on_not_heap_number, ne, scratch, Operand(heap_number_map));
}


void MacroAssembler::JumpIfNonSmisNotBothSequentialOneByteStrings(
    Register first, Register second, Register scratch1, Register scratch2,
    Label* failure) {
  // Test that both first and second are sequential one-byte strings.
  // Assume that they are non-smis.
  lw(scratch1, FieldMemOperand(first, HeapObject::kMapOffset));
  lw(scratch2, FieldMemOperand(second, HeapObject::kMapOffset));
  lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  lbu(scratch2, FieldMemOperand(scratch2, Map::kInstanceTypeOffset));

  JumpIfBothInstanceTypesAreNotSequentialOneByte(scratch1, scratch2, scratch1,
                                                 scratch2, failure);
}


void MacroAssembler::JumpIfNotBothSequentialOneByteStrings(Register first,
                                                           Register second,
                                                           Register scratch1,
                                                           Register scratch2,
                                                           Label* failure) {
  // Check that neither is a smi.
  STATIC_ASSERT(kSmiTag == 0);
  And(scratch1, first, Operand(second));
  JumpIfSmi(scratch1, failure);
  JumpIfNonSmisNotBothSequentialOneByteStrings(first, second, scratch1,
                                               scratch2, failure);
}


void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialOneByte(
    Register first, Register second, Register scratch1, Register scratch2,
    Label* failure) {
  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask;
  const int kFlatOneByteStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;
  DCHECK(kFlatOneByteStringTag <= 0xffff);  // Ensure it fits a 16-bit immediate.
  andi(scratch1, first, kFlatOneByteStringMask);
  Branch(failure, ne, scratch1, Operand(kFlatOneByteStringTag));
  andi(scratch2, second, kFlatOneByteStringMask);
  Branch(failure, ne, scratch2, Operand(kFlatOneByteStringTag));
}
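
// The mask/tag pair implements a one-comparison classification: AND the
// instance type with a mask covering the string, encoding, and
// representation bit fields, then compare against the combined tag for
// "sequential one-byte string". A rough equivalent in C (a sketch; the
// constants stand in for the real instance-type encoding):
//
//   bool IsSeqOneByteString(uint32_t type) {
//     const uint32_t mask = kIsNotStringMask | kStringEncodingMask |
//                           kStringRepresentationMask;
//     const uint32_t tag = kStringTag | kOneByteStringTag | kSeqStringTag;
//     return (type & mask) == tag;
//   }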


void MacroAssembler::JumpIfInstanceTypeIsNotSequentialOneByte(Register type,
                                                              Register scratch,
                                                              Label* failure) {
  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask;
  const int kFlatOneByteStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;
  And(scratch, type, Operand(kFlatOneByteStringMask));
  Branch(failure, ne, scratch, Operand(kFlatOneByteStringTag));
}


static const int kRegisterPassedArguments = 4;

int MacroAssembler::CalculateStackPassedWords(int num_reg_arguments,
                                              int num_double_arguments) {
  int stack_passed_words = 0;
  num_reg_arguments += 2 * num_double_arguments;

  // Up to four simple arguments are passed in registers a0..a3.
  if (num_reg_arguments > kRegisterPassedArguments) {
    stack_passed_words += num_reg_arguments - kRegisterPassedArguments;
  }
  stack_passed_words += kCArgSlotCount;
  return stack_passed_words;
}
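
// Worked example under the o32 calling convention assumed here: each double
// counts as two register-sized arguments, so a call with 3 integer and
// 2 double arguments is treated as 3 + 2 * 2 = 7 words. Four of them travel
// in a0..a3, leaving 7 - 4 = 3 words on the stack, plus kCArgSlotCount
// (4 on o32) reserved argument slots, for 7 stack words in total.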


void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
                                               Register index,
                                               Register value,
                                               Register scratch,
                                               uint32_t encoding_mask) {
  Label is_object;
  SmiTst(string, at);
  Check(ne, kNonObject, at, Operand(zero_reg));

  lw(at, FieldMemOperand(string, HeapObject::kMapOffset));
  lbu(at, FieldMemOperand(at, Map::kInstanceTypeOffset));

  andi(at, at, kStringRepresentationMask | kStringEncodingMask);
  li(scratch, Operand(encoding_mask));
  Check(eq, kUnexpectedStringType, at, Operand(scratch));

  // The index comes in untagged. Tag it to compare with the string length
  // without using a temp register; it is restored at the end of this
  // function.
  Label index_tag_ok, index_tag_bad;
  TrySmiTag(index, scratch, &index_tag_bad);
  Branch(&index_tag_ok);
  bind(&index_tag_bad);
  Abort(kIndexIsTooLarge);
  bind(&index_tag_ok);

  lw(at, FieldMemOperand(string, String::kLengthOffset));
  Check(lt, kIndexIsTooLarge, index, Operand(at));

  DCHECK(Smi::FromInt(0) == 0);
  Check(ge, kIndexIsNegative, index, Operand(zero_reg));

  SmiUntag(index, index);
}


void MacroAssembler::PrepareCallCFunction(int num_reg_arguments,
                                          int num_double_arguments,
                                          Register scratch) {
  int frame_alignment = ActivationFrameAlignment();

  // Up to four simple arguments are passed in registers a0..a3.
  // Those four arguments must have reserved argument slots on the stack for
  // mips, even though those argument slots are not normally used.
  // Remaining arguments are pushed on the stack, above (higher address than)
  // the argument slots.
  int stack_passed_arguments = CalculateStackPassedWords(
      num_reg_arguments, num_double_arguments);
  if (frame_alignment > kPointerSize) {
    // Make stack end at alignment and make room for num_arguments - 4 words
    // and the original value of sp.
    mov(scratch, sp);
    Subu(sp, sp, Operand((stack_passed_arguments + 1) * kPointerSize));
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    And(sp, sp, Operand(-frame_alignment));
    sw(scratch, MemOperand(sp, stack_passed_arguments * kPointerSize));
  } else {
    Subu(sp, sp, Operand(stack_passed_arguments * kPointerSize));
  }
}
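
// Note on the alignment path above: AND-ing sp with -frame_alignment (for
// example -8, i.e. ~7) rounds the stack pointer down to the next aligned
// address, and the pre-alignment sp is parked in the highest reserved word
// so that CallCFunctionHelper can restore it with a single load after the
// call returns.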


void MacroAssembler::PrepareCallCFunction(int num_reg_arguments,
                                          Register scratch) {
  PrepareCallCFunction(num_reg_arguments, 0, scratch);
}


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_reg_arguments,
                                   int num_double_arguments) {
  li(t8, Operand(function));
  CallCFunctionHelper(t8, num_reg_arguments, num_double_arguments);
}


void MacroAssembler::CallCFunction(Register function,
                                   int num_reg_arguments,
                                   int num_double_arguments) {
  CallCFunctionHelper(function, num_reg_arguments, num_double_arguments);
}


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  CallCFunction(function, num_arguments, 0);
}


void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  CallCFunction(function, num_arguments, 0);
}


void MacroAssembler::CallCFunctionHelper(Register function,
                                         int num_reg_arguments,
                                         int num_double_arguments) {
  DCHECK(has_frame());
  // Make sure that the stack is aligned before calling a C function unless
  // running in the simulator. The simulator has its own alignment check which
  // provides more information.
  // The argument slots are presumed to have been set up by
  // PrepareCallCFunction. The C function must be called via t9, per the MIPS
  // ABI.

#if V8_HOST_ARCH_MIPS
  if (emit_debug_code()) {
    int frame_alignment = base::OS::ActivationFrameAlignment();
    int frame_alignment_mask = frame_alignment - 1;
    if (frame_alignment > kPointerSize) {
      DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
      Label alignment_as_expected;
      And(at, sp, Operand(frame_alignment_mask));
      Branch(&alignment_as_expected, eq, at, Operand(zero_reg));
      // Don't use Check here, as it will call Runtime_Abort, possibly
      // re-entering here.
      stop("Unexpected alignment in CallCFunction");
      bind(&alignment_as_expected);
    }
  }
#endif  // V8_HOST_ARCH_MIPS

  // Just call directly. The function called cannot cause a GC, or
  // allow preemption, so the return address in the link register
  // stays correct.

  if (!function.is(t9)) {
    mov(t9, function);
    function = t9;
  }

  Call(function);

  int stack_passed_arguments = CalculateStackPassedWords(
      num_reg_arguments, num_double_arguments);

  if (base::OS::ActivationFrameAlignment() > kPointerSize) {
    lw(sp, MemOperand(sp, stack_passed_arguments * kPointerSize));
  } else {
    Addu(sp, sp, Operand(stack_passed_arguments * kPointerSize));
  }
}
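
// A typical call sequence, sketched for illustration (the register choices
// and the external reference are hypothetical):
//
//   PrepareCallCFunction(2, 0, scratch);  // 2 integer args, 0 doubles.
//   li(a0, Operand(some_value));
//   li(a1, Operand(other_value));
//   CallCFunction(ExternalReference::some_function(isolate()), 2);
//
// PrepareCallCFunction reserves and aligns the outgoing argument area, the
// caller materializes arguments into a0..a3 (doubles go in FPU argument
// registers under o32), and CallCFunction performs the t9-based call and
// pops the reserved space.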


#undef BRANCH_ARGS_CHECK


void MacroAssembler::CheckPageFlag(
    Register object,
    Register scratch,
    int mask,
    Condition cc,
    Label* condition_met) {
  And(scratch, object, Operand(~Page::kPageAlignmentMask));
  lw(scratch, MemOperand(scratch, MemoryChunk::kFlagsOffset));
  And(scratch, scratch, Operand(mask));
  Branch(condition_met, cc, scratch, Operand(zero_reg));
}


void MacroAssembler::JumpIfBlack(Register object,
                                 Register scratch0,
                                 Register scratch1,
                                 Label* on_black) {
  HasColor(object, scratch0, scratch1, on_black, 1, 1);  // kBlackBitPattern.
  DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
}


void MacroAssembler::HasColor(Register object,
                              Register bitmap_scratch,
                              Register mask_scratch,
                              Label* has_color,
                              int first_bit,
                              int second_bit) {
  DCHECK(!AreAliased(object, bitmap_scratch, mask_scratch, t8));
  DCHECK(!AreAliased(object, bitmap_scratch, mask_scratch, t9));

  GetMarkBits(object, bitmap_scratch, mask_scratch);

  Label other_color, word_boundary;
  lw(t9, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize));
  And(t8, t9, Operand(mask_scratch));
  Branch(&other_color, first_bit == 1 ? eq : ne, t8, Operand(zero_reg));
  // Shift left 1 by adding.
  Addu(mask_scratch, mask_scratch, Operand(mask_scratch));
  Branch(&word_boundary, eq, mask_scratch, Operand(zero_reg));
  And(t8, t9, Operand(mask_scratch));
  Branch(has_color, second_bit == 1 ? ne : eq, t8, Operand(zero_reg));
  jmp(&other_color);

  bind(&word_boundary);
  lw(t9, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize));
  And(t9, t9, Operand(1));
  Branch(has_color, second_bit == 1 ? ne : eq, t9, Operand(zero_reg));
  bind(&other_color);
}


void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  DCHECK(!AreAliased(addr_reg, bitmap_reg, mask_reg, no_reg));
  And(bitmap_reg, addr_reg, Operand(~Page::kPageAlignmentMask));
  Ext(mask_reg, addr_reg, kPointerSizeLog2, Bitmap::kBitsPerCellLog2);
  const int kLowBits = kPointerSizeLog2 + Bitmap::kBitsPerCellLog2;
  Ext(t8, addr_reg, kLowBits, kPageSizeBits - kLowBits);
  Lsa(bitmap_reg, bitmap_reg, t8, kPointerSizeLog2, t8);
  li(t8, Operand(1));
  sllv(mask_reg, t8, mask_reg);
}
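
// How the decomposition above works, as a sketch: an address splits into
// (page | cell index | bit index | low zero bits). With 32-bit bitmap cells
// and 4-byte pointers, bits [2..6] of the address select the bit within a
// cell and bits [7..kPageSizeBits) select the cell word. In pseudo-C:
//
//   uint32_t* cell = page_start + header_words +
//                    ((addr >> 7) & cell_index_mask);  // scaled by 4 bytes.
//   uint32_t mask = 1u << ((addr >> 2) & 31);
//
// The Lsa instruction folds the "scale cell index by pointer size and add"
// step into a single operation.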


void MacroAssembler::JumpIfWhite(Register value, Register bitmap_scratch,
                                 Register mask_scratch, Register load_scratch,
                                 Label* value_is_white) {
  DCHECK(!AreAliased(value, bitmap_scratch, mask_scratch, t8));
  GetMarkBits(value, bitmap_scratch, mask_scratch);

  // If the value is black or grey we don't need to do anything.
  DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0);
  DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
  DCHECK(strcmp(Marking::kGreyBitPattern, "10") == 0);
  DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

  // Since both black and grey have a 1 in the first position and white does
  // not have a 1 there we only need to check one bit.
  lw(load_scratch, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize));
  And(t8, mask_scratch, load_scratch);
  Branch(value_is_white, eq, t8, Operand(zero_reg));
}


void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  lw(descriptors, FieldMemOperand(map, Map::kDescriptorsOffset));
}


void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
  lw(dst, FieldMemOperand(map, Map::kBitField3Offset));
  DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
}


void MacroAssembler::EnumLength(Register dst, Register map) {
  STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
  lw(dst, FieldMemOperand(map, Map::kBitField3Offset));
  And(dst, dst, Operand(Map::EnumLengthBits::kMask));
  SmiTag(dst);
}


void MacroAssembler::LoadAccessor(Register dst, Register holder,
                                  int accessor_index,
                                  AccessorComponent accessor) {
  lw(dst, FieldMemOperand(holder, HeapObject::kMapOffset));
  LoadInstanceDescriptors(dst, dst);
  lw(dst,
     FieldMemOperand(dst, DescriptorArray::GetValueOffset(accessor_index)));
  int offset = accessor == ACCESSOR_GETTER ? AccessorPair::kGetterOffset
                                           : AccessorPair::kSetterOffset;
  lw(dst, FieldMemOperand(dst, offset));
}


void MacroAssembler::CheckEnumCache(Label* call_runtime) {
  Register null_value = t1;
  Register empty_fixed_array_value = t2;
  LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
  Label next, start;
  mov(a2, a0);

  // Check if the enum length field is properly initialized, indicating that
  // there is an enum cache.
  lw(a1, FieldMemOperand(a2, HeapObject::kMapOffset));

  EnumLength(a3, a1);
  Branch(
      call_runtime, eq, a3, Operand(Smi::FromInt(kInvalidEnumCacheSentinel)));

  LoadRoot(null_value, Heap::kNullValueRootIndex);
  jmp(&start);

  bind(&next);
  lw(a1, FieldMemOperand(a2, HeapObject::kMapOffset));

  // For all objects but the receiver, check that the cache is empty.
  EnumLength(a3, a1);
  Branch(call_runtime, ne, a3, Operand(Smi::FromInt(0)));

  bind(&start);

  // Check that there are no elements. Register a2 contains the current JS
  // object we've reached through the prototype chain.
  Label no_elements;
  lw(a2, FieldMemOperand(a2, JSObject::kElementsOffset));
  Branch(&no_elements, eq, a2, Operand(empty_fixed_array_value));

  // Second chance, the object may be using the empty slow element dictionary.
  LoadRoot(at, Heap::kEmptySlowElementDictionaryRootIndex);
  Branch(call_runtime, ne, a2, Operand(at));

  bind(&no_elements);
  lw(a2, FieldMemOperand(a1, Map::kPrototypeOffset));
  Branch(&next, ne, a2, Operand(null_value));
}


void MacroAssembler::ClampUint8(Register output_reg, Register input_reg) {
  DCHECK(!output_reg.is(input_reg));
  Label done;
  li(output_reg, Operand(255));
  // Normal branch: nop in delay slot.
  Branch(&done, gt, input_reg, Operand(output_reg));
  // Use delay slot in this branch.
  Branch(USE_DELAY_SLOT, &done, lt, input_reg, Operand(zero_reg));
  mov(output_reg, zero_reg);  // In delay slot.
  mov(output_reg, input_reg);  // Value is in range 0..255.
  bind(&done);
}
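
// Control-flow sketch of the clamp above: output starts at 255; if input is
// greater than 255 the first branch keeps it. Otherwise, if input is
// negative, the second branch is taken with mov(output_reg, zero_reg) in its
// delay slot, so the taken path yields 0. If neither branch is taken, the
// delay-slot mov still executes (MIPS delay slots run regardless of the
// branch outcome, harmlessly writing 0) and the final mov overwrites output
// with the in-range input.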


void MacroAssembler::ClampDoubleToUint8(Register result_reg,
                                        DoubleRegister input_reg,
                                        DoubleRegister temp_double_reg) {
  Label above_zero;
  Label done;
  Label in_bounds;

  Move(temp_double_reg, 0.0);
  BranchF(&above_zero, NULL, gt, input_reg, temp_double_reg);

  // Double value is less than or equal to zero, or NaN; return 0.
  mov(result_reg, zero_reg);
  Branch(&done);

  // Double value is greater than 255, return 255.
  bind(&above_zero);
  Move(temp_double_reg, 255.0);
  BranchF(&in_bounds, NULL, le, input_reg, temp_double_reg);
  li(result_reg, Operand(255));
  Branch(&done);

  // In 0-255 range, round and truncate.
  bind(&in_bounds);
  cvt_w_d(temp_double_reg, input_reg);
  mfc1(result_reg, temp_double_reg);
  bind(&done);
}

void MacroAssembler::TestJSArrayForAllocationMemento(Register receiver_reg,
                                                     Register scratch_reg,
                                                     Label* no_memento_found) {
  Label map_check;
  Label top_check;
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());
  const int kMementoMapOffset = JSArray::kSize - kHeapObjectTag;
  const int kMementoEndOffset = kMementoMapOffset + AllocationMemento::kSize;

  // Bail out if the object is not in new space.
  JumpIfNotInNewSpace(receiver_reg, scratch_reg, no_memento_found);
  // If the object is in new space, we need to check whether it is on the same
  // page as the current top.
  Addu(scratch_reg, receiver_reg, Operand(kMementoEndOffset));
  Xor(scratch_reg, scratch_reg, Operand(new_space_allocation_top));
  And(scratch_reg, scratch_reg, Operand(~Page::kPageAlignmentMask));
  Branch(&top_check, eq, scratch_reg, Operand(zero_reg));
  // The object is on a different page than allocation top. Bail out if the
  // object sits on the page boundary as no memento can follow and we cannot
  // touch the memory following it.
  Addu(scratch_reg, receiver_reg, Operand(kMementoEndOffset));
  Xor(scratch_reg, scratch_reg, Operand(receiver_reg));
  And(scratch_reg, scratch_reg, Operand(~Page::kPageAlignmentMask));
  Branch(no_memento_found, ne, scratch_reg, Operand(zero_reg));
  // Continue with the actual map check.
  jmp(&map_check);
  // If top is on the same page as the current object, we need to check whether
  // we are below top.
  bind(&top_check);
  Addu(scratch_reg, receiver_reg, Operand(kMementoEndOffset));
  li(at, Operand(new_space_allocation_top));
  lw(at, MemOperand(at));
  Branch(no_memento_found, gt, scratch_reg, Operand(at));
  // Memento map check.
  bind(&map_check);
  lw(scratch_reg, MemOperand(receiver_reg, kMementoMapOffset));
  Branch(no_memento_found, ne, scratch_reg,
         Operand(isolate()->factory()->allocation_memento_map()));
}
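
// The "same page" tests above use an xor/mask idiom: two addresses lie on
// the same page exactly when they agree in every bit above the page-offset
// bits, i.e. when (a ^ b) & ~kPageAlignmentMask == 0. A sketch in C:
//
//   bool OnSamePage(uintptr_t a, uintptr_t b) {
//     return ((a ^ b) & ~(uintptr_t)Page::kPageAlignmentMask) == 0;
//   }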


Register GetRegisterThatIsNotOneOf(Register reg1,
                                   Register reg2,
                                   Register reg3,
                                   Register reg4,
                                   Register reg5,
                                   Register reg6) {
  RegList regs = 0;
  if (reg1.is_valid()) regs |= reg1.bit();
  if (reg2.is_valid()) regs |= reg2.bit();
  if (reg3.is_valid()) regs |= reg3.bit();
  if (reg4.is_valid()) regs |= reg4.bit();
  if (reg5.is_valid()) regs |= reg5.bit();
  if (reg6.is_valid()) regs |= reg6.bit();

  const RegisterConfiguration* config =
      RegisterConfiguration::ArchDefault(RegisterConfiguration::CRANKSHAFT);
  for (int i = 0; i < config->num_allocatable_general_registers(); ++i) {
    int code = config->GetAllocatableGeneralCode(i);
    Register candidate = Register::from_code(code);
    if (regs & candidate.bit()) continue;
    return candidate;
  }
  UNREACHABLE();
  return no_reg;
}
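
// Usage sketch (the register names are purely illustrative): pick a scratch
// register guaranteed to differ from the ones already in use.
//
//   Register scratch = GetRegisterThatIsNotOneOf(object, value, address);
//
// Unused trailing parameters are expected to arrive as no_reg (presumably
// defaulted in the header) and are skipped by the is_valid() checks.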


void MacroAssembler::JumpIfDictionaryInPrototypeChain(
    Register object,
    Register scratch0,
    Register scratch1,
    Label* found) {
  DCHECK(!scratch1.is(scratch0));
  Factory* factory = isolate()->factory();
  Register current = scratch0;
  Label loop_again, end;

  // Walk the prototype chain, using scratch0 as the cursor.
  Move(current, object);
  lw(current, FieldMemOperand(current, HeapObject::kMapOffset));
  lw(current, FieldMemOperand(current, Map::kPrototypeOffset));
  Branch(&end, eq, current, Operand(factory->null_value()));

  // Loop based on the map going up the prototype chain.
  bind(&loop_again);
  lw(current, FieldMemOperand(current, HeapObject::kMapOffset));
  lbu(scratch1, FieldMemOperand(current, Map::kInstanceTypeOffset));
  STATIC_ASSERT(JS_VALUE_TYPE < JS_OBJECT_TYPE);
  STATIC_ASSERT(JS_PROXY_TYPE < JS_OBJECT_TYPE);
  Branch(found, lo, scratch1, Operand(JS_OBJECT_TYPE));
  lb(scratch1, FieldMemOperand(current, Map::kBitField2Offset));
  DecodeField<Map::ElementsKindBits>(scratch1);
  Branch(found, eq, scratch1, Operand(DICTIONARY_ELEMENTS));
  lw(current, FieldMemOperand(current, Map::kPrototypeOffset));
  Branch(&loop_again, ne, current, Operand(factory->null_value()));

  bind(&end);
}


bool AreAliased(Register reg1, Register reg2, Register reg3, Register reg4,
                Register reg5, Register reg6, Register reg7, Register reg8,
                Register reg9, Register reg10) {
  int n_of_valid_regs = reg1.is_valid() + reg2.is_valid() + reg3.is_valid() +
                        reg4.is_valid() + reg5.is_valid() + reg6.is_valid() +
                        reg7.is_valid() + reg8.is_valid() + reg9.is_valid() +
                        reg10.is_valid();

  RegList regs = 0;
  if (reg1.is_valid()) regs |= reg1.bit();
  if (reg2.is_valid()) regs |= reg2.bit();
  if (reg3.is_valid()) regs |= reg3.bit();
  if (reg4.is_valid()) regs |= reg4.bit();
  if (reg5.is_valid()) regs |= reg5.bit();
  if (reg6.is_valid()) regs |= reg6.bit();
  if (reg7.is_valid()) regs |= reg7.bit();
  if (reg8.is_valid()) regs |= reg8.bit();
  if (reg9.is_valid()) regs |= reg9.bit();
  if (reg10.is_valid()) regs |= reg10.bit();
  int n_of_non_aliasing_regs = NumRegs(regs);

  return n_of_valid_regs != n_of_non_aliasing_regs;
}


CodePatcher::CodePatcher(Isolate* isolate, byte* address, int instructions,
                         FlushICache flush_cache)
    : address_(address),
      size_(instructions * Assembler::kInstrSize),
      masm_(isolate, address, size_ + Assembler::kGap, CodeObjectRequired::kNo),
      flush_cache_(flush_cache) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate
  // size bytes of instructions without failing with buffer size constraints.
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  if (flush_cache_ == FLUSH) {
    Assembler::FlushICache(masm_.isolate(), address_, size_);
  }

  // Check that the code was patched as expected.
  DCHECK(masm_.pc_ == address_ + size_);
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


void CodePatcher::Emit(Instr instr) {
  masm()->emit(instr);
}


void CodePatcher::Emit(Address addr) {
  masm()->emit(reinterpret_cast<Instr>(addr));
}


void CodePatcher::ChangeBranchCondition(Instr current_instr,
                                        uint32_t new_opcode) {
  current_instr = (current_instr & ~kOpcodeMask) | new_opcode;
  masm_.emit(current_instr);
}


void MacroAssembler::TruncatingDiv(Register result,
                                   Register dividend,
                                   int32_t divisor) {
  DCHECK(!dividend.is(result));
  DCHECK(!dividend.is(at));
  DCHECK(!result.is(at));
  base::MagicNumbersForDivision<uint32_t> mag =
      base::SignedDivisionByConstant(static_cast<uint32_t>(divisor));
  li(at, Operand(mag.multiplier));
  Mulh(result, dividend, Operand(at));
  bool neg = (mag.multiplier & (static_cast<uint32_t>(1) << 31)) != 0;
  if (divisor > 0 && neg) {
    Addu(result, result, Operand(dividend));
  }
  if (divisor < 0 && !neg && mag.multiplier > 0) {
    Subu(result, result, Operand(dividend));
  }
  if (mag.shift > 0) sra(result, result, mag.shift);
  srl(at, dividend, 31);
  Addu(result, result, Operand(at));
}
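
// Worked example of the magic-number division above (the concrete numbers
// follow the Hacker's Delight construction and are illustrative): for
// divisor = 3 the magic multiplier is 0x55555556 with shift 0. For
// dividend = 7, the high 32 bits of 7 * 0x55555556 are 2, and adding
// (dividend >> 31), i.e. 0 for non-negative inputs, leaves 7 / 3 = 2.
// For dividend = -7 the high word is -3 and the sign correction adds 1,
// giving the truncated quotient -2.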


}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_MIPS