// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <limits.h>  // For LONG_MIN, LONG_MAX.

#if V8_TARGET_ARCH_MIPS

#include "src/base/bits.h"
#include "src/base/division-by-constant.h"
#include "src/bootstrapper.h"
#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/mips/macro-assembler-mips.h"
#include "src/register-configuration.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {

// Floating point constants.
const uint32_t kDoubleSignMask = HeapNumber::kSignMask;
const uint32_t kDoubleExponentShift = HeapNumber::kExponentShift;
const uint32_t kDoubleNaNShift = kDoubleExponentShift - 1;
const uint32_t kDoubleNaNMask =
    HeapNumber::kExponentMask | (1 << kDoubleNaNShift);

const uint32_t kSingleSignMask = kBinary32SignMask;
const uint32_t kSingleExponentMask = kBinary32ExponentMask;
const uint32_t kSingleExponentShift = kBinary32ExponentShift;
const uint32_t kSingleNaNShift = kSingleExponentShift - 1;
const uint32_t kSingleNaNMask = kSingleExponentMask | (1 << kSingleNaNShift);

MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size,
                               CodeObjectRequired create_code_object)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      has_frame_(false),
      has_double_zero_reg_set_(false) {
  if (create_code_object == CodeObjectRequired::kYes) {
    code_object_ =
        Handle<Object>::New(isolate()->heap()->undefined_value(), isolate());
  }
}

void MacroAssembler::Load(Register dst,
                          const MemOperand& src,
                          Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8()) {
    lb(dst, src);
  } else if (r.IsUInteger8()) {
    lbu(dst, src);
  } else if (r.IsInteger16()) {
    lh(dst, src);
  } else if (r.IsUInteger16()) {
    lhu(dst, src);
  } else {
    lw(dst, src);
  }
}


void MacroAssembler::Store(Register src,
                           const MemOperand& dst,
                           Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8() || r.IsUInteger8()) {
    sb(src, dst);
  } else if (r.IsInteger16() || r.IsUInteger16()) {
    sh(src, dst);
  } else {
    if (r.IsHeapObject()) {
      AssertNotSmi(src);
    } else if (r.IsSmi()) {
      AssertSmi(src);
    }
    sw(src, dst);
  }
}
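
// Usage sketch (hypothetical registers and field offsets), showing how the
// Representation picks the memory instruction:
//
//   // Emits lbu: zero-extending 8-bit load.
//   masm->Load(a0, FieldMemOperand(a1, kSomeByteField),
//              Representation::UInteger8());
//   // Emits sh: 16-bit store (no smi/heap-object assertions apply).
//   masm->Store(a2, FieldMemOperand(a1, kSomeHalfField),
//               Representation::Integer16());
//
// kSomeByteField and kSomeHalfField are illustrative names only.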

void MacroAssembler::LoadRoot(Register destination,
                              Heap::RootListIndex index) {
  lw(destination, MemOperand(s6, index << kPointerSizeLog2));
}


void MacroAssembler::LoadRoot(Register destination,
                              Heap::RootListIndex index,
                              Condition cond,
                              Register src1, const Operand& src2) {
  Branch(2, NegateCondition(cond), src1, src2);
  lw(destination, MemOperand(s6, index << kPointerSizeLog2));
}
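
// A note on the conditional LoadRoot/StoreRoot variants: Branch(2, ...) with
// the negated condition targets two instruction slots past the branch, i.e.
// past the delay-slot nop and the following lw/sw, so the root access only
// happens when 'cond' holds (assuming the default PROTECT delay-slot policy).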


void MacroAssembler::StoreRoot(Register source,
                               Heap::RootListIndex index) {
  DCHECK(Heap::RootCanBeWrittenAfterInitialization(index));
  sw(source, MemOperand(s6, index << kPointerSizeLog2));
}


void MacroAssembler::StoreRoot(Register source,
                               Heap::RootListIndex index,
                               Condition cond,
                               Register src1, const Operand& src2) {
  DCHECK(Heap::RootCanBeWrittenAfterInitialization(index));
  Branch(2, NegateCondition(cond), src1, src2);
  sw(source, MemOperand(s6, index << kPointerSizeLog2));
}

void MacroAssembler::PushCommonFrame(Register marker_reg) {
  if (marker_reg.is_valid()) {
    Push(ra, fp, marker_reg);
    Addu(fp, sp, Operand(kPointerSize));
  } else {
    Push(ra, fp);
    mov(fp, sp);
  }
}

void MacroAssembler::PopCommonFrame(Register marker_reg) {
  if (marker_reg.is_valid()) {
    Pop(ra, fp, marker_reg);
  } else {
    Pop(ra, fp);
  }
}

void MacroAssembler::PushStandardFrame(Register function_reg) {
  int offset = -StandardFrameConstants::kContextOffset;
  if (function_reg.is_valid()) {
    Push(ra, fp, cp, function_reg);
    offset += kPointerSize;
  } else {
    Push(ra, fp, cp);
  }
  Addu(fp, sp, Operand(offset));
}
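
// Resulting standard-frame layout when function_reg is valid (a sketch,
// assuming the usual MIPS standard-frame constants with
// kContextOffset == -kPointerSize):
//
//   [fp + 4]  saved ra (caller pc)
//   [fp + 0]  saved fp
//   [fp - 4]  cp (context)
//   [fp - 8]  function_reg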

// Push and pop all registers that can hold pointers.
void MacroAssembler::PushSafepointRegisters() {
  // Safepoints expect a block of kNumSafepointRegisters values on the
  // stack, so adjust the stack for unsaved registers.
  const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters;
  DCHECK(num_unsaved >= 0);
  if (num_unsaved > 0) {
    Subu(sp, sp, Operand(num_unsaved * kPointerSize));
  }
  MultiPush(kSafepointSavedRegisters);
}


void MacroAssembler::PopSafepointRegisters() {
  const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters;
  MultiPop(kSafepointSavedRegisters);
  if (num_unsaved > 0) {
    Addu(sp, sp, Operand(num_unsaved * kPointerSize));
  }
}


void MacroAssembler::StoreToSafepointRegisterSlot(Register src, Register dst) {
  sw(src, SafepointRegisterSlot(dst));
}


void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  lw(dst, SafepointRegisterSlot(src));
}


int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
  // The registers are pushed starting with the highest encoding,
  // which means that lowest encodings are closest to the stack pointer.
  return kSafepointRegisterStackIndexMap[reg_code];
}


MemOperand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return MemOperand(sp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}


MemOperand MacroAssembler::SafepointRegistersAndDoublesSlot(Register reg) {
  UNIMPLEMENTED_MIPS();
  // General purpose registers are pushed last on the stack.
  int doubles_size = DoubleRegister::kMaxNumRegisters * kDoubleSize;
  int register_offset = SafepointRegisterStackIndex(reg.code()) * kPointerSize;
  return MemOperand(sp, doubles_size + register_offset);
}


void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                Label* branch) {
  DCHECK(cc == eq || cc == ne);
  const int mask =
      1 << MemoryChunk::IN_FROM_SPACE | 1 << MemoryChunk::IN_TO_SPACE;
  CheckPageFlag(object, scratch, mask, cc, branch);
}


// Clobbers object, dst, value, and ra, if (ra_status == kRAHasBeenSaved)
// The register 'object' contains a heap object pointer.  The heap object
// tag is shifted away.
void MacroAssembler::RecordWriteField(
    Register object,
    int offset,
    Register value,
    Register dst,
    RAStatus ra_status,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  DCHECK(!AreAliased(value, dst, t8, object));
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip the barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done);
  }

  // Although the object register is tagged, the offset is relative to the
  // start of the object, so the offset must be a multiple of kPointerSize.
  DCHECK(IsAligned(offset, kPointerSize));

  Addu(dst, object, Operand(offset - kHeapObjectTag));
  if (emit_debug_code()) {
    Label ok;
    And(t8, dst, Operand((1 << kPointerSizeLog2) - 1));
    Branch(&ok, eq, t8, Operand(zero_reg));
    stop("Unaligned cell in write barrier");
    bind(&ok);
  }

  RecordWrite(object,
              dst,
              value,
              ra_status,
              save_fp,
              remembered_set_action,
              OMIT_SMI_CHECK,
              pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    li(value, Operand(bit_cast<int32_t>(kZapValue + 4)));
    li(dst, Operand(bit_cast<int32_t>(kZapValue + 8)));
  }
}
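
// Call-site sketch: a field store followed by its write barrier. The
// registers and the field offset here are illustrative only.
//
//   sw(value, FieldMemOperand(object, JSObject::kHeaderSize));
//   RecordWriteField(object, JSObject::kHeaderSize, value, scratch,
//                    kRAHasNotBeenSaved, kDontSaveFPRegs,
//                    EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);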


// Clobbers object, dst, map, and ra, if (ra_status == kRAHasBeenSaved)
void MacroAssembler::RecordWriteForMap(Register object,
                                       Register map,
                                       Register dst,
                                       RAStatus ra_status,
                                       SaveFPRegsMode fp_mode) {
  if (emit_debug_code()) {
    DCHECK(!dst.is(at));
    lw(dst, FieldMemOperand(map, HeapObject::kMapOffset));
    Check(eq,
          kWrongAddressOrValuePassedToRecordWrite,
          dst,
          Operand(isolate()->factory()->meta_map()));
  }

  if (!FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    lw(at, FieldMemOperand(object, HeapObject::kMapOffset));
    Check(eq,
          kWrongAddressOrValuePassedToRecordWrite,
          map,
          Operand(at));
  }

  Label done;

  // A single check of the map's page's interesting flag suffices, since it is
  // only set during incremental collection, and then it is also guaranteed
  // that the from-object's page's interesting flag is set.  This optimization
  // relies on the fact that maps can never be in new space.
  CheckPageFlag(map,
                map,  // Used as scratch.
                MemoryChunk::kPointersToHereAreInterestingMask,
                eq,
                &done);

  Addu(dst, object, Operand(HeapObject::kMapOffset - kHeapObjectTag));
  if (emit_debug_code()) {
    Label ok;
    And(at, dst, Operand((1 << kPointerSizeLog2) - 1));
    Branch(&ok, eq, at, Operand(zero_reg));
    stop("Unaligned cell in write barrier");
    bind(&ok);
  }

  // Record the actual write.
  if (ra_status == kRAHasNotBeenSaved) {
    push(ra);
  }
  RecordWriteStub stub(isolate(), object, map, dst, OMIT_REMEMBERED_SET,
                       fp_mode);
  CallStub(&stub);
  if (ra_status == kRAHasNotBeenSaved) {
    pop(ra);
  }

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1, at, dst);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    li(dst, Operand(bit_cast<int32_t>(kZapValue + 12)));
    li(map, Operand(bit_cast<int32_t>(kZapValue + 16)));
  }
}


// Clobbers object, address, value, and ra, if (ra_status == kRAHasBeenSaved)
// The register 'object' contains a heap object pointer.  The heap object
// tag is shifted away.
void MacroAssembler::RecordWrite(
    Register object,
    Register address,
    Register value,
    RAStatus ra_status,
    SaveFPRegsMode fp_mode,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  DCHECK(!AreAliased(object, address, value, t8));
  DCHECK(!AreAliased(object, address, value, t9));

  if (emit_debug_code()) {
    lw(at, MemOperand(address));
    Assert(
        eq, kWrongAddressOrValuePassedToRecordWrite, at, Operand(value));
  }

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    DCHECK_EQ(0, kSmiTag);
    JumpIfSmi(value, &done);
  }

  if (pointers_to_here_check_for_value != kPointersToHereAreAlwaysInteresting) {
    CheckPageFlag(value,
                  value,  // Used as scratch.
                  MemoryChunk::kPointersToHereAreInterestingMask,
                  eq,
                  &done);
  }
  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                eq,
                &done);

  // Record the actual write.
  if (ra_status == kRAHasNotBeenSaved) {
    push(ra);
  }
  RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
                       fp_mode);
  CallStub(&stub);
  if (ra_status == kRAHasNotBeenSaved) {
    pop(ra);
  }

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1, at,
                   value);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    li(address, Operand(bit_cast<int32_t>(kZapValue + 12)));
    li(value, Operand(bit_cast<int32_t>(kZapValue + 16)));
  }
}

void MacroAssembler::RecordWriteCodeEntryField(Register js_function,
                                               Register code_entry,
                                               Register scratch) {
  const int offset = JSFunction::kCodeEntryOffset;

  // Since a code entry (value) is always in old space, we don't need to
  // update the remembered set. If incremental marking is off, there is
  // nothing for us to do.
  if (!FLAG_incremental_marking) return;

  DCHECK(js_function.is(a1));
  DCHECK(code_entry.is(t0));
  DCHECK(scratch.is(t1));
  AssertNotSmi(js_function);

  if (emit_debug_code()) {
    Addu(scratch, js_function, Operand(offset - kHeapObjectTag));
    lw(at, MemOperand(scratch));
    Assert(eq, kWrongAddressOrValuePassedToRecordWrite, at,
           Operand(code_entry));
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  CheckPageFlag(code_entry, scratch,
                MemoryChunk::kPointersToHereAreInterestingMask, eq, &done);
  CheckPageFlag(js_function, scratch,
                MemoryChunk::kPointersFromHereAreInterestingMask, eq, &done);

  const Register dst = scratch;
  Addu(dst, js_function, Operand(offset - kHeapObjectTag));

  // Save caller-saved registers. js_function and code_entry are in the
  // caller-saved register list.
  DCHECK(kJSCallerSaved & js_function.bit());
  DCHECK(kJSCallerSaved & code_entry.bit());
  MultiPush(kJSCallerSaved | ra.bit());

  int argument_count = 3;

  PrepareCallCFunction(argument_count, 0, code_entry);

  mov(a0, js_function);
  mov(a1, dst);
  li(a2, Operand(ExternalReference::isolate_address(isolate())));

  {
    AllowExternalCallThatCantCauseGC scope(this);
    CallCFunction(
        ExternalReference::incremental_marking_record_write_code_entry_function(
            isolate()),
        argument_count);
  }

  // Restore caller-saved registers.
  MultiPop(kJSCallerSaved | ra.bit());

  bind(&done);
}

void MacroAssembler::RememberedSetHelper(Register object,  // For debug tests.
                                         Register address,
                                         Register scratch,
                                         SaveFPRegsMode fp_mode,
                                         RememberedSetFinalAction and_then) {
  Label done;
  if (emit_debug_code()) {
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok);
    stop("Remembered set pointer is in new space");
    bind(&ok);
  }
  // Load the store buffer top.
  ExternalReference store_buffer =
      ExternalReference::store_buffer_top(isolate());
  li(t8, Operand(store_buffer));
  lw(scratch, MemOperand(t8));
  // Store the pointer to the buffer and increment the buffer top.
  sw(address, MemOperand(scratch));
  Addu(scratch, scratch, kPointerSize);
  // Write back the new top of the buffer.
  sw(scratch, MemOperand(t8));
  // Call the stub when the end of the buffer is reached.
  And(t8, scratch, Operand(StoreBuffer::kStoreBufferMask));
  if (and_then == kFallThroughAtEnd) {
    Branch(&done, ne, t8, Operand(zero_reg));
  } else {
    DCHECK(and_then == kReturnAtEnd);
    Ret(ne, t8, Operand(zero_reg));
  }
  push(ra);
  StoreBufferOverflowStub store_buffer_overflow(isolate(), fp_mode);
  CallStub(&store_buffer_overflow);
  pop(ra);
  bind(&done);
  if (and_then == kReturnAtEnd) {
    Ret();
  }
}
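
// The fast path above, as a C-style sketch (names illustrative; the buffer is
// a bump array whose top pointer hits a mask-aligned boundary when full):
//
//   *store_buffer_top++ = slot_address;
//   if (((uintptr_t)store_buffer_top & StoreBuffer::kStoreBufferMask) == 0) {
//     HandleStoreBufferOverflow();  // The StoreBufferOverflowStub call above.
//   }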


// -----------------------------------------------------------------------------
// Allocation support.

void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;
  Register temporary = t8;

  DCHECK(!holder_reg.is(scratch));
  DCHECK(!holder_reg.is(at));
  DCHECK(!scratch.is(at));

  // Load the current lexical context from the active StandardFrame, which
  // may require crawling past STUB frames.
  Label load_context;
  Label has_context;
  mov(at, fp);
  bind(&load_context);
  lw(scratch, MemOperand(at, CommonFrameConstants::kContextOrFrameTypeOffset));
  // Pass a temporary register; otherwise JumpIfNotSmi would modify at.
  JumpIfNotSmi(scratch, &has_context, temporary);
  lw(at, MemOperand(at, CommonFrameConstants::kCallerFPOffset));
  Branch(&load_context);
  bind(&has_context);

  // In debug mode, make sure the lexical context is set.
#ifdef DEBUG
  Check(ne, kWeShouldNotHaveAnEmptyLexicalContext,
        scratch, Operand(zero_reg));
#endif

  // Load the native context of the current context.
  lw(scratch, ContextMemOperand(scratch, Context::NATIVE_CONTEXT_INDEX));

  // Check that the context is a native context.
  if (emit_debug_code()) {
    push(holder_reg);  // Temporarily save holder on the stack.
    // Read the first word and compare it to the native_context_map.
    lw(holder_reg, FieldMemOperand(scratch, HeapObject::kMapOffset));
    LoadRoot(at, Heap::kNativeContextMapRootIndex);
    Check(eq, kJSGlobalObjectNativeContextShouldBeANativeContext,
          holder_reg, Operand(at));
    pop(holder_reg);  // Restore holder.
  }

  // Check if both contexts are the same.
  lw(at, FieldMemOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  Branch(&same_contexts, eq, scratch, Operand(at));

  // Check that the context is a native context.
  if (emit_debug_code()) {
    push(holder_reg);  // Temporarily save holder on the stack.
    mov(holder_reg, at);  // Move at to its holding place.
    LoadRoot(at, Heap::kNullValueRootIndex);
    Check(ne, kJSGlobalProxyContextShouldNotBeNull,
          holder_reg, Operand(at));

    lw(holder_reg, FieldMemOperand(holder_reg, HeapObject::kMapOffset));
    LoadRoot(at, Heap::kNativeContextMapRootIndex);
    Check(eq, kJSGlobalObjectNativeContextShouldBeANativeContext,
          holder_reg, Operand(at));
    // Restoring at is not needed; at is reloaded below.
    pop(holder_reg);  // Restore holder.
    // Restore at to the holder's context.
    lw(at, FieldMemOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  }

  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;

  lw(scratch, FieldMemOperand(scratch, token_offset));
  lw(at, FieldMemOperand(at, token_offset));
  Branch(miss, ne, scratch, Operand(at));

  bind(&same_contexts);
}


// Compute the hash code from the untagged key.  This must be kept in sync
// with ComputeIntegerHash in utils.h and KeyedLoadGenericStub in
// code-stubs-hydrogen.cc.
void MacroAssembler::GetNumberHash(Register reg0, Register scratch) {
  // First of all, assign the hash seed to scratch.
  LoadRoot(scratch, Heap::kHashSeedRootIndex);
  SmiUntag(scratch);

  // XOR the original key with the seed.
  xor_(reg0, reg0, scratch);

  // Compute the hash code from the untagged key. This must be kept in sync
  // with ComputeIntegerHash in utils.h.
  //
  // hash = ~hash + (hash << 15);
  nor(scratch, reg0, zero_reg);
  Lsa(reg0, scratch, reg0, 15);

  // hash = hash ^ (hash >> 12);
  srl(at, reg0, 12);
  xor_(reg0, reg0, at);

  // hash = hash + (hash << 2);
  Lsa(reg0, reg0, reg0, 2);

  // hash = hash ^ (hash >> 4);
  srl(at, reg0, 4);
  xor_(reg0, reg0, at);

  // hash = hash * 2057;
  sll(scratch, reg0, 11);
  Lsa(reg0, reg0, reg0, 3);
  addu(reg0, reg0, scratch);

  // hash = hash ^ (hash >> 16);
  srl(at, reg0, 16);
  xor_(reg0, reg0, at);
  And(reg0, reg0, Operand(0x3fffffff));
}
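
// Reference sketch of the computation above in plain C (the XOR with the
// seed happens first, as above):
//
//   uint32_t hash = key ^ seed;
//   hash = ~hash + (hash << 15);
//   hash = hash ^ (hash >> 12);
//   hash = hash + (hash << 2);
//   hash = hash ^ (hash >> 4);
//   hash = hash * 2057;  // Emitted as hash + (hash << 3) + (hash << 11).
//   hash = hash ^ (hash >> 16);
//   hash &= 0x3fffffff;  // Keep the result in positive smi range.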


void MacroAssembler::LoadFromNumberDictionary(Label* miss,
                                              Register elements,
                                              Register key,
                                              Register result,
                                              Register reg0,
                                              Register reg1,
                                              Register reg2) {
  // Register use:
  //
  // elements - holds the slow-case elements of the receiver on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // key      - holds the smi key on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // result   - holds the result on exit if the load succeeded.
  //            Allowed to be the same as 'key' or 'elements'.
  //            Unchanged on bailout so 'key' or 'elements' can be used
  //            in further computation.
  //
  // Scratch registers:
  //
  // reg0 - holds the untagged key on entry and holds the hash once computed.
  //
  // reg1 - Used to hold the capacity mask of the dictionary.
  //
  // reg2 - Used for the index into the dictionary.
  // at   - Temporary (avoid MacroAssembler instructions also using 'at').
  Label done;

  GetNumberHash(reg0, reg1);

  // Compute the capacity mask.
  lw(reg1, FieldMemOperand(elements, SeededNumberDictionary::kCapacityOffset));
  sra(reg1, reg1, kSmiTagSize);
  Subu(reg1, reg1, Operand(1));

  // Generate an unrolled loop that performs a few probes before giving up.
  for (int i = 0; i < kNumberDictionaryProbes; i++) {
    // Use reg2 for index calculations and keep the hash intact in reg0.
    mov(reg2, reg0);
    // Compute the masked index: (hash + i + i * i) & mask.
    if (i > 0) {
      Addu(reg2, reg2, Operand(SeededNumberDictionary::GetProbeOffset(i)));
    }
    and_(reg2, reg2, reg1);

    // Scale the index by multiplying by the element size.
    DCHECK(SeededNumberDictionary::kEntrySize == 3);
    Lsa(reg2, reg2, reg2, 1);  // reg2 = reg2 * 3.

    // Check if the key matches the entry's key.
    Lsa(reg2, elements, reg2, kPointerSizeLog2);

    lw(at, FieldMemOperand(reg2, SeededNumberDictionary::kElementsStartOffset));
    if (i != kNumberDictionaryProbes - 1) {
      Branch(&done, eq, key, Operand(at));
    } else {
      Branch(miss, ne, key, Operand(at));
    }
  }

  bind(&done);
  // Check that the value is a field property.
  // reg2: elements + (index * kPointerSize).
  const int kDetailsOffset =
      SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  lw(reg1, FieldMemOperand(reg2, kDetailsOffset));
  DCHECK_EQ(DATA, 0);
  And(at, reg1, Operand(Smi::FromInt(PropertyDetails::TypeField::kMask)));
  Branch(miss, ne, at, Operand(zero_reg));

  // Get the value at the masked, scaled index and return.
  const int kValueOffset =
      SeededNumberDictionary::kElementsStartOffset + kPointerSize;
  lw(result, FieldMemOperand(reg2, kValueOffset));
}
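
// The unrolled probe loop above, as a C-style sketch:
//
//   for (int i = 0; i < kNumberDictionaryProbes; i++) {
//     uint32_t index = (hash + GetProbeOffset(i)) & capacity_mask;
//     Entry* entry = &elements[index * kEntrySize];  // kEntrySize == 3.
//     if (entry->key == key) goto found;             // Last probe: else miss.
//   }
//
// 'Entry' is an illustrative name for a {key, value, details} triple.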


// ---------------------------------------------------------------------------
// Instruction macros.

void MacroAssembler::Addu(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    addu(rd, rs, rt.rm());
  } else {
    if (is_int16(rt.imm32_) && !MustUseReg(rt.rmode_)) {
      addiu(rd, rs, rt.imm32_);
    } else {
      // li handles the relocation.
      DCHECK(!rs.is(at));
      li(at, rt);
      addu(rd, rs, at);
    }
  }
}
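
// Expansion sketch for an immediate that does not fit in 16 bits
// (hypothetical values):
//
//   Addu(a0, a1, Operand(0x12345));
//     -> lui  at, 0x0001
//        ori  at, at, 0x2345
//        addu a0, a1, at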


void MacroAssembler::Subu(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    subu(rd, rs, rt.rm());
  } else {
    if (is_int16(rt.imm32_) && !MustUseReg(rt.rmode_)) {
      addiu(rd, rs, -rt.imm32_);  // No subiu instr, use addiu(x, y, -imm).
    } else {
      // li handles the relocation.
      DCHECK(!rs.is(at));
      li(at, rt);
      subu(rd, rs, at);
    }
  }
}


void MacroAssembler::Mul(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    if (IsMipsArchVariant(kLoongson)) {
      mult(rs, rt.rm());
      mflo(rd);
    } else {
      mul(rd, rs, rt.rm());
    }
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    if (IsMipsArchVariant(kLoongson)) {
      mult(rs, at);
      mflo(rd);
    } else {
      mul(rd, rs, at);
    }
  }
}


void MacroAssembler::Mul(Register rd_hi, Register rd_lo,
                         Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    if (!IsMipsArchVariant(kMips32r6)) {
      mult(rs, rt.rm());
      mflo(rd_lo);
      mfhi(rd_hi);
    } else {
      if (rd_lo.is(rs)) {
        DCHECK(!rd_hi.is(rs));
        DCHECK(!rd_hi.is(rt.rm()) && !rd_lo.is(rt.rm()));
        muh(rd_hi, rs, rt.rm());
        mul(rd_lo, rs, rt.rm());
      } else {
        DCHECK(!rd_hi.is(rt.rm()) && !rd_lo.is(rt.rm()));
        mul(rd_lo, rs, rt.rm());
        muh(rd_hi, rs, rt.rm());
      }
    }
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    if (!IsMipsArchVariant(kMips32r6)) {
      mult(rs, at);
      mflo(rd_lo);
      mfhi(rd_hi);
    } else {
      if (rd_lo.is(rs)) {
        DCHECK(!rd_hi.is(rs));
        DCHECK(!rd_hi.is(at) && !rd_lo.is(at));
        muh(rd_hi, rs, at);
        mul(rd_lo, rs, at);
      } else {
        DCHECK(!rd_hi.is(at) && !rd_lo.is(at));
        mul(rd_lo, rs, at);
        muh(rd_hi, rs, at);
      }
    }
  }
}

void MacroAssembler::Mulu(Register rd_hi, Register rd_lo, Register rs,
                          const Operand& rt) {
  Register reg;
  if (rt.is_reg()) {
    reg = rt.rm();
  } else {
    DCHECK(!rs.is(at));
    reg = at;
    li(reg, rt);
  }

  if (!IsMipsArchVariant(kMips32r6)) {
    multu(rs, reg);
    mflo(rd_lo);
    mfhi(rd_hi);
  } else {
    if (rd_lo.is(rs)) {
      DCHECK(!rd_hi.is(rs));
      DCHECK(!rd_hi.is(reg) && !rd_lo.is(reg));
      muhu(rd_hi, rs, reg);
      mulu(rd_lo, rs, reg);
    } else {
      DCHECK(!rd_hi.is(reg) && !rd_lo.is(reg));
      mulu(rd_lo, rs, reg);
      muhu(rd_hi, rs, reg);
    }
  }
}

void MacroAssembler::Mulh(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    if (!IsMipsArchVariant(kMips32r6)) {
      mult(rs, rt.rm());
      mfhi(rd);
    } else {
      muh(rd, rs, rt.rm());
    }
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    if (!IsMipsArchVariant(kMips32r6)) {
      mult(rs, at);
      mfhi(rd);
    } else {
      muh(rd, rs, at);
    }
  }
}


void MacroAssembler::Mult(Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    mult(rs, rt.rm());
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    mult(rs, at);
  }
}


void MacroAssembler::Mulhu(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    if (!IsMipsArchVariant(kMips32r6)) {
      multu(rs, rt.rm());
      mfhi(rd);
    } else {
      muhu(rd, rs, rt.rm());
    }
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    if (!IsMipsArchVariant(kMips32r6)) {
      multu(rs, at);
      mfhi(rd);
    } else {
      muhu(rd, rs, at);
    }
  }
}


void MacroAssembler::Multu(Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    multu(rs, rt.rm());
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    multu(rs, at);
  }
}


void MacroAssembler::Div(Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    div(rs, rt.rm());
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    div(rs, at);
  }
}


void MacroAssembler::Div(Register rem, Register res,
                         Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    if (!IsMipsArchVariant(kMips32r6)) {
      div(rs, rt.rm());
      mflo(res);
      mfhi(rem);
    } else {
      div(res, rs, rt.rm());
      mod(rem, rs, rt.rm());
    }
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    if (!IsMipsArchVariant(kMips32r6)) {
      div(rs, at);
      mflo(res);
      mfhi(rem);
    } else {
      div(res, rs, at);
      mod(rem, rs, at);
    }
  }
}


void MacroAssembler::Div(Register res, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    if (!IsMipsArchVariant(kMips32r6)) {
      div(rs, rt.rm());
      mflo(res);
    } else {
      div(res, rs, rt.rm());
    }
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    if (!IsMipsArchVariant(kMips32r6)) {
      div(rs, at);
      mflo(res);
    } else {
      div(res, rs, at);
    }
  }
}


void MacroAssembler::Mod(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    if (!IsMipsArchVariant(kMips32r6)) {
      div(rs, rt.rm());
      mfhi(rd);
    } else {
      mod(rd, rs, rt.rm());
    }
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    if (!IsMipsArchVariant(kMips32r6)) {
      div(rs, at);
      mfhi(rd);
    } else {
      mod(rd, rs, at);
    }
  }
}


void MacroAssembler::Modu(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    if (!IsMipsArchVariant(kMips32r6)) {
      divu(rs, rt.rm());
      mfhi(rd);
    } else {
      modu(rd, rs, rt.rm());
    }
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    if (!IsMipsArchVariant(kMips32r6)) {
      divu(rs, at);
      mfhi(rd);
    } else {
      modu(rd, rs, at);
    }
  }
}


void MacroAssembler::Divu(Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    divu(rs, rt.rm());
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    divu(rs, at);
  }
}


void MacroAssembler::Divu(Register res, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    if (!IsMipsArchVariant(kMips32r6)) {
      divu(rs, rt.rm());
      mflo(res);
    } else {
      divu(res, rs, rt.rm());
    }
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    if (!IsMipsArchVariant(kMips32r6)) {
      divu(rs, at);
      mflo(res);
    } else {
      divu(res, rs, at);
    }
  }
}


void MacroAssembler::And(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    and_(rd, rs, rt.rm());
  } else {
    if (is_uint16(rt.imm32_) && !MustUseReg(rt.rmode_)) {
      andi(rd, rs, rt.imm32_);
    } else {
      // li handles the relocation.
      DCHECK(!rs.is(at));
      li(at, rt);
      and_(rd, rs, at);
    }
  }
}


void MacroAssembler::Or(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    or_(rd, rs, rt.rm());
  } else {
    if (is_uint16(rt.imm32_) && !MustUseReg(rt.rmode_)) {
      ori(rd, rs, rt.imm32_);
    } else {
      // li handles the relocation.
      DCHECK(!rs.is(at));
      li(at, rt);
      or_(rd, rs, at);
    }
  }
}


void MacroAssembler::Xor(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    xor_(rd, rs, rt.rm());
  } else {
    if (is_uint16(rt.imm32_) && !MustUseReg(rt.rmode_)) {
      xori(rd, rs, rt.imm32_);
    } else {
      // li handles the relocation.
      DCHECK(!rs.is(at));
      li(at, rt);
      xor_(rd, rs, at);
    }
  }
}


void MacroAssembler::Nor(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    nor(rd, rs, rt.rm());
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    nor(rd, rs, at);
  }
}


void MacroAssembler::Neg(Register rs, const Operand& rt) {
  DCHECK(rt.is_reg());
  DCHECK(!at.is(rs));
  DCHECK(!at.is(rt.rm()));
  li(at, -1);
  xor_(rs, rt.rm(), at);
}


void MacroAssembler::Slt(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    slt(rd, rs, rt.rm());
  } else {
    if (is_int16(rt.imm32_) && !MustUseReg(rt.rmode_)) {
      slti(rd, rs, rt.imm32_);
    } else {
      // li handles the relocation.
      DCHECK(!rs.is(at));
      li(at, rt);
      slt(rd, rs, at);
    }
  }
}


void MacroAssembler::Sltu(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    sltu(rd, rs, rt.rm());
  } else {
    if (is_uint16(rt.imm32_) && !MustUseReg(rt.rmode_)) {
      sltiu(rd, rs, rt.imm32_);
    } else {
      // li handles the relocation.
      DCHECK(!rs.is(at));
      li(at, rt);
      sltu(rd, rs, at);
    }
  }
}


void MacroAssembler::Ror(Register rd, Register rs, const Operand& rt) {
  if (IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r6)) {
    if (rt.is_reg()) {
      rotrv(rd, rs, rt.rm());
    } else {
      rotr(rd, rs, rt.imm32_ & 0x1f);
    }
  } else {
    if (rt.is_reg()) {
      subu(at, zero_reg, rt.rm());
      sllv(at, rs, at);
      srlv(rd, rs, rt.rm());
      or_(rd, rd, at);
    } else {
      if (rt.imm32_ == 0) {
        srl(rd, rs, 0);
      } else {
        srl(at, rs, rt.imm32_ & 0x1f);
        sll(rd, rs, (0x20 - (rt.imm32_ & 0x1f)) & 0x1f);
        or_(rd, rd, at);
      }
    }
  }
}
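
// The pre-r2 fallback above implements the standard rotate identity
//   ror(x, n) == (x >> n) | (x << (32 - n))
// with 'at' holding the shifted-in high part; n == 0 degenerates to
// srl(rd, rs, 0), i.e. a plain move.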


void MacroAssembler::Pref(int32_t hint, const MemOperand& rs) {
  if (IsMipsArchVariant(kLoongson)) {
    lw(zero_reg, rs);
  } else {
    pref(hint, rs);
  }
}


void MacroAssembler::Lsa(Register rd, Register rt, Register rs, uint8_t sa,
                         Register scratch) {
  DCHECK(sa >= 1 && sa <= 31);
  if (IsMipsArchVariant(kMips32r6) && sa <= 4) {
    lsa(rd, rt, rs, sa - 1);
  } else {
    Register tmp = rd.is(rt) ? scratch : rd;
    DCHECK(!tmp.is(rt));
    sll(tmp, rs, sa);
    Addu(rd, rt, tmp);
  }
}
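
// Lsa computes rd = rt + (rs << sa), the usual base + scaled-index pattern.
// Example (hypothetical registers):
//
//   Lsa(a0, a1, a2, 2);  // a0 = a1 + (a2 << 2), e.g. base + index * 4.
//
// On r6 this is a single lsa instruction (its shift field encodes sa - 1);
// elsewhere it falls back to sll + Addu.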


// ------------Pseudo-instructions-------------

// Byte-swap a word: reverse the byte order of the value in 'reg'.
void MacroAssembler::ByteSwapSigned(Register reg, int operand_size) {
  DCHECK(operand_size == 1 || operand_size == 2 || operand_size == 4);
  if (IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r6)) {
    if (operand_size == 2) {
      seh(reg, reg);
    } else if (operand_size == 1) {
      seb(reg, reg);
    }
    // No preparation is needed if operand_size is 4.

    wsbh(reg, reg);
    rotr(reg, reg, 16);
  } else if (IsMipsArchVariant(kMips32r1) || IsMipsArchVariant(kLoongson)) {
    if (operand_size == 1) {
      sll(reg, reg, 24);
      sra(reg, reg, 24);
    } else if (operand_size == 2) {
      sll(reg, reg, 16);
      sra(reg, reg, 16);
    }
    // No preparation is needed if operand_size is 4.

    Register tmp = t0;
    Register tmp2 = t1;

    andi(tmp2, reg, 0xFF);
    sll(tmp2, tmp2, 24);
    or_(tmp, zero_reg, tmp2);

    andi(tmp2, reg, 0xFF00);
    sll(tmp2, tmp2, 8);
    or_(tmp, tmp, tmp2);

    srl(reg, reg, 8);
    andi(tmp2, reg, 0xFF00);
    or_(tmp, tmp, tmp2);

    srl(reg, reg, 16);
    andi(tmp2, reg, 0xFF);
    or_(tmp, tmp, tmp2);

    or_(reg, tmp, zero_reg);
  }
}
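
// On r2/r6 the 32-bit swap is two instructions: wsbh reverses the bytes
// within each halfword (0xAABBCCDD -> 0xBBAADDCC), then rotr by 16 swaps the
// halfwords (0xBBAADDCC -> 0xDDCCBBAA), yielding the full byte reversal.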

void MacroAssembler::ByteSwapUnsigned(Register reg, int operand_size) {
  DCHECK(operand_size == 1 || operand_size == 2);

  if (IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r6)) {
    if (operand_size == 1) {
      andi(reg, reg, 0xFF);
    } else {
      andi(reg, reg, 0xFFFF);
    }

    wsbh(reg, reg);
    rotr(reg, reg, 16);
  } else if (IsMipsArchVariant(kMips32r1) || IsMipsArchVariant(kLoongson)) {
    if (operand_size == 1) {
      sll(reg, reg, 24);
    } else {
      Register tmp = t0;

      andi(tmp, reg, 0xFF00);
      sll(reg, reg, 24);
      sll(tmp, tmp, 8);
      or_(reg, tmp, reg);
    }
  }
}

void MacroAssembler::Ulw(Register rd, const MemOperand& rs) {
  DCHECK(!rd.is(at));
  DCHECK(!rs.rm().is(at));
  if (IsMipsArchVariant(kMips32r6)) {
    lw(rd, rs);
  } else {
    DCHECK(IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r1) ||
           IsMipsArchVariant(kLoongson));
    if (is_int16(rs.offset() + kMipsLwrOffset) &&
        is_int16(rs.offset() + kMipsLwlOffset)) {
      if (!rd.is(rs.rm())) {
        lwr(rd, MemOperand(rs.rm(), rs.offset() + kMipsLwrOffset));
        lwl(rd, MemOperand(rs.rm(), rs.offset() + kMipsLwlOffset));
      } else {
        lwr(at, MemOperand(rs.rm(), rs.offset() + kMipsLwrOffset));
        lwl(at, MemOperand(rs.rm(), rs.offset() + kMipsLwlOffset));
        mov(rd, at);
      }
    } else {  // Offset > 16 bits, use multiple instructions to load.
      LoadRegPlusOffsetToAt(rs);
      lwr(rd, MemOperand(at, kMipsLwrOffset));
      lwl(rd, MemOperand(at, kMipsLwlOffset));
    }
  }
}


void MacroAssembler::Usw(Register rd, const MemOperand& rs) {
  DCHECK(!rd.is(at));
  DCHECK(!rs.rm().is(at));
  if (IsMipsArchVariant(kMips32r6)) {
    sw(rd, rs);
  } else {
    DCHECK(IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r1) ||
           IsMipsArchVariant(kLoongson));
    if (is_int16(rs.offset() + kMipsSwrOffset) &&
        is_int16(rs.offset() + kMipsSwlOffset)) {
      swr(rd, MemOperand(rs.rm(), rs.offset() + kMipsSwrOffset));
      swl(rd, MemOperand(rs.rm(), rs.offset() + kMipsSwlOffset));
    } else {
      LoadRegPlusOffsetToAt(rs);
      swr(rd, MemOperand(at, kMipsSwrOffset));
      swl(rd, MemOperand(at, kMipsSwlOffset));
    }
  }
}
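
// lwl/lwr (and swl/swr) are the classic MIPS unaligned-access pair: each
// instruction touches only the bytes of the unaligned word that fall within
// its aligned memory word, and together they transfer the full 32-bit value.
// kMipsLwlOffset/kMipsLwrOffset (and the swl/swr offsets) select the correct
// end of the word for the target endianness.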

void MacroAssembler::Ulh(Register rd, const MemOperand& rs) {
  DCHECK(!rd.is(at));
  DCHECK(!rs.rm().is(at));
  if (IsMipsArchVariant(kMips32r6)) {
    lh(rd, rs);
  } else {
    DCHECK(IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r1) ||
           IsMipsArchVariant(kLoongson));
    if (is_int16(rs.offset()) && is_int16(rs.offset() + 1)) {
#if defined(V8_TARGET_LITTLE_ENDIAN)
      lbu(at, rs);
      lb(rd, MemOperand(rs.rm(), rs.offset() + 1));
#elif defined(V8_TARGET_BIG_ENDIAN)
      lbu(at, MemOperand(rs.rm(), rs.offset() + 1));
      lb(rd, rs);
#endif
    } else {  // Offset > 16 bits, use multiple instructions to load.
      LoadRegPlusOffsetToAt(rs);
#if defined(V8_TARGET_LITTLE_ENDIAN)
      lb(rd, MemOperand(at, 1));
      lbu(at, MemOperand(at, 0));
#elif defined(V8_TARGET_BIG_ENDIAN)
      lb(rd, MemOperand(at, 0));
      lbu(at, MemOperand(at, 1));
#endif
    }
    sll(rd, rd, 8);
    or_(rd, rd, at);
  }
}

void MacroAssembler::Ulhu(Register rd, const MemOperand& rs) {
  DCHECK(!rd.is(at));
  DCHECK(!rs.rm().is(at));
  if (IsMipsArchVariant(kMips32r6)) {
    lhu(rd, rs);
  } else {
    DCHECK(IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r1) ||
           IsMipsArchVariant(kLoongson));
    if (is_int16(rs.offset()) && is_int16(rs.offset() + 1)) {
#if defined(V8_TARGET_LITTLE_ENDIAN)
      lbu(at, rs);
      lbu(rd, MemOperand(rs.rm(), rs.offset() + 1));
#elif defined(V8_TARGET_BIG_ENDIAN)
      lbu(at, MemOperand(rs.rm(), rs.offset() + 1));
      lbu(rd, rs);
#endif
    } else {  // Offset > 16 bits, use multiple instructions to load.
      LoadRegPlusOffsetToAt(rs);
#if defined(V8_TARGET_LITTLE_ENDIAN)
      lbu(rd, MemOperand(at, 1));
      lbu(at, MemOperand(at, 0));
#elif defined(V8_TARGET_BIG_ENDIAN)
      lbu(rd, MemOperand(at, 0));
      lbu(at, MemOperand(at, 1));
#endif
    }
    sll(rd, rd, 8);
    or_(rd, rd, at);
  }
}

void MacroAssembler::Ush(Register rd, const MemOperand& rs, Register scratch) {
  DCHECK(!rd.is(at));
  DCHECK(!rs.rm().is(at));
  DCHECK(!rs.rm().is(scratch));
  DCHECK(!scratch.is(at));
  if (IsMipsArchVariant(kMips32r6)) {
    sh(rd, rs);
  } else {
    DCHECK(IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r1) ||
           IsMipsArchVariant(kLoongson));
    MemOperand source = rs;
    // If offset > 16 bits, load address to at with offset 0.
    if (!is_int16(rs.offset()) || !is_int16(rs.offset() + 1)) {
      LoadRegPlusOffsetToAt(rs);
      source = MemOperand(at, 0);
    }

    if (!scratch.is(rd)) {
      mov(scratch, rd);
    }

#if defined(V8_TARGET_LITTLE_ENDIAN)
    sb(scratch, source);
    srl(scratch, scratch, 8);
    sb(scratch, MemOperand(source.rm(), source.offset() + 1));
#elif defined(V8_TARGET_BIG_ENDIAN)
    sb(scratch, MemOperand(source.rm(), source.offset() + 1));
    srl(scratch, scratch, 8);
    sb(scratch, source);
#endif
  }
}

void MacroAssembler::Ulwc1(FPURegister fd, const MemOperand& rs,
                           Register scratch) {
  if (IsMipsArchVariant(kMips32r6)) {
    lwc1(fd, rs);
  } else {
    DCHECK(IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r1) ||
           IsMipsArchVariant(kLoongson));
    Ulw(scratch, rs);
    mtc1(scratch, fd);
  }
}

void MacroAssembler::Uswc1(FPURegister fd, const MemOperand& rs,
                           Register scratch) {
  if (IsMipsArchVariant(kMips32r6)) {
    swc1(fd, rs);
  } else {
    DCHECK(IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r1) ||
           IsMipsArchVariant(kLoongson));
    mfc1(scratch, fd);
    Usw(scratch, rs);
  }
}

void MacroAssembler::Uldc1(FPURegister fd, const MemOperand& rs,
                           Register scratch) {
  DCHECK(!scratch.is(at));
  if (IsMipsArchVariant(kMips32r6)) {
    ldc1(fd, rs);
  } else {
    DCHECK(IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r1) ||
           IsMipsArchVariant(kLoongson));
    Ulw(scratch, MemOperand(rs.rm(), rs.offset() + Register::kMantissaOffset));
    mtc1(scratch, fd);
    Ulw(scratch, MemOperand(rs.rm(), rs.offset() + Register::kExponentOffset));
    Mthc1(scratch, fd);
  }
}

void MacroAssembler::Usdc1(FPURegister fd, const MemOperand& rs,
                           Register scratch) {
  DCHECK(!scratch.is(at));
  if (IsMipsArchVariant(kMips32r6)) {
    sdc1(fd, rs);
  } else {
    DCHECK(IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r1) ||
           IsMipsArchVariant(kLoongson));
    mfc1(scratch, fd);
    Usw(scratch, MemOperand(rs.rm(), rs.offset() + Register::kMantissaOffset));
    Mfhc1(scratch, fd);
    Usw(scratch, MemOperand(rs.rm(), rs.offset() + Register::kExponentOffset));
  }
}


void MacroAssembler::li(Register dst, Handle<Object> value, LiFlags mode) {
  AllowDeferredHandleDereference smi_check;
  if (value->IsSmi()) {
    li(dst, Operand(value), mode);
  } else {
    DCHECK(value->IsHeapObject());
    if (isolate()->heap()->InNewSpace(*value)) {
      Handle<Cell> cell = isolate()->factory()->NewCell(value);
      li(dst, Operand(cell));
      lw(dst, FieldMemOperand(dst, Cell::kValueOffset));
    } else {
      li(dst, Operand(value));
    }
  }
}


void MacroAssembler::li(Register rd, Operand j, LiFlags mode) {
  DCHECK(!j.is_reg());
  BlockTrampolinePoolScope block_trampoline_pool(this);
  if (!MustUseReg(j.rmode_) && mode == OPTIMIZE_SIZE) {
    // Normal load of an immediate value which does not need Relocation Info.
    if (is_int16(j.imm32_)) {
      addiu(rd, zero_reg, j.imm32_);
    } else if (!(j.imm32_ & kHiMask)) {
      ori(rd, zero_reg, j.imm32_);
    } else if (!(j.imm32_ & kImm16Mask)) {
      lui(rd, (j.imm32_ >> kLuiShift) & kImm16Mask);
    } else {
      lui(rd, (j.imm32_ >> kLuiShift) & kImm16Mask);
      ori(rd, rd, (j.imm32_ & kImm16Mask));
    }
  } else {
    if (MustUseReg(j.rmode_)) {
      RecordRelocInfo(j.rmode_, j.imm32_);
    }
    // We always need the same number of instructions as we may need to patch
    // this code to load another value which may need 2 instructions to load.
    lui(rd, (j.imm32_ >> kLuiShift) & kImm16Mask);
    ori(rd, rd, (j.imm32_ & kImm16Mask));
  }
}
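
// li expansion sketch in OPTIMIZE_SIZE mode with no reloc info (hypothetical
// values):
//
//   li(a0, Operand(-4));          // -> addiu a0, zero_reg, -4
//   li(a0, Operand(0x0000abcd));  // -> ori   a0, zero_reg, 0xabcd
//   li(a0, Operand(0x12340000));  // -> lui   a0, 0x1234
//   li(a0, Operand(0x12345678));  // -> lui + ori
//
// With a relocatable value (or CONSTANT_SIZE) the two-instruction lui/ori
// form is always used so the loaded value can be patched later.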


void MacroAssembler::MultiPush(RegList regs) {
  int16_t num_to_push = NumberOfBitsSet(regs);
  int16_t stack_offset = num_to_push * kPointerSize;

  Subu(sp, sp, Operand(stack_offset));
  for (int16_t i = kNumRegisters - 1; i >= 0; i--) {
    if ((regs & (1 << i)) != 0) {
      stack_offset -= kPointerSize;
      sw(ToRegister(i), MemOperand(sp, stack_offset));
    }
  }
}
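
// Illustrative note, not part of the original source: registers are stored
// from the highest register code downwards, so the lowest-numbered register
// lands at the lowest address. E.g. MultiPush(a0.bit() | a1.bit() | ra.bit())
// reserves 12 bytes and stores ra at sp+8, a1 at sp+4 and a0 at sp+0;
// MultiPop with the same list restores them in matching order.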


void MacroAssembler::MultiPushReversed(RegList regs) {
  int16_t num_to_push = NumberOfBitsSet(regs);
  int16_t stack_offset = num_to_push * kPointerSize;

  Subu(sp, sp, Operand(stack_offset));
  for (int16_t i = 0; i < kNumRegisters; i++) {
    if ((regs & (1 << i)) != 0) {
      stack_offset -= kPointerSize;
      sw(ToRegister(i), MemOperand(sp, stack_offset));
    }
  }
}


void MacroAssembler::MultiPop(RegList regs) {
  int16_t stack_offset = 0;

  for (int16_t i = 0; i < kNumRegisters; i++) {
    if ((regs & (1 << i)) != 0) {
      lw(ToRegister(i), MemOperand(sp, stack_offset));
      stack_offset += kPointerSize;
    }
  }
  addiu(sp, sp, stack_offset);
}


void MacroAssembler::MultiPopReversed(RegList regs) {
  int16_t stack_offset = 0;

  for (int16_t i = kNumRegisters - 1; i >= 0; i--) {
    if ((regs & (1 << i)) != 0) {
      lw(ToRegister(i), MemOperand(sp, stack_offset));
      stack_offset += kPointerSize;
    }
  }
  addiu(sp, sp, stack_offset);
}


void MacroAssembler::MultiPushFPU(RegList regs) {
  int16_t num_to_push = NumberOfBitsSet(regs);
  int16_t stack_offset = num_to_push * kDoubleSize;

  Subu(sp, sp, Operand(stack_offset));
  for (int16_t i = kNumRegisters - 1; i >= 0; i--) {
    if ((regs & (1 << i)) != 0) {
      stack_offset -= kDoubleSize;
      sdc1(FPURegister::from_code(i), MemOperand(sp, stack_offset));
    }
  }
}


void MacroAssembler::MultiPushReversedFPU(RegList regs) {
  int16_t num_to_push = NumberOfBitsSet(regs);
  int16_t stack_offset = num_to_push * kDoubleSize;

  Subu(sp, sp, Operand(stack_offset));
  for (int16_t i = 0; i < kNumRegisters; i++) {
    if ((regs & (1 << i)) != 0) {
      stack_offset -= kDoubleSize;
      sdc1(FPURegister::from_code(i), MemOperand(sp, stack_offset));
    }
  }
}


void MacroAssembler::MultiPopFPU(RegList regs) {
  int16_t stack_offset = 0;

  for (int16_t i = 0; i < kNumRegisters; i++) {
    if ((regs & (1 << i)) != 0) {
      ldc1(FPURegister::from_code(i), MemOperand(sp, stack_offset));
      stack_offset += kDoubleSize;
    }
  }
  addiu(sp, sp, stack_offset);
}


void MacroAssembler::MultiPopReversedFPU(RegList regs) {
  int16_t stack_offset = 0;

  for (int16_t i = kNumRegisters - 1; i >= 0; i--) {
    if ((regs & (1 << i)) != 0) {
      ldc1(FPURegister::from_code(i), MemOperand(sp, stack_offset));
      stack_offset += kDoubleSize;
    }
  }
  addiu(sp, sp, stack_offset);
}

void MacroAssembler::AddPair(Register dst_low, Register dst_high,
                             Register left_low, Register left_high,
                             Register right_low, Register right_high) {
  Label no_overflow;
  Register kScratchReg = s3;
  Register kScratchReg2 = s4;
  // Add lower word.
  Addu(dst_low, left_low, right_low);
  Addu(dst_high, left_high, right_high);
  // Check for lower word unsigned overflow.
  Sltu(kScratchReg, dst_low, left_low);
  Sltu(kScratchReg2, dst_low, right_low);
  Or(kScratchReg, kScratchReg2, kScratchReg);
  Branch(&no_overflow, eq, kScratchReg, Operand(zero_reg));
  // Increment higher word if there was overflow.
  Addu(dst_high, dst_high, 0x1);
  bind(&no_overflow);
}
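
// Illustrative note, not part of the original source: MIPS has no carry
// flag, so the carry out of the low word is recovered with an unsigned
// compare. After dst_low = left_low + right_low, the sum wrapped iff
// dst_low < left_low (equivalently dst_low < right_low). For example,
// 0xFFFFFFFF + 0x00000001 gives dst_low = 0, Sltu yields 1, and the high
// word is incremented.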

void MacroAssembler::SubPair(Register dst_low, Register dst_high,
                             Register left_low, Register left_high,
                             Register right_low, Register right_high) {
  Label no_overflow;
  Register kScratchReg = s3;
  // Subtract lower word.
  Subu(dst_low, left_low, right_low);
  Subu(dst_high, left_high, right_high);
  // Check for lower word unsigned underflow.
  Sltu(kScratchReg, left_low, right_low);
  Branch(&no_overflow, eq, kScratchReg, Operand(zero_reg));
  // Decrement higher word if there was underflow.
  Subu(dst_high, dst_high, 0x1);
  bind(&no_overflow);
}
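
// Illustrative note, not part of the original source: the borrow out of
// the low word is simply left_low < right_low (unsigned). For example,
// {0x00000001:0x00000000} - {0x00000000:0x00000001} borrows, so the high
// word result 1 is decremented to 0, giving the 64-bit result
// 0x00000000FFFFFFFF as expected.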

void MacroAssembler::ShlPair(Register dst_low, Register dst_high,
                             Register src_low, Register src_high,
                             Register shift) {
  Label less_than_32;
  Label zero_shift;
  Label word_shift;
  Label done;
  Register kScratchReg = s3;
  And(shift, shift, 0x3F);
  li(kScratchReg, 0x20);
  Branch(&less_than_32, lt, shift, Operand(kScratchReg));

  Branch(&word_shift, eq, shift, Operand(kScratchReg));
  // Shift more than 32.
  Subu(kScratchReg, shift, kScratchReg);
  mov(dst_low, zero_reg);
  sllv(dst_high, src_low, kScratchReg);
  Branch(&done);
  // Word shift.
  bind(&word_shift);
  mov(dst_low, zero_reg);
  mov(dst_high, src_low);
  Branch(&done);

  bind(&less_than_32);
  // Check if zero shift.
  Branch(&zero_shift, eq, shift, Operand(zero_reg));
  // Shift less than 32.
  Subu(kScratchReg, kScratchReg, shift);
  sllv(dst_high, src_high, shift);
  sllv(dst_low, src_low, shift);
  srlv(kScratchReg, src_low, kScratchReg);
  Or(dst_high, dst_high, kScratchReg);
  Branch(&done);
  // Zero shift.
  bind(&zero_shift);
  mov(dst_low, src_low);
  mov(dst_high, src_high);
  bind(&done);
}
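
// Illustrative walkthrough, not part of the original source: for a
// variable shift of 40, 40 & 0x3F = 40 is above 32, so the "more than 32"
// path computes 40 - 32 = 8 and produces dst_high = src_low << 8 with
// dst_low = 0. For shifts in 1..31 the result is stitched together as
// dst_high = (src_high << s) | (src_low >> (32 - s)) and
// dst_low = src_low << s.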

void MacroAssembler::ShlPair(Register dst_low, Register dst_high,
                             Register src_low, Register src_high,
                             uint32_t shift) {
  Register kScratchReg = s3;
  shift = shift & 0x3F;
  if (shift < 32) {
    if (shift == 0) {
      mov(dst_low, src_low);
      mov(dst_high, src_high);
    } else {
      sll(dst_high, src_high, shift);
      sll(dst_low, src_low, shift);
      shift = 32 - shift;
      srl(kScratchReg, src_low, shift);
      Or(dst_high, dst_high, kScratchReg);
    }
  } else {
    if (shift == 32) {
      mov(dst_low, zero_reg);
      mov(dst_high, src_low);
    } else {
      shift = shift - 32;
      mov(dst_low, zero_reg);
      sll(dst_high, src_low, shift);
    }
  }
}

void MacroAssembler::ShrPair(Register dst_low, Register dst_high,
                             Register src_low, Register src_high,
                             Register shift) {
  Label less_than_32;
  Label zero_shift;
  Label word_shift;
  Label done;
  Register kScratchReg = s3;
  And(shift, shift, 0x3F);
  li(kScratchReg, 0x20);
  Branch(&less_than_32, lt, shift, Operand(kScratchReg));

  Branch(&word_shift, eq, shift, Operand(kScratchReg));
  // Shift more than 32.
  Subu(kScratchReg, shift, kScratchReg);
  mov(dst_high, zero_reg);
  srlv(dst_low, src_high, kScratchReg);
  Branch(&done);
  // Word shift.
  bind(&word_shift);
  mov(dst_high, zero_reg);
  mov(dst_low, src_high);
  Branch(&done);

  bind(&less_than_32);
  // Check if zero shift.
  Branch(&zero_shift, eq, shift, Operand(zero_reg));
  // Shift less than 32.
  Subu(kScratchReg, kScratchReg, shift);
  srlv(dst_high, src_high, shift);
  srlv(dst_low, src_low, shift);
  sllv(kScratchReg, src_high, kScratchReg);
  Or(dst_low, dst_low, kScratchReg);
  Branch(&done);
  // Zero shift.
  bind(&zero_shift);
  mov(dst_low, src_low);
  mov(dst_high, src_high);
  bind(&done);
}

void MacroAssembler::ShrPair(Register dst_low, Register dst_high,
                             Register src_low, Register src_high,
                             uint32_t shift) {
  Register kScratchReg = s3;
  shift = shift & 0x3F;
  if (shift < 32) {
    if (shift == 0) {
      mov(dst_low, src_low);
      mov(dst_high, src_high);
    } else {
      srl(dst_high, src_high, shift);
      srl(dst_low, src_low, shift);
      shift = 32 - shift;
      sll(kScratchReg, src_high, shift);
      Or(dst_low, dst_low, kScratchReg);
    }
  } else {
    if (shift == 32) {
      mov(dst_high, zero_reg);
      mov(dst_low, src_high);
    } else {
      shift = shift - 32;
      mov(dst_high, zero_reg);
      srl(dst_low, src_high, shift);
    }
  }
}

void MacroAssembler::SarPair(Register dst_low, Register dst_high,
                             Register src_low, Register src_high,
                             Register shift) {
  Label less_than_32;
  Label zero_shift;
  Label word_shift;
  Label done;
  Register kScratchReg = s3;
  Register kScratchReg2 = s4;
  And(shift, shift, 0x3F);
  li(kScratchReg, 0x20);
  Branch(&less_than_32, lt, shift, Operand(kScratchReg));

  Branch(&word_shift, eq, shift, Operand(kScratchReg));

  // Shift more than 32.
  li(kScratchReg2, 0x1F);
  Subu(kScratchReg, shift, kScratchReg);
  srav(dst_high, src_high, kScratchReg2);
  srav(dst_low, src_high, kScratchReg);
  Branch(&done);
  // Word shift.
  bind(&word_shift);
  li(kScratchReg2, 0x1F);
  srav(dst_high, src_high, kScratchReg2);
  mov(dst_low, src_high);
  Branch(&done);

  bind(&less_than_32);
  // Check if zero shift.
  Branch(&zero_shift, eq, shift, Operand(zero_reg));

  // Shift less than 32.
  Subu(kScratchReg, kScratchReg, shift);
  srav(dst_high, src_high, shift);
  srlv(dst_low, src_low, shift);
  sllv(kScratchReg, src_high, kScratchReg);
  Or(dst_low, dst_low, kScratchReg);
  Branch(&done);
  // Zero shift.
  bind(&zero_shift);
  mov(dst_low, src_low);
  mov(dst_high, src_high);
  bind(&done);
}

void MacroAssembler::SarPair(Register dst_low, Register dst_high,
                             Register src_low, Register src_high,
                             uint32_t shift) {
  Register kScratchReg = s3;
  shift = shift & 0x3F;
  if (shift < 32) {
    if (shift == 0) {
      mov(dst_low, src_low);
      mov(dst_high, src_high);
    } else {
      sra(dst_high, src_high, shift);
      srl(dst_low, src_low, shift);
      shift = 32 - shift;
      sll(kScratchReg, src_high, shift);
      Or(dst_low, dst_low, kScratchReg);
    }
  } else {
    if (shift == 32) {
      sra(dst_high, src_high, 31);
      mov(dst_low, src_high);
    } else {
      shift = shift - 32;
      sra(dst_high, src_high, 31);
      sra(dst_low, src_high, shift);
    }
  }
}
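
// Illustrative walkthrough, not part of the original source: the
// arithmetic variants replicate the sign bit into the vacated high bits,
// hence sra(dst_high, src_high, 31) on the >= 32 paths. E.g. a constant
// SarPair by 48 of {high:low} = 0xFFFF0000:00000000 (the 64-bit value
// -2^48) gives dst_high = 0xFFFFFFFF and dst_low = sra(0xFFFF0000, 16) =
// 0xFFFFFFFF, i.e. -1, as expected.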

void MacroAssembler::Ext(Register rt,
                         Register rs,
                         uint16_t pos,
                         uint16_t size) {
  DCHECK(pos < 32);
  DCHECK(pos + size < 33);

  if (IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r6)) {
    ext_(rt, rs, pos, size);
  } else {
    // Move rs to rt and shift it left then right to get the
    // desired bitfield on the right side and zeroes on the left.
    int shift_left = 32 - (pos + size);
    sll(rt, rs, shift_left);  // Acts as a move if shift_left == 0.

    int shift_right = 32 - size;
    if (shift_right > 0) {
      srl(rt, rt, shift_right);
    }
  }
}
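
// Illustrative walkthrough, not part of the original source:
// Ext(rt, rs, 8, 4) extracts bits 11..8 of rs. On pre-r2 cores this
// becomes sll(rt, rs, 20) followed by srl(rt, rt, 28): the left shift
// discards the bits above the field and the right shift drops the bits
// below it while zero-extending the result.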


void MacroAssembler::Ins(Register rt,
                         Register rs,
                         uint16_t pos,
                         uint16_t size) {
  DCHECK(pos < 32);
  DCHECK(pos + size <= 32);
  DCHECK(size != 0);

  if (IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r6)) {
    ins_(rt, rs, pos, size);
  } else {
    DCHECK(!rt.is(t8) && !rs.is(t8));
    Subu(at, zero_reg, Operand(1));
    srl(at, at, 32 - size);
    and_(t8, rs, at);
    sll(t8, t8, pos);
    sll(at, at, pos);
    nor(at, at, zero_reg);
    and_(at, rt, at);
    or_(rt, t8, at);
  }
}
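
// Illustrative note, not part of the original source: the fallback first
// builds a mask of `size` ones (Subu of 1 from zero gives 0xFFFFFFFF and
// the srl keeps the low `size` bits), then ORs the masked, shifted source
// field into rt after clearing the destination field with the inverted
// mask. E.g. Ins(rt, rs, 8, 4) uses the mask 0x00000F00 and replaces only
// bits 11..8 of rt with the low 4 bits of rs.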


void MacroAssembler::Cvt_d_uw(FPURegister fd, Register rs,
                              FPURegister scratch) {
  // In FP64 mode we do the conversion from a 64-bit long.
  if (IsFp64Mode()) {
    mtc1(rs, scratch);
    Mthc1(zero_reg, scratch);
    cvt_d_l(fd, scratch);
  } else {
    // Convert rs to a FP value in fd.
    DCHECK(!fd.is(scratch));
    DCHECK(!rs.is(at));

    Label msb_clear, conversion_done;
    // For a value which is < 2^31, regard it as a signed positive word.
    Branch(&msb_clear, ge, rs, Operand(zero_reg), USE_DELAY_SLOT);
    mtc1(rs, fd);

    li(at, 0x41F00000);  // FP value: 2^32.

    // For unsigned inputs > 2^31, we convert to double as a signed int32,
    // then add 2^32 to move it back to an unsigned value in the range
    // 2^31..2^32-1.
    mtc1(zero_reg, scratch);
    Mthc1(at, scratch);

    cvt_d_w(fd, fd);

    Branch(USE_DELAY_SLOT, &conversion_done);
    add_d(fd, fd, scratch);

    bind(&msb_clear);
    cvt_d_w(fd, fd);

    bind(&conversion_done);
  }
}
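
// Illustrative walkthrough, not part of the original source: in FP32 mode
// an input of 0xFFFFFFFF is read back as the signed value -1, cvt_d_w
// gives -1.0, and adding the bias 2^32 (a double whose high word is
// 0x41F00000) yields 4294967295.0, the correct unsigned interpretation.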


void MacroAssembler::Trunc_uw_d(FPURegister fd,
                                FPURegister fs,
                                FPURegister scratch) {
  Trunc_uw_d(fs, t8, scratch);
  mtc1(t8, fd);
}

void MacroAssembler::Trunc_uw_s(FPURegister fd, FPURegister fs,
                                FPURegister scratch) {
  Trunc_uw_s(fs, t8, scratch);
  mtc1(t8, fd);
}

void MacroAssembler::Trunc_w_d(FPURegister fd, FPURegister fs) {
  if (IsMipsArchVariant(kLoongson) && fd.is(fs)) {
    Mfhc1(t8, fs);
    trunc_w_d(fd, fs);
    Mthc1(t8, fs);
  } else {
    trunc_w_d(fd, fs);
  }
}


void MacroAssembler::Round_w_d(FPURegister fd, FPURegister fs) {
  if (IsMipsArchVariant(kLoongson) && fd.is(fs)) {
    Mfhc1(t8, fs);
    round_w_d(fd, fs);
    Mthc1(t8, fs);
  } else {
    round_w_d(fd, fs);
  }
}


void MacroAssembler::Floor_w_d(FPURegister fd, FPURegister fs) {
  if (IsMipsArchVariant(kLoongson) && fd.is(fs)) {
    Mfhc1(t8, fs);
    floor_w_d(fd, fs);
    Mthc1(t8, fs);
  } else {
    floor_w_d(fd, fs);
  }
}


void MacroAssembler::Ceil_w_d(FPURegister fd, FPURegister fs) {
  if (IsMipsArchVariant(kLoongson) && fd.is(fs)) {
    Mfhc1(t8, fs);
    ceil_w_d(fd, fs);
    Mthc1(t8, fs);
  } else {
    ceil_w_d(fd, fs);
  }
}

void MacroAssembler::Trunc_uw_d(FPURegister fd,
                                Register rs,
                                FPURegister scratch) {
  DCHECK(!fd.is(scratch));
  DCHECK(!rs.is(at));

  // Load 2^31 into scratch as its double representation.
  li(at, 0x41E00000);
  mtc1(zero_reg, scratch);
  Mthc1(at, scratch);
  // Test if scratch > fd.
  // If fd < 2^31 we can convert it normally.
  Label simple_convert;
  BranchF(&simple_convert, NULL, lt, fd, scratch);

  // First we subtract 2^31 from fd, then trunc it to rs
  // and add 2^31 to rs.
  sub_d(scratch, fd, scratch);
  trunc_w_d(scratch, scratch);
  mfc1(rs, scratch);
  Or(rs, rs, 1 << 31);

  Label done;
  Branch(&done);
  // Simple conversion.
  bind(&simple_convert);
  trunc_w_d(scratch, fd);
  mfc1(rs, scratch);

  bind(&done);
}
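
// Illustrative walkthrough, not part of the original source: trunc_w_d
// only produces a signed int32, so inputs >= 2^31 are handled by
// truncating fd - 2^31 and then setting bit 31. E.g. for fd = 3000000000.0
// the subtraction gives 852516352.0, truncation gives 852516352, and
// OR-ing in 1 << 31 restores 3000000000.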

void MacroAssembler::Trunc_uw_s(FPURegister fd, Register rs,
                                FPURegister scratch) {
  DCHECK(!fd.is(scratch));
  DCHECK(!rs.is(at));

  // Load 2^31 into scratch as its float representation.
  li(at, 0x4F000000);
  mtc1(at, scratch);
  // Test if scratch > fd.
  // If fd < 2^31 we can convert it normally.
  Label simple_convert;
  BranchF32(&simple_convert, NULL, lt, fd, scratch);

  // First we subtract 2^31 from fd, then trunc it to rs
  // and add 2^31 to rs.
  sub_s(scratch, fd, scratch);
  trunc_w_s(scratch, scratch);
  mfc1(rs, scratch);
  Or(rs, rs, 1 << 31);

  Label done;
  Branch(&done);
  // Simple conversion.
  bind(&simple_convert);
  trunc_w_s(scratch, fd);
  mfc1(rs, scratch);

  bind(&done);
}

void MacroAssembler::Mthc1(Register rt, FPURegister fs) {
  if (IsFp32Mode()) {
    mtc1(rt, fs.high());
  } else {
    DCHECK(IsFp64Mode() || IsFpxxMode());
    DCHECK(IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r6));
    mthc1(rt, fs);
  }
}


void MacroAssembler::Mfhc1(Register rt, FPURegister fs) {
  if (IsFp32Mode()) {
    mfc1(rt, fs.high());
  } else {
    DCHECK(IsFp64Mode() || IsFpxxMode());
    DCHECK(IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r6));
    mfhc1(rt, fs);
  }
}
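
// Illustrative note, not part of the original source: in FP32 mode
// (FR = 0) a double occupies an even/odd pair of 32-bit FPU registers, so
// the high word is just the odd partner register (fs.high()). With 64-bit
// FPU registers (FP64/FPXX) the architected mthc1/mfhc1 instructions move
// the upper 32 bits of the register directly.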


void MacroAssembler::BranchFCommon(SecondaryField sizeField, Label* target,
                                   Label* nan, Condition cond, FPURegister cmp1,
                                   FPURegister cmp2, BranchDelaySlot bd) {
  {
    BlockTrampolinePoolScope block_trampoline_pool(this);
    if (cond == al) {
      Branch(bd, target);
      return;
    }

    if (IsMipsArchVariant(kMips32r6)) {
      sizeField = sizeField == D ? L : W;
    }
    DCHECK(nan || target);
    // Check for unordered (NaN) cases.
    if (nan) {
      bool long_branch =
          nan->is_bound() ? is_near(nan) : is_trampoline_emitted();
      if (!IsMipsArchVariant(kMips32r6)) {
        if (long_branch) {
          Label skip;
          c(UN, sizeField, cmp1, cmp2);
          bc1f(&skip);
          nop();
          BranchLong(nan, bd);
          bind(&skip);
        } else {
          c(UN, sizeField, cmp1, cmp2);
          bc1t(nan);
          if (bd == PROTECT) {
            nop();
          }
        }
      } else {
        // Use kDoubleCompareReg for the comparison result. It has to be
        // unavailable to the Lithium register allocator.
        DCHECK(!cmp1.is(kDoubleCompareReg) && !cmp2.is(kDoubleCompareReg));
        if (long_branch) {
          Label skip;
          cmp(UN, sizeField, kDoubleCompareReg, cmp1, cmp2);
          bc1eqz(&skip, kDoubleCompareReg);
          nop();
          BranchLong(nan, bd);
          bind(&skip);
        } else {
          cmp(UN, sizeField, kDoubleCompareReg, cmp1, cmp2);
          bc1nez(nan, kDoubleCompareReg);
          if (bd == PROTECT) {
            nop();
          }
        }
      }
    }

    if (target) {
      bool long_branch =
          target->is_bound() ? is_near(target) : is_trampoline_emitted();
      if (long_branch) {
        Label skip;
        Condition neg_cond = NegateFpuCondition(cond);
        BranchShortF(sizeField, &skip, neg_cond, cmp1, cmp2, bd);
        BranchLong(target, bd);
        bind(&skip);
      } else {
        BranchShortF(sizeField, target, cond, cmp1, cmp2, bd);
      }
    }
  }
}

void MacroAssembler::BranchShortF(SecondaryField sizeField, Label* target,
                                  Condition cc, FPURegister cmp1,
                                  FPURegister cmp2, BranchDelaySlot bd) {
  if (!IsMipsArchVariant(kMips32r6)) {
    BlockTrampolinePoolScope block_trampoline_pool(this);
    if (target) {
      // Here NaN cases were either handled by this function or are assumed to
      // have been handled by the caller.
      switch (cc) {
        case lt:
          c(OLT, sizeField, cmp1, cmp2);
          bc1t(target);
          break;
        case ult:
          c(ULT, sizeField, cmp1, cmp2);
          bc1t(target);
          break;
        case gt:
          c(ULE, sizeField, cmp1, cmp2);
          bc1f(target);
          break;
        case ugt:
          c(OLE, sizeField, cmp1, cmp2);
          bc1f(target);
          break;
        case ge:
          c(ULT, sizeField, cmp1, cmp2);
          bc1f(target);
          break;
        case uge:
          c(OLT, sizeField, cmp1, cmp2);
          bc1f(target);
          break;
        case le:
          c(OLE, sizeField, cmp1, cmp2);
          bc1t(target);
          break;
        case ule:
          c(ULE, sizeField, cmp1, cmp2);
          bc1t(target);
          break;
        case eq:
          c(EQ, sizeField, cmp1, cmp2);
          bc1t(target);
          break;
        case ueq:
          c(UEQ, sizeField, cmp1, cmp2);
          bc1t(target);
          break;
        case ne:  // Unordered or not equal.
          c(EQ, sizeField, cmp1, cmp2);
          bc1f(target);
          break;
        case ogl:
          c(UEQ, sizeField, cmp1, cmp2);
          bc1f(target);
          break;
        default:
          CHECK(0);
      }
    }
  } else {
    BlockTrampolinePoolScope block_trampoline_pool(this);
    if (target) {
      // Here NaN cases were either handled by this function or are assumed to
      // have been handled by the caller.
      // Unsigned conditions are treated as their signed counterpart.
      // Use kDoubleCompareReg for the comparison result; it is
      // valid in fp64 (FR = 1) mode which is implied for mips32r6.
      DCHECK(!cmp1.is(kDoubleCompareReg) && !cmp2.is(kDoubleCompareReg));
      switch (cc) {
        case lt:
          cmp(OLT, sizeField, kDoubleCompareReg, cmp1, cmp2);
          bc1nez(target, kDoubleCompareReg);
          break;
        case ult:
          cmp(ULT, sizeField, kDoubleCompareReg, cmp1, cmp2);
          bc1nez(target, kDoubleCompareReg);
          break;
        case gt:
          cmp(ULE, sizeField, kDoubleCompareReg, cmp1, cmp2);
          bc1eqz(target, kDoubleCompareReg);
          break;
        case ugt:
          cmp(OLE, sizeField, kDoubleCompareReg, cmp1, cmp2);
          bc1eqz(target, kDoubleCompareReg);
          break;
        case ge:
          cmp(ULT, sizeField, kDoubleCompareReg, cmp1, cmp2);
          bc1eqz(target, kDoubleCompareReg);
          break;
        case uge:
          cmp(OLT, sizeField, kDoubleCompareReg, cmp1, cmp2);
          bc1eqz(target, kDoubleCompareReg);
          break;
        case le:
          cmp(OLE, sizeField, kDoubleCompareReg, cmp1, cmp2);
          bc1nez(target, kDoubleCompareReg);
          break;
        case ule:
          cmp(ULE, sizeField, kDoubleCompareReg, cmp1, cmp2);
          bc1nez(target, kDoubleCompareReg);
          break;
        case eq:
          cmp(EQ, sizeField, kDoubleCompareReg, cmp1, cmp2);
          bc1nez(target, kDoubleCompareReg);
          break;
        case ueq:
          cmp(UEQ, sizeField, kDoubleCompareReg, cmp1, cmp2);
          bc1nez(target, kDoubleCompareReg);
          break;
        case ne:
          cmp(EQ, sizeField, kDoubleCompareReg, cmp1, cmp2);
          bc1eqz(target, kDoubleCompareReg);
          break;
        case ogl:
          cmp(UEQ, sizeField, kDoubleCompareReg, cmp1, cmp2);
          bc1eqz(target, kDoubleCompareReg);
          break;
        default:
          CHECK(0);
      }
    }
  }
  if (bd == PROTECT) {
    nop();
  }
}
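
// Illustrative note, not part of the original source: conditions without
// a direct FPU predicate are synthesized by negation. E.g. `gt` emits
// c(ULE, ...) followed by bc1f, so the branch is taken only when the
// operands are ordered and cmp1 > cmp2; an unordered (NaN) comparison
// falls through instead of branching.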


void MacroAssembler::FmoveLow(FPURegister dst, Register src_low) {
  if (IsFp32Mode()) {
    mtc1(src_low, dst);
  } else {
    DCHECK(IsFp64Mode() || IsFpxxMode());
    DCHECK(IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r6));
    DCHECK(!src_low.is(at));
    mfhc1(at, dst);
    mtc1(src_low, dst);
    mthc1(at, dst);
  }
}


void MacroAssembler::Move(FPURegister dst, float imm) {
  li(at, Operand(bit_cast<int32_t>(imm)));
  mtc1(at, dst);
}


void MacroAssembler::Move(FPURegister dst, double imm) {
  static const DoubleRepresentation minus_zero(-0.0);
  static const DoubleRepresentation zero(0.0);
  DoubleRepresentation value_rep(imm);
  // Handle special values first.
  if (value_rep == zero && has_double_zero_reg_set_) {
    mov_d(dst, kDoubleRegZero);
  } else if (value_rep == minus_zero && has_double_zero_reg_set_) {
    neg_d(dst, kDoubleRegZero);
  } else {
    uint32_t lo, hi;
    DoubleAsTwoUInt32(imm, &lo, &hi);
    // Move the low part of the double into the lower half of the
    // corresponding FPU register pair.
    if (lo != 0) {
      li(at, Operand(lo));
      mtc1(at, dst);
    } else {
      mtc1(zero_reg, dst);
    }
    // Move the high part of the double into the upper half of the
    // corresponding FPU register pair.
    if (hi != 0) {
      li(at, Operand(hi));
      Mthc1(at, dst);
    } else {
      Mthc1(zero_reg, dst);
    }
    if (dst.is(kDoubleRegZero)) has_double_zero_reg_set_ = true;
  }
}
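
// Illustrative walkthrough, not part of the original source: Move(f4, 1.0)
// splits the IEEE-754 pattern 0x3FF0000000000000 into lo = 0 and
// hi = 0x3FF00000, emitting mtc1(zero_reg, f4) for the low word and
// li(at, 0x3FF00000) plus Mthc1(at, f4) for the high word.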


void MacroAssembler::Movz(Register rd, Register rs, Register rt) {
  if (IsMipsArchVariant(kLoongson) || IsMipsArchVariant(kMips32r6)) {
    Label done;
    Branch(&done, ne, rt, Operand(zero_reg));
    mov(rd, rs);
    bind(&done);
  } else {
    movz(rd, rs, rt);
  }
}


void MacroAssembler::Movn(Register rd, Register rs, Register rt) {
  if (IsMipsArchVariant(kLoongson) || IsMipsArchVariant(kMips32r6)) {
    Label done;
    Branch(&done, eq, rt, Operand(zero_reg));
    mov(rd, rs);
    bind(&done);
  } else {
    movn(rd, rs, rt);
  }
}


void MacroAssembler::Movt(Register rd, Register rs, uint16_t cc) {
  if (IsMipsArchVariant(kLoongson)) {
    // Tests an FP condition code and then conditionally moves rs to rd.
    // We do not currently use any FPU cc bit other than bit 0.
    DCHECK(cc == 0);
    DCHECK(!(rs.is(t8) || rd.is(t8)));
    Label done;
    Register scratch = t8;
    // For testing purposes we need to fetch the content of the FCSR register
    // and then test its cc (floating point condition code) bit (for cc = 0,
    // this is bit 23 of the FCSR).
    cfc1(scratch, FCSR);
    // For the MIPS I, II and III architectures, the contents of scratch are
    // UNPREDICTABLE for the instruction immediately following CFC1.
    nop();
    srl(scratch, scratch, 16);
    andi(scratch, scratch, 0x0080);
    Branch(&done, eq, scratch, Operand(zero_reg));
    mov(rd, rs);
    bind(&done);
  } else {
    movt(rd, rs, cc);
  }
}


void MacroAssembler::Movf(Register rd, Register rs, uint16_t cc) {
  if (IsMipsArchVariant(kLoongson)) {
    // Tests an FP condition code and then conditionally moves rs to rd.
    // We do not currently use any FPU cc bit other than bit 0.
    DCHECK(cc == 0);
    DCHECK(!(rs.is(t8) || rd.is(t8)));
    Label done;
    Register scratch = t8;
    // For testing purposes we need to fetch the content of the FCSR register
    // and then test its cc (floating point condition code) bit (for cc = 0,
    // this is bit 23 of the FCSR).
    cfc1(scratch, FCSR);
    // For the MIPS I, II and III architectures, the contents of scratch are
    // UNPREDICTABLE for the instruction immediately following CFC1.
    nop();
    srl(scratch, scratch, 16);
    andi(scratch, scratch, 0x0080);
    Branch(&done, ne, scratch, Operand(zero_reg));
    mov(rd, rs);
    bind(&done);
  } else {
    movf(rd, rs, cc);
  }
}

#define __ masm->

static bool ZeroHelper_d(MacroAssembler* masm, MaxMinKind kind, FPURegister dst,
                         FPURegister src1, FPURegister src2, Label* equal) {
  if (src1.is(src2)) {
    __ Move(dst, src1);
    return true;
  }

  Label other, compare_not_equal;
  FPURegister left, right;
  if (kind == MaxMinKind::kMin) {
    left = src1;
    right = src2;
  } else {
    left = src2;
    right = src1;
  }

  __ BranchF64(&compare_not_equal, nullptr, ne, src1, src2);
  // Left and right hand side are equal, check for -0 vs. +0.
  __ FmoveHigh(t8, src1);
  __ Branch(&other, eq, t8, Operand(0x80000000));
  __ Move_d(dst, right);
  __ Branch(equal);
  __ bind(&other);
  __ Move_d(dst, left);
  __ Branch(equal);
  __ bind(&compare_not_equal);
  return false;
}

static bool ZeroHelper_s(MacroAssembler* masm, MaxMinKind kind, FPURegister dst,
                         FPURegister src1, FPURegister src2, Label* equal) {
  if (src1.is(src2)) {
    __ Move(dst, src1);
    return true;
  }

  Label other, compare_not_equal;
  FPURegister left, right;
  if (kind == MaxMinKind::kMin) {
    left = src1;
    right = src2;
  } else {
    left = src2;
    right = src1;
  }

  __ BranchF32(&compare_not_equal, nullptr, ne, src1, src2);
  // Left and right hand side are equal, check for -0 vs. +0.
  __ FmoveLow(t8, src1);
  __ Branch(&other, eq, t8, Operand(0x80000000));
  __ Move_s(dst, right);
  __ Branch(equal);
  __ bind(&other);
  __ Move_s(dst, left);
  __ Branch(equal);
  __ bind(&compare_not_equal);
  return false;
}

#undef __

void MacroAssembler::MinNaNCheck_d(FPURegister dst, FPURegister src1,
                                   FPURegister src2, Label* nan) {
  if (nan) {
    BranchF64(nullptr, nan, eq, src1, src2);
  }
  if (IsMipsArchVariant(kMips32r6)) {
    min_d(dst, src1, src2);
  } else {
    Label skip;
    if (!ZeroHelper_d(this, MaxMinKind::kMin, dst, src1, src2, &skip)) {
      if (dst.is(src1)) {
        BranchF64(&skip, nullptr, le, src1, src2);
        Move_d(dst, src2);
      } else if (dst.is(src2)) {
        BranchF64(&skip, nullptr, ge, src1, src2);
        Move_d(dst, src1);
      } else {
        Label right;
        BranchF64(&right, nullptr, gt, src1, src2);
        Move_d(dst, src1);
        Branch(&skip);
        bind(&right);
        Move_d(dst, src2);
      }
    }
    bind(&skip);
  }
}

void MacroAssembler::MaxNaNCheck_d(FPURegister dst, FPURegister src1,
                                   FPURegister src2, Label* nan) {
  if (nan) {
    BranchF64(nullptr, nan, eq, src1, src2);
  }
  if (IsMipsArchVariant(kMips32r6)) {
    max_d(dst, src1, src2);
  } else {
    Label skip;
    if (!ZeroHelper_d(this, MaxMinKind::kMax, dst, src1, src2, &skip)) {
      if (dst.is(src1)) {
        BranchF64(&skip, nullptr, ge, src1, src2);
        Move_d(dst, src2);
      } else if (dst.is(src2)) {
        BranchF64(&skip, nullptr, le, src1, src2);
        Move_d(dst, src1);
      } else {
        Label right;
        BranchF64(&right, nullptr, lt, src1, src2);
        Move_d(dst, src1);
        Branch(&skip);
        bind(&right);
        Move_d(dst, src2);
      }
    }
    bind(&skip);
  }
}

void MacroAssembler::MinNaNCheck_s(FPURegister dst, FPURegister src1,
                                   FPURegister src2, Label* nan) {
  if (nan) {
    BranchF32(nullptr, nan, eq, src1, src2);
  }
  if (IsMipsArchVariant(kMips32r6)) {
    min_s(dst, src1, src2);
  } else {
    Label skip;
    if (!ZeroHelper_s(this, MaxMinKind::kMin, dst, src1, src2, &skip)) {
      if (dst.is(src1)) {
        BranchF32(&skip, nullptr, le, src1, src2);
        Move_s(dst, src2);
      } else if (dst.is(src2)) {
        BranchF32(&skip, nullptr, ge, src1, src2);
        Move_s(dst, src1);
      } else {
        Label right;
        BranchF32(&right, nullptr, gt, src1, src2);
        Move_s(dst, src1);
        Branch(&skip);
        bind(&right);
        Move_s(dst, src2);
      }
    }
    bind(&skip);
  }
}

void MacroAssembler::MaxNaNCheck_s(FPURegister dst, FPURegister src1,
                                   FPURegister src2, Label* nan) {
  if (nan) {
    BranchF32(nullptr, nan, eq, src1, src2);
  }
  if (IsMipsArchVariant(kMips32r6)) {
    max_s(dst, src1, src2);
  } else {
    Label skip;
    if (!ZeroHelper_s(this, MaxMinKind::kMax, dst, src1, src2, &skip)) {
      if (dst.is(src1)) {
        BranchF32(&skip, nullptr, ge, src1, src2);
        Move_s(dst, src2);
      } else if (dst.is(src2)) {
        BranchF32(&skip, nullptr, le, src1, src2);
        Move_s(dst, src1);
      } else {
        Label right;
        BranchF32(&right, nullptr, lt, src1, src2);
        Move_s(dst, src1);
        Branch(&skip);
        bind(&right);
        Move_s(dst, src2);
      }
    }
    bind(&skip);
  }
}
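
// Illustrative note, not part of the original source: min/max cannot be a
// plain compare-and-move on pre-r6 cores because -0.0 == +0.0 compares
// equal, yet min(-0.0, +0.0) must return -0.0. The ZeroHelper_d/_s
// routines resolve the equal case by inspecting the sign word
// (0x80000000) directly; only when the operands compare unequal does the
// ordered compare in MinNaNCheck/MaxNaNCheck pick a side.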

void MacroAssembler::Clz(Register rd, Register rs) {
  if (IsMipsArchVariant(kLoongson)) {
    DCHECK(!(rd.is(t8) || rd.is(t9)) && !(rs.is(t8) || rs.is(t9)));
    Register mask = t8;
    Register scratch = t9;
    Label loop, end;
    mov(at, rs);
    mov(rd, zero_reg);
    lui(mask, 0x8000);
    bind(&loop);
    and_(scratch, at, mask);
    Branch(&end, ne, scratch, Operand(zero_reg));
    addiu(rd, rd, 1);
    Branch(&loop, ne, mask, Operand(zero_reg), USE_DELAY_SLOT);
    srl(mask, mask, 1);
    bind(&end);
  } else {
    clz(rd, rs);
  }
}
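
// Illustrative note, not part of the original source: the Loongson
// fallback scans a one-bit mask downwards from bit 31, incrementing rd for
// every clear bit until a set bit is found; the srl halving the mask sits
// in the branch delay slot (USE_DELAY_SLOT), so it executes on every
// iteration. E.g. rs = 0x00F00000 leaves rd = 8, matching clz.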


void MacroAssembler::EmitFPUTruncate(FPURoundingMode rounding_mode,
                                     Register result,
                                     DoubleRegister double_input,
                                     Register scratch,
                                     DoubleRegister double_scratch,
                                     Register except_flag,
                                     CheckForInexactConversion check_inexact) {
  DCHECK(!result.is(scratch));
  DCHECK(!double_input.is(double_scratch));
  DCHECK(!except_flag.is(scratch));

  Label done;

  // Clear the except flag (0 = no exception).
  mov(except_flag, zero_reg);

  // Test for values that can be exactly represented as a signed 32-bit
  // integer.
  cvt_w_d(double_scratch, double_input);
  mfc1(result, double_scratch);
  cvt_d_w(double_scratch, double_scratch);
  BranchF(&done, NULL, eq, double_input, double_scratch);

  int32_t except_mask = kFCSRFlagMask;  // Assume interested in all exceptions.

  if (check_inexact == kDontCheckForInexactConversion) {
    // Ignore inexact exceptions.
    except_mask &= ~kFCSRInexactFlagMask;
  }

  // Save FCSR.
  cfc1(scratch, FCSR);
  // Disable FPU exceptions.
  ctc1(zero_reg, FCSR);

  // Do operation based on rounding mode.
  switch (rounding_mode) {
    case kRoundToNearest:
      Round_w_d(double_scratch, double_input);
      break;
    case kRoundToZero:
      Trunc_w_d(double_scratch, double_input);
      break;
    case kRoundToPlusInf:
      Ceil_w_d(double_scratch, double_input);
      break;
    case kRoundToMinusInf:
      Floor_w_d(double_scratch, double_input);
      break;
  }  // End of switch-statement.

  // Retrieve FCSR.
  cfc1(except_flag, FCSR);
  // Restore FCSR.
  ctc1(scratch, FCSR);
  // Move the converted value into the result register.
  mfc1(result, double_scratch);

  // Check for fpu exceptions.
  And(except_flag, except_flag, Operand(except_mask));

  bind(&done);
}
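
// Illustrative usage sketch, not part of the original source (register
// choices are arbitrary): a caller wanting a checked double-to-int32
// conversion might write
//   EmitFPUTruncate(kRoundToZero, v0, f12, t0, f14, t1,
//                   kCheckForInexactConversion);
//   Branch(&conversion_failed, ne, t1, Operand(zero_reg));
// A non-zero except_flag means the input was NaN, out of int32 range, or
// (when inexact checking is enabled) not exactly representable.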


void MacroAssembler::TryInlineTruncateDoubleToI(Register result,
                                                DoubleRegister double_input,
                                                Label* done) {
  DoubleRegister single_scratch = kLithiumScratchDouble.low();
  Register scratch = at;
  Register scratch2 = t9;

  // Clear cumulative exception flags and save the FCSR.
  cfc1(scratch2, FCSR);
  ctc1(zero_reg, FCSR);
  // Try a conversion to a signed integer.
  trunc_w_d(single_scratch, double_input);
  mfc1(result, single_scratch);
  // Retrieve and restore the FCSR.
  cfc1(scratch, FCSR);
  ctc1(scratch2, FCSR);
  // Check for overflow and NaNs.
  And(scratch,
      scratch,
      kFCSROverflowFlagMask | kFCSRUnderflowFlagMask | kFCSRInvalidOpFlagMask);
  // If we had no exceptions we are done.
  Branch(done, eq, scratch, Operand(zero_reg));
}
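
// Illustrative note, not part of the original source: trunc_w_d reports
// failure only through the FCSR, so the cause/flag bits are cleared first
// and inspected afterwards. An overflow, underflow or invalid-operation
// flag means the double was NaN or outside int32 range, and control falls
// through to the out-of-line DoubleToIStub call.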


void MacroAssembler::TruncateDoubleToI(Register result,
                                       DoubleRegister double_input) {
  Label done;

  TryInlineTruncateDoubleToI(result, double_input, &done);

  // If we fell through, the inline version didn't succeed, so call the stub.
  push(ra);
  Subu(sp, sp, Operand(kDoubleSize));  // Put input on stack.
  sdc1(double_input, MemOperand(sp, 0));

  DoubleToIStub stub(isolate(), sp, result, 0, true, true);
  CallStub(&stub);

  Addu(sp, sp, Operand(kDoubleSize));
  pop(ra);

  bind(&done);
}


void MacroAssembler::TruncateHeapNumberToI(Register result, Register object) {
  Label done;
  DoubleRegister double_scratch = f12;
  DCHECK(!result.is(object));

  ldc1(double_scratch,
       MemOperand(object, HeapNumber::kValueOffset - kHeapObjectTag));
  TryInlineTruncateDoubleToI(result, double_scratch, &done);

  // If we fell through, the inline version didn't succeed, so call the stub.
  push(ra);
  DoubleToIStub stub(isolate(),
                     object,
                     result,
                     HeapNumber::kValueOffset - kHeapObjectTag,
                     true,
                     true);
  CallStub(&stub);
  pop(ra);

  bind(&done);
}


void MacroAssembler::TruncateNumberToI(Register object,
                                       Register result,
                                       Register heap_number_map,
                                       Register scratch,
                                       Label* not_number) {
  Label done;
  DCHECK(!result.is(object));

  UntagAndJumpIfSmi(result, object, &done);
  JumpIfNotHeapNumber(object, heap_number_map, scratch, not_number);
  TruncateHeapNumberToI(result, object);

  bind(&done);
}


void MacroAssembler::GetLeastBitsFromSmi(Register dst,
                                         Register src,
                                         int num_least_bits) {
  Ext(dst, src, kSmiTagSize, num_least_bits);
}


void MacroAssembler::GetLeastBitsFromInt32(Register dst,
                                           Register src,
                                           int num_least_bits) {
  And(dst, src, Operand((1 << num_least_bits) - 1));
}


// Emulated conditional branches do not emit a nop in the branch delay slot.
//
// BRANCH_ARGS_CHECK checks that conditional jump arguments are correct.
#define BRANCH_ARGS_CHECK(cond, rs, rt) DCHECK( \
    (cond == cc_always && rs.is(zero_reg) && rt.rm().is(zero_reg)) || \
    (cond != cc_always && (!rs.is(zero_reg) || !rt.rm().is(zero_reg))))


void MacroAssembler::Branch(int32_t offset, BranchDelaySlot bdslot) {
  DCHECK(IsMipsArchVariant(kMips32r6) ? is_int26(offset) : is_int16(offset));
  BranchShort(offset, bdslot);
}


void MacroAssembler::Branch(int32_t offset, Condition cond, Register rs,
                            const Operand& rt, BranchDelaySlot bdslot) {
  bool is_near = BranchShortCheck(offset, nullptr, cond, rs, rt, bdslot);
  DCHECK(is_near);
  USE(is_near);
}


void MacroAssembler::Branch(Label* L, BranchDelaySlot bdslot) {
  if (L->is_bound()) {
    if (is_near_branch(L)) {
      BranchShort(L, bdslot);
    } else {
      BranchLong(L, bdslot);
    }
  } else {
    if (is_trampoline_emitted()) {
      BranchLong(L, bdslot);
    } else {
      BranchShort(L, bdslot);
    }
  }
}


void MacroAssembler::Branch(Label* L, Condition cond, Register rs,
                            const Operand& rt,
                            BranchDelaySlot bdslot) {
  if (L->is_bound()) {
    if (!BranchShortCheck(0, L, cond, rs, rt, bdslot)) {
      if (cond != cc_always) {
        Label skip;
        Condition neg_cond = NegateCondition(cond);
        BranchShort(&skip, neg_cond, rs, rt);
        BranchLong(L, bdslot);
        bind(&skip);
      } else {
        BranchLong(L, bdslot);
      }
    }
  } else {
    if (is_trampoline_emitted()) {
      if (cond != cc_always) {
        Label skip;
        Condition neg_cond = NegateCondition(cond);
        BranchShort(&skip, neg_cond, rs, rt);
        BranchLong(L, bdslot);
        bind(&skip);
      } else {
        BranchLong(L, bdslot);
      }
    } else {
      BranchShort(L, cond, rs, rt, bdslot);
    }
  }
}


void MacroAssembler::Branch(Label* L,
                            Condition cond,
                            Register rs,
                            Heap::RootListIndex index,
                            BranchDelaySlot bdslot) {
  LoadRoot(at, index);
  Branch(L, cond, rs, Operand(at), bdslot);
}
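
// Illustrative note, not part of the original source: for a bound label
// the distance is known exactly, so a short branch is used whenever it
// reaches. For an unbound forward label the code falls back to the
// long-branch sequence once a trampoline pool has been emitted, since the
// final offset may exceed the short range; a conditional long branch is
// synthesized by skipping over it with the negated condition.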


void MacroAssembler::BranchShortHelper(int16_t offset, Label* L,
                                       BranchDelaySlot bdslot) {
  DCHECK(L == nullptr || offset == 0);
  offset = GetOffset(offset, L, OffsetSize::kOffset16);
  b(offset);

  // Emit a nop in the branch delay slot if required.
  if (bdslot == PROTECT)
    nop();
}


void MacroAssembler::BranchShortHelperR6(int32_t offset, Label* L) {
  DCHECK(L == nullptr || offset == 0);
  offset = GetOffset(offset, L, OffsetSize::kOffset26);
  bc(offset);
}


void MacroAssembler::BranchShort(int32_t offset, BranchDelaySlot bdslot) {
  if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) {
    DCHECK(is_int26(offset));
    BranchShortHelperR6(offset, nullptr);
  } else {
    DCHECK(is_int16(offset));
    BranchShortHelper(offset, nullptr, bdslot);
  }
}


void MacroAssembler::BranchShort(Label* L, BranchDelaySlot bdslot) {
  if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) {
    BranchShortHelperR6(0, L);
  } else {
    BranchShortHelper(0, L, bdslot);
  }
}


static inline bool IsZero(const Operand& rt) {
  if (rt.is_reg()) {
    return rt.rm().is(zero_reg);
  } else {
    return rt.immediate() == 0;
  }
}


int32_t MacroAssembler::GetOffset(int32_t offset, Label* L, OffsetSize bits) {
  if (L) {
    offset = branch_offset_helper(L, bits) >> 2;
  } else {
    DCHECK(is_intn(offset, bits));
  }
  return offset;
}


Register MacroAssembler::GetRtAsRegisterHelper(const Operand& rt,
                                               Register scratch) {
  Register r2 = no_reg;
  if (rt.is_reg()) {
    r2 = rt.rm_;
  } else {
    r2 = scratch;
    li(r2, rt);
  }

  return r2;
}
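
// Illustrative note, not part of the original source: GetRtAsRegisterHelper
// lets the branch emitters treat register and immediate operands
// uniformly. An immediate is materialized into the caller-supplied scratch
// register with li only when the selected compare-and-branch instruction
// actually needs a register operand.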


bool MacroAssembler::BranchShortHelperR6(int32_t offset, Label* L,
                                         Condition cond, Register rs,
                                         const Operand& rt) {
  DCHECK(L == nullptr || offset == 0);
  Register scratch = rs.is(at) ? t8 : at;
  OffsetSize bits = OffsetSize::kOffset16;

  // Be careful to always use shifted_branch_offset only just before the
  // branch instruction, as the location will be remembered for patching
  // the target.
  {
    BlockTrampolinePoolScope block_trampoline_pool(this);
    switch (cond) {
      case cc_always:
        bits = OffsetSize::kOffset26;
        if (!is_near(L, bits)) return false;
        offset = GetOffset(offset, L, bits);
        bc(offset);
        break;
      case eq:
        if (rs.code() == rt.rm_.reg_code) {
          // Pre R6 beq is used here to make the code patchable. Otherwise bc
          // should be used which has no condition field so is not patchable.
          bits = OffsetSize::kOffset16;
          if (!is_near(L, bits)) return false;
          scratch = GetRtAsRegisterHelper(rt, scratch);
          offset = GetOffset(offset, L, bits);
          beq(rs, scratch, offset);
          nop();
        } else if (IsZero(rt)) {
          bits = OffsetSize::kOffset21;
          if (!is_near(L, bits)) return false;
          offset = GetOffset(offset, L, bits);
          beqzc(rs, offset);
        } else {
          // We don't want any other register but scratch clobbered.
          bits = OffsetSize::kOffset16;
          if (!is_near(L, bits)) return false;
          scratch = GetRtAsRegisterHelper(rt, scratch);
          offset = GetOffset(offset, L, bits);
          beqc(rs, scratch, offset);
        }
        break;
      case ne:
        if (rs.code() == rt.rm_.reg_code) {
          // Pre R6 bne is used here to make the code patchable. Otherwise we
          // should not generate any instruction.
          bits = OffsetSize::kOffset16;
          if (!is_near(L, bits)) return false;
          scratch = GetRtAsRegisterHelper(rt, scratch);
          offset = GetOffset(offset, L, bits);
          bne(rs, scratch, offset);
          nop();
        } else if (IsZero(rt)) {
          bits = OffsetSize::kOffset21;
          if (!is_near(L, bits)) return false;
          offset = GetOffset(offset, L, bits);
          bnezc(rs, offset);
        } else {
          // We don't want any other register but scratch clobbered.
          bits = OffsetSize::kOffset16;
          if (!is_near(L, bits)) return false;
          scratch = GetRtAsRegisterHelper(rt, scratch);
          offset = GetOffset(offset, L, bits);
          bnec(rs, scratch, offset);
        }
        break;

      // Signed comparison.
      case greater:
        // rs > rt
        if (rs.code() == rt.rm_.reg_code) {
          break;  // No code needs to be emitted.
        } else if (rs.is(zero_reg)) {
          bits = OffsetSize::kOffset16;
          if (!is_near(L, bits)) return false;
          scratch = GetRtAsRegisterHelper(rt, scratch);
          offset = GetOffset(offset, L, bits);
          bltzc(scratch, offset);
        } else if (IsZero(rt)) {
          bits = OffsetSize::kOffset16;
          if (!is_near(L, bits)) return false;
          offset = GetOffset(offset, L, bits);
          bgtzc(rs, offset);
        } else {
          bits = OffsetSize::kOffset16;
          if (!is_near(L, bits)) return false;
          scratch = GetRtAsRegisterHelper(rt, scratch);
          DCHECK(!rs.is(scratch));
          offset = GetOffset(offset, L, bits);
          bltc(scratch, rs, offset);
        }
        break;
      case greater_equal:
        // rs >= rt
        if (rs.code() == rt.rm_.reg_code) {
          bits = OffsetSize::kOffset26;
          if (!is_near(L, bits)) return false;
          offset = GetOffset(offset, L, bits);
          bc(offset);
        } else if (rs.is(zero_reg)) {
          bits = OffsetSize::kOffset16;
          if (!is_near(L, bits)) return false;
          scratch = GetRtAsRegisterHelper(rt, scratch);
          offset = GetOffset(offset, L, bits);
          blezc(scratch, offset);
        } else if (IsZero(rt)) {
          bits = OffsetSize::kOffset16;
          if (!is_near(L, bits)) return false;
          offset = GetOffset(offset, L, bits);
          bgezc(rs, offset);
        } else {
          bits = OffsetSize::kOffset16;
          if (!is_near(L, bits)) return false;
          scratch = GetRtAsRegisterHelper(rt, scratch);
          DCHECK(!rs.is(scratch));
          offset = GetOffset(offset, L, bits);
          bgec(rs, scratch, offset);
        }
        break;
      case less:
        // rs < rt
        if (rs.code() == rt.rm_.reg_code) {
          break;  // No code needs to be emitted.
        } else if (rs.is(zero_reg)) {
          bits = OffsetSize::kOffset16;
          if (!is_near(L, bits)) return false;
          scratch = GetRtAsRegisterHelper(rt, scratch);
          offset = GetOffset(offset, L, bits);
          bgtzc(scratch, offset);
        } else if (IsZero(rt)) {
          bits = OffsetSize::kOffset16;
          if (!is_near(L, bits)) return false;
          offset = GetOffset(offset, L, bits);
          bltzc(rs, offset);
        } else {
          bits = OffsetSize::kOffset16;
          if (!is_near(L, bits)) return false;
          scratch = GetRtAsRegisterHelper(rt, scratch);
          DCHECK(!rs.is(scratch));
          offset = GetOffset(offset, L, bits);
          bltc(rs, scratch, offset);
        }
        break;
      case less_equal:
        // rs <= rt
        if (rs.code() == rt.rm_.reg_code) {
          bits = OffsetSize::kOffset26;
          if (!is_near(L, bits)) return false;
          offset = GetOffset(offset, L, bits);
          bc(offset);
        } else if (rs.is(zero_reg)) {
          bits = OffsetSize::kOffset16;
          if (!is_near(L, bits)) return false;
          scratch = GetRtAsRegisterHelper(rt, scratch);
          offset = GetOffset(offset, L, bits);
          bgezc(scratch, offset);
        } else if (IsZero(rt)) {
          bits = OffsetSize::kOffset16;
          if (!is_near(L, bits)) return false;
          offset = GetOffset(offset, L, bits);
          blezc(rs, offset);
        } else {
          bits = OffsetSize::kOffset16;
          if (!is_near(L, bits)) return false;
          scratch = GetRtAsRegisterHelper(rt, scratch);
          DCHECK(!rs.is(scratch));
          offset = GetOffset(offset, L, bits);
          bgec(scratch, rs, offset);
        }
        break;

      // Unsigned comparison.
      case Ugreater:
        // rs > rt
        if (rs.code() == rt.rm_.reg_code) {
          break;  // No code needs to be emitted.
        } else if (rs.is(zero_reg)) {
          bits = OffsetSize::kOffset21;
          if (!is_near(L, bits)) return false;
          scratch = GetRtAsRegisterHelper(rt, scratch);
          offset = GetOffset(offset, L, bits);
          bnezc(scratch, offset);
        } else if (IsZero(rt)) {
          bits = OffsetSize::kOffset21;
          if (!is_near(L, bits)) return false;
          offset = GetOffset(offset, L, bits);
          bnezc(rs, offset);
        } else {
          bits = OffsetSize::kOffset16;
          if (!is_near(L, bits)) return false;
          scratch = GetRtAsRegisterHelper(rt, scratch);
          DCHECK(!rs.is(scratch));
          offset = GetOffset(offset, L, bits);
          bltuc(scratch, rs, offset);
        }
        break;
      case Ugreater_equal:
        // rs >= rt
        if (rs.code() == rt.rm_.reg_code) {
          bits = OffsetSize::kOffset26;
          if (!is_near(L, bits)) return false;
          offset = GetOffset(offset, L, bits);
          bc(offset);
        } else if (rs.is(zero_reg)) {
          bits = OffsetSize::kOffset21;
          if (!is_near(L, bits)) return false;
          scratch = GetRtAsRegisterHelper(rt, scratch);
          offset = GetOffset(offset, L, bits);
          beqzc(scratch, offset);
        } else if (IsZero(rt)) {
          bits = OffsetSize::kOffset26;
          if (!is_near(L, bits)) return false;
          offset = GetOffset(offset, L, bits);
          bc(offset);
        } else {
          bits = OffsetSize::kOffset16;
          if (!is_near(L, bits)) return false;
          scratch = GetRtAsRegisterHelper(rt, scratch);
3165 DCHECK(!rs.is(scratch));
3166 offset = GetOffset(offset, L, bits);
3167 bgeuc(rs, scratch, offset);
Steve Block44f0eee2011-05-26 01:26:41 +01003168 }
3169 break;
3170 case Uless:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003171 // rs < rt
3172 if (rs.code() == rt.rm_.reg_code) {
3173 break; // No code needs to be emitted.
3174 } else if (rs.is(zero_reg)) {
3175 bits = OffsetSize::kOffset21;
3176 if (!is_near(L, bits)) return false;
3177 scratch = GetRtAsRegisterHelper(rt, scratch);
3178 offset = GetOffset(offset, L, bits);
3179 bnezc(scratch, offset);
3180 } else if (IsZero(rt)) {
3181 break; // No code needs to be emitted.
Steve Block44f0eee2011-05-26 01:26:41 +01003182 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003183 bits = OffsetSize::kOffset16;
3184 if (!is_near(L, bits)) return false;
3185 scratch = GetRtAsRegisterHelper(rt, scratch);
3186 DCHECK(!rs.is(scratch));
3187 offset = GetOffset(offset, L, bits);
3188 bltuc(rs, scratch, offset);
Steve Block44f0eee2011-05-26 01:26:41 +01003189 }
3190 break;
3191 case Uless_equal:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003192 // rs <= rt
3193 if (rs.code() == rt.rm_.reg_code) {
3194 bits = OffsetSize::kOffset26;
3195 if (!is_near(L, bits)) return false;
3196 offset = GetOffset(offset, L, bits);
3197 bc(offset);
3198 } else if (rs.is(zero_reg)) {
3199 bits = OffsetSize::kOffset26;
3200 if (!is_near(L, bits)) return false;
3201 scratch = GetRtAsRegisterHelper(rt, scratch);
3202 offset = GetOffset(offset, L, bits);
3203 bc(offset);
3204 } else if (IsZero(rt)) {
3205 bits = OffsetSize::kOffset21;
3206 if (!is_near(L, bits)) return false;
3207 offset = GetOffset(offset, L, bits);
3208 beqzc(rs, offset);
Steve Block44f0eee2011-05-26 01:26:41 +01003209 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003210 bits = OffsetSize::kOffset16;
3211 if (!is_near(L, bits)) return false;
3212 scratch = GetRtAsRegisterHelper(rt, scratch);
3213 DCHECK(!rs.is(scratch));
3214 offset = GetOffset(offset, L, bits);
3215 bgeuc(scratch, rs, offset);
Steve Block44f0eee2011-05-26 01:26:41 +01003216 }
3217 break;
3218 default:
3219 UNREACHABLE();
3220 }
3221 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003222 CheckTrampolinePoolQuick(1);
3223 return true;
Steve Block44f0eee2011-05-26 01:26:41 +01003224}
3225
3226
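// Summary of the R6 strategy above (descriptive, not normative): when rs and
// rt are the same register the outcome is known statically, so always-true
// conditions emit an unconditional bc and always-false ones emit no code at
// all (ne is the exception: it emits a pre-R6 bne plus nop so the site stays
// patchable); comparisons against zero use the compact bXXzc forms with 21-
// or 16-bit offsets; the general case materializes rt in a scratch register
// and uses the two-register compact branches (bnec, bltc, bgec, bltuc,
// bgeuc).
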
bool MacroAssembler::BranchShortHelper(int16_t offset, Label* L, Condition cond,
                                       Register rs, const Operand& rt,
                                       BranchDelaySlot bdslot) {
  DCHECK(L == nullptr || offset == 0);
  if (!is_near(L, OffsetSize::kOffset16)) return false;

  Register scratch = at;
  int32_t offset32;

  // Be careful to always use shifted_branch_offset only just before the
  // branch instruction, as the location will be remembered for patching the
  // target.
  {
    BlockTrampolinePoolScope block_trampoline_pool(this);
    switch (cond) {
      case cc_always:
        offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
        b(offset32);
        break;
      case eq:
        if (IsZero(rt)) {
          offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
          beq(rs, zero_reg, offset32);
        } else {
          // We don't want any other register but scratch clobbered.
          scratch = GetRtAsRegisterHelper(rt, scratch);
          offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
          beq(rs, scratch, offset32);
        }
        break;
      case ne:
        if (IsZero(rt)) {
          offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
          bne(rs, zero_reg, offset32);
        } else {
          // We don't want any other register but scratch clobbered.
          scratch = GetRtAsRegisterHelper(rt, scratch);
          offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
          bne(rs, scratch, offset32);
        }
        break;

      // Signed comparison.
      case greater:
        if (IsZero(rt)) {
          offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
          bgtz(rs, offset32);
        } else {
          Slt(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
          offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
          bne(scratch, zero_reg, offset32);
        }
        break;
      case greater_equal:
        if (IsZero(rt)) {
          offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
          bgez(rs, offset32);
        } else {
          Slt(scratch, rs, rt);
          offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
          beq(scratch, zero_reg, offset32);
        }
        break;
      case less:
        if (IsZero(rt)) {
          offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
          bltz(rs, offset32);
        } else {
          Slt(scratch, rs, rt);
          offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
          bne(scratch, zero_reg, offset32);
        }
        break;
      case less_equal:
        if (IsZero(rt)) {
          offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
          blez(rs, offset32);
        } else {
          Slt(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
          offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
          beq(scratch, zero_reg, offset32);
        }
        break;

      // Unsigned comparison.
      case Ugreater:
        if (IsZero(rt)) {
          offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
          bne(rs, zero_reg, offset32);
        } else {
          Sltu(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
          offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
          bne(scratch, zero_reg, offset32);
        }
        break;
      case Ugreater_equal:
        if (IsZero(rt)) {
          offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
          b(offset32);
        } else {
          Sltu(scratch, rs, rt);
          offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
          beq(scratch, zero_reg, offset32);
        }
        break;
      case Uless:
        if (IsZero(rt)) {
          return true;  // No code needs to be emitted.
        } else {
          Sltu(scratch, rs, rt);
          offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
          bne(scratch, zero_reg, offset32);
        }
        break;
      case Uless_equal:
        if (IsZero(rt)) {
          offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
          beq(rs, zero_reg, offset32);
        } else {
          Sltu(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
          offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
          beq(scratch, zero_reg, offset32);
        }
        break;
      default:
        UNREACHABLE();
    }
  }
  // Emit a nop in the branch delay slot if required.
  if (bdslot == PROTECT)
    nop();

  return true;
}

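// Worked example for the pre-R6 helper above (illustrative): a `less`
// comparison against a non-zero operand emits
//   slt  at, rs, rt           // at = 1 if rs < rt, else 0
//   bne  at, zero_reg, offset
//   nop                       // branch delay slot, PROTECT only
// Only `at` (the scratch register) is clobbered, matching the constraint
// noted in the eq/ne cases.
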
bool MacroAssembler::BranchShortCheck(int32_t offset, Label* L, Condition cond,
                                      Register rs, const Operand& rt,
                                      BranchDelaySlot bdslot) {
  BRANCH_ARGS_CHECK(cond, rs, rt);

  if (!L) {
    if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) {
      DCHECK(is_int26(offset));
      return BranchShortHelperR6(offset, nullptr, cond, rs, rt);
    } else {
      DCHECK(is_int16(offset));
      return BranchShortHelper(offset, nullptr, cond, rs, rt, bdslot);
    }
  } else {
    DCHECK(offset == 0);
    if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) {
      return BranchShortHelperR6(0, L, cond, rs, rt);
    } else {
      return BranchShortHelper(0, L, cond, rs, rt, bdslot);
    }
  }
  return false;
}


void MacroAssembler::BranchShort(int32_t offset, Condition cond, Register rs,
                                 const Operand& rt, BranchDelaySlot bdslot) {
  BranchShortCheck(offset, nullptr, cond, rs, rt, bdslot);
}


void MacroAssembler::BranchShort(Label* L, Condition cond, Register rs,
                                 const Operand& rt, BranchDelaySlot bdslot) {
  BranchShortCheck(0, L, cond, rs, rt, bdslot);
}

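// Usage sketch for the wrappers above (illustrative, assuming `masm` is a
// MacroAssembler*): branch to a local label when a0 is zero.
//   Label done;
//   masm->BranchShort(&done, eq, a0, Operand(zero_reg));
//   ...  // fall-through path
//   masm->bind(&done);
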
void MacroAssembler::BranchAndLink(int32_t offset, BranchDelaySlot bdslot) {
  BranchAndLinkShort(offset, bdslot);
}


void MacroAssembler::BranchAndLink(int32_t offset, Condition cond, Register rs,
                                   const Operand& rt, BranchDelaySlot bdslot) {
  bool is_near = BranchAndLinkShortCheck(offset, nullptr, cond, rs, rt, bdslot);
  DCHECK(is_near);
  USE(is_near);
}


void MacroAssembler::BranchAndLink(Label* L, BranchDelaySlot bdslot) {
  if (L->is_bound()) {
    if (is_near_branch(L)) {
      BranchAndLinkShort(L, bdslot);
    } else {
      BranchAndLinkLong(L, bdslot);
    }
  } else {
    if (is_trampoline_emitted()) {
      BranchAndLinkLong(L, bdslot);
    } else {
      BranchAndLinkShort(L, bdslot);
    }
  }
}


void MacroAssembler::BranchAndLink(Label* L, Condition cond, Register rs,
                                   const Operand& rt,
                                   BranchDelaySlot bdslot) {
  if (L->is_bound()) {
    if (!BranchAndLinkShortCheck(0, L, cond, rs, rt, bdslot)) {
      Label skip;
      Condition neg_cond = NegateCondition(cond);
      BranchShort(&skip, neg_cond, rs, rt);
      BranchAndLinkLong(L, bdslot);
      bind(&skip);
    }
  } else {
    if (is_trampoline_emitted()) {
      Label skip;
      Condition neg_cond = NegateCondition(cond);
      BranchShort(&skip, neg_cond, rs, rt);
      BranchAndLinkLong(L, bdslot);
      bind(&skip);
    } else {
      BranchAndLinkShortCheck(0, L, cond, rs, rt, bdslot);
    }
  }
}

void MacroAssembler::BranchAndLinkShortHelper(int16_t offset, Label* L,
                                              BranchDelaySlot bdslot) {
  DCHECK(L == nullptr || offset == 0);
  offset = GetOffset(offset, L, OffsetSize::kOffset16);
  bal(offset);

  // Emit a nop in the branch delay slot if required.
  if (bdslot == PROTECT)
    nop();
}


void MacroAssembler::BranchAndLinkShortHelperR6(int32_t offset, Label* L) {
  DCHECK(L == nullptr || offset == 0);
  offset = GetOffset(offset, L, OffsetSize::kOffset26);
  balc(offset);
}


void MacroAssembler::BranchAndLinkShort(int32_t offset,
                                        BranchDelaySlot bdslot) {
  if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) {
    DCHECK(is_int26(offset));
    BranchAndLinkShortHelperR6(offset, nullptr);
  } else {
    DCHECK(is_int16(offset));
    BranchAndLinkShortHelper(offset, nullptr, bdslot);
  }
}


void MacroAssembler::BranchAndLinkShort(Label* L, BranchDelaySlot bdslot) {
  if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) {
    BranchAndLinkShortHelperR6(0, L);
  } else {
    BranchAndLinkShortHelper(0, L, bdslot);
  }
}

bool MacroAssembler::BranchAndLinkShortHelperR6(int32_t offset, Label* L,
                                                Condition cond, Register rs,
                                                const Operand& rt) {
  DCHECK(L == nullptr || offset == 0);
  Register scratch = rs.is(at) ? t8 : at;
  OffsetSize bits = OffsetSize::kOffset16;

  BlockTrampolinePoolScope block_trampoline_pool(this);
  DCHECK((cond == cc_always && is_int26(offset)) || is_int16(offset));
  switch (cond) {
    case cc_always:
      bits = OffsetSize::kOffset26;
      if (!is_near(L, bits)) return false;
      offset = GetOffset(offset, L, bits);
      balc(offset);
      break;
    case eq:
      if (!is_near(L, bits)) return false;
      Subu(scratch, rs, rt);
      offset = GetOffset(offset, L, bits);
      beqzalc(scratch, offset);
      break;
    case ne:
      if (!is_near(L, bits)) return false;
      Subu(scratch, rs, rt);
      offset = GetOffset(offset, L, bits);
      bnezalc(scratch, offset);
      break;

    // Signed comparison.
    case greater:
      // rs > rt
      if (rs.code() == rt.rm_.reg_code) {
        break;  // No code needs to be emitted.
      } else if (rs.is(zero_reg)) {
        if (!is_near(L, bits)) return false;
        scratch = GetRtAsRegisterHelper(rt, scratch);
        offset = GetOffset(offset, L, bits);
        bltzalc(scratch, offset);
      } else if (IsZero(rt)) {
        if (!is_near(L, bits)) return false;
        offset = GetOffset(offset, L, bits);
        bgtzalc(rs, offset);
      } else {
        if (!is_near(L, bits)) return false;
        Slt(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
        offset = GetOffset(offset, L, bits);
        bnezalc(scratch, offset);
      }
      break;
    case greater_equal:
      // rs >= rt
      if (rs.code() == rt.rm_.reg_code) {
        bits = OffsetSize::kOffset26;
        if (!is_near(L, bits)) return false;
        offset = GetOffset(offset, L, bits);
        balc(offset);
      } else if (rs.is(zero_reg)) {
        if (!is_near(L, bits)) return false;
        scratch = GetRtAsRegisterHelper(rt, scratch);
        offset = GetOffset(offset, L, bits);
        blezalc(scratch, offset);
      } else if (IsZero(rt)) {
        if (!is_near(L, bits)) return false;
        offset = GetOffset(offset, L, bits);
        bgezalc(rs, offset);
      } else {
        if (!is_near(L, bits)) return false;
        Slt(scratch, rs, rt);
        offset = GetOffset(offset, L, bits);
        beqzalc(scratch, offset);
      }
      break;
    case less:
      // rs < rt
      if (rs.code() == rt.rm_.reg_code) {
        break;  // No code needs to be emitted.
      } else if (rs.is(zero_reg)) {
        if (!is_near(L, bits)) return false;
        scratch = GetRtAsRegisterHelper(rt, scratch);
        offset = GetOffset(offset, L, bits);
        bgtzalc(scratch, offset);
      } else if (IsZero(rt)) {
        if (!is_near(L, bits)) return false;
        offset = GetOffset(offset, L, bits);
        bltzalc(rs, offset);
      } else {
        if (!is_near(L, bits)) return false;
        Slt(scratch, rs, rt);
        offset = GetOffset(offset, L, bits);
        bnezalc(scratch, offset);
      }
      break;
    case less_equal:
      // rs <= rt
      if (rs.code() == rt.rm_.reg_code) {
        bits = OffsetSize::kOffset26;
        if (!is_near(L, bits)) return false;
        offset = GetOffset(offset, L, bits);
        balc(offset);
      } else if (rs.is(zero_reg)) {
        if (!is_near(L, bits)) return false;
        scratch = GetRtAsRegisterHelper(rt, scratch);
        offset = GetOffset(offset, L, bits);
        bgezalc(scratch, offset);
      } else if (IsZero(rt)) {
        if (!is_near(L, bits)) return false;
        offset = GetOffset(offset, L, bits);
        blezalc(rs, offset);
      } else {
        if (!is_near(L, bits)) return false;
        Slt(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
        offset = GetOffset(offset, L, bits);
        beqzalc(scratch, offset);
      }
      break;

    // Unsigned comparison.
    case Ugreater:
      // rs > rt
      if (!is_near(L, bits)) return false;
      Sltu(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
      offset = GetOffset(offset, L, bits);
      bnezalc(scratch, offset);
      break;
    case Ugreater_equal:
      // rs >= rt
      if (!is_near(L, bits)) return false;
      Sltu(scratch, rs, rt);
      offset = GetOffset(offset, L, bits);
      beqzalc(scratch, offset);
      break;
    case Uless:
      // rs < rt
      if (!is_near(L, bits)) return false;
      Sltu(scratch, rs, rt);
      offset = GetOffset(offset, L, bits);
      bnezalc(scratch, offset);
      break;
    case Uless_equal:
      // rs <= rt
      if (!is_near(L, bits)) return false;
      Sltu(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
      offset = GetOffset(offset, L, bits);
      beqzalc(scratch, offset);
      break;
    default:
      UNREACHABLE();
  }
  return true;
}

// Pre r6 we need to use a bgezal or bltzal, but they can't be used directly
// with the slt instructions. We could use sub or add instead but we would miss
// overflow cases, so we keep slt and add an intermediate third instruction.
bool MacroAssembler::BranchAndLinkShortHelper(int16_t offset, Label* L,
                                              Condition cond, Register rs,
                                              const Operand& rt,
                                              BranchDelaySlot bdslot) {
  DCHECK(L == nullptr || offset == 0);
  if (!is_near(L, OffsetSize::kOffset16)) return false;

  Register scratch = t8;
  BlockTrampolinePoolScope block_trampoline_pool(this);

  switch (cond) {
    case cc_always:
      offset = GetOffset(offset, L, OffsetSize::kOffset16);
      bal(offset);
      break;
    case eq:
      bne(rs, GetRtAsRegisterHelper(rt, scratch), 2);
      nop();
      offset = GetOffset(offset, L, OffsetSize::kOffset16);
      bal(offset);
      break;
    case ne:
      beq(rs, GetRtAsRegisterHelper(rt, scratch), 2);
      nop();
      offset = GetOffset(offset, L, OffsetSize::kOffset16);
      bal(offset);
      break;

    // Signed comparison.
    case greater:
      Slt(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
      addiu(scratch, scratch, -1);
      offset = GetOffset(offset, L, OffsetSize::kOffset16);
      bgezal(scratch, offset);
      break;
    case greater_equal:
      Slt(scratch, rs, rt);
      addiu(scratch, scratch, -1);
      offset = GetOffset(offset, L, OffsetSize::kOffset16);
      bltzal(scratch, offset);
      break;
    case less:
      Slt(scratch, rs, rt);
      addiu(scratch, scratch, -1);
      offset = GetOffset(offset, L, OffsetSize::kOffset16);
      bgezal(scratch, offset);
      break;
    case less_equal:
      Slt(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
      addiu(scratch, scratch, -1);
      offset = GetOffset(offset, L, OffsetSize::kOffset16);
      bltzal(scratch, offset);
      break;

    // Unsigned comparison.
    case Ugreater:
      Sltu(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
      addiu(scratch, scratch, -1);
      offset = GetOffset(offset, L, OffsetSize::kOffset16);
      bgezal(scratch, offset);
      break;
    case Ugreater_equal:
      Sltu(scratch, rs, rt);
      addiu(scratch, scratch, -1);
      offset = GetOffset(offset, L, OffsetSize::kOffset16);
      bltzal(scratch, offset);
      break;
    case Uless:
      Sltu(scratch, rs, rt);
      addiu(scratch, scratch, -1);
      offset = GetOffset(offset, L, OffsetSize::kOffset16);
      bgezal(scratch, offset);
      break;
    case Uless_equal:
      Sltu(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
      addiu(scratch, scratch, -1);
      offset = GetOffset(offset, L, OffsetSize::kOffset16);
      bltzal(scratch, offset);
      break;

    default:
      UNREACHABLE();
  }

  // Emit a nop in the branch delay slot if required.
  if (bdslot == PROTECT)
    nop();

  return true;
}

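// Worked example of the slt/addiu/bXXzal pattern above (illustrative): for
// the `greater` condition the emitted sequence is roughly
//   slt    scratch, rt, rs       // scratch = 1 if rs > rt, else 0
//   addiu  scratch, scratch, -1  // scratch = 0 if rs > rt, else -1
//   bgezal scratch, offset       // branch-and-link when scratch >= 0
// The addiu turns the 0/1 result of slt into -1/0 so the sign-testing
// bgezal/bltzal instructions can consume it, which is the "intermediate
// third instruction" mentioned in the comment above.
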
bool MacroAssembler::BranchAndLinkShortCheck(int32_t offset, Label* L,
                                             Condition cond, Register rs,
                                             const Operand& rt,
                                             BranchDelaySlot bdslot) {
  BRANCH_ARGS_CHECK(cond, rs, rt);

  if (!L) {
    if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) {
      DCHECK(is_int26(offset));
      return BranchAndLinkShortHelperR6(offset, nullptr, cond, rs, rt);
    } else {
      DCHECK(is_int16(offset));
      return BranchAndLinkShortHelper(offset, nullptr, cond, rs, rt, bdslot);
    }
  } else {
    DCHECK(offset == 0);
    if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) {
      return BranchAndLinkShortHelperR6(0, L, cond, rs, rt);
    } else {
      return BranchAndLinkShortHelper(0, L, cond, rs, rt, bdslot);
    }
  }
  return false;
}


void MacroAssembler::Jump(Register target,
                          Condition cond,
                          Register rs,
                          const Operand& rt,
                          BranchDelaySlot bd) {
  BlockTrampolinePoolScope block_trampoline_pool(this);
  if (IsMipsArchVariant(kMips32r6) && bd == PROTECT) {
    if (cond == cc_always) {
      jic(target, 0);
    } else {
      BRANCH_ARGS_CHECK(cond, rs, rt);
      Branch(2, NegateCondition(cond), rs, rt);
      jic(target, 0);
    }
  } else {
    if (cond == cc_always) {
      jr(target);
    } else {
      BRANCH_ARGS_CHECK(cond, rs, rt);
      Branch(2, NegateCondition(cond), rs, rt);
      jr(target);
    }
    // Emit a nop in the branch delay slot if required.
    if (bd == PROTECT) nop();
  }
}

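// Note on the pattern above: a conditional register jump is synthesized by
// branching over the jump with the negated condition, so jr (or jic) only
// executes when the original condition holds.
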
void MacroAssembler::Jump(intptr_t target,
                          RelocInfo::Mode rmode,
                          Condition cond,
                          Register rs,
                          const Operand& rt,
                          BranchDelaySlot bd) {
  Label skip;
  if (cond != cc_always) {
    Branch(USE_DELAY_SLOT, &skip, NegateCondition(cond), rs, rt);
  }
  // The first instruction of 'li' may be placed in the delay slot.
  // This is not an issue, t9 is expected to be clobbered anyway.
  li(t9, Operand(target, rmode));
  Jump(t9, al, zero_reg, Operand(zero_reg), bd);
  bind(&skip);
}


void MacroAssembler::Jump(Address target,
                          RelocInfo::Mode rmode,
                          Condition cond,
                          Register rs,
                          const Operand& rt,
                          BranchDelaySlot bd) {
  DCHECK(!RelocInfo::IsCodeTarget(rmode));
  Jump(reinterpret_cast<intptr_t>(target), rmode, cond, rs, rt, bd);
}


void MacroAssembler::Jump(Handle<Code> code,
                          RelocInfo::Mode rmode,
                          Condition cond,
                          Register rs,
                          const Operand& rt,
                          BranchDelaySlot bd) {
  DCHECK(RelocInfo::IsCodeTarget(rmode));
  AllowDeferredHandleDereference embedding_raw_address;
  Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond, rs, rt, bd);
}


int MacroAssembler::CallSize(Register target,
                             Condition cond,
                             Register rs,
                             const Operand& rt,
                             BranchDelaySlot bd) {
  int size = 0;

  if (cond == cc_always) {
    size += 1;
  } else {
    size += 3;
  }

  if (bd == PROTECT && !IsMipsArchVariant(kMips32r6)) size += 1;

  return size * kInstrSize;
}

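// Worked example (illustrative): an unconditional call with a PROTECT delay
// slot on pre-R6 hardware is jalr + nop, i.e. 2 * kInstrSize; a conditional
// call adds the negated branch and its delay slot for 4 * kInstrSize. This
// must stay in sync with Call(Register) below, which CHECKs the emitted size
// in debug builds.
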
// Note: To call gcc-compiled C code on mips, you must call thru t9.
void MacroAssembler::Call(Register target,
                          Condition cond,
                          Register rs,
                          const Operand& rt,
                          BranchDelaySlot bd) {
#ifdef DEBUG
  int size = IsPrevInstrCompactBranch() ? kInstrSize : 0;
#endif

  BlockTrampolinePoolScope block_trampoline_pool(this);
  Label start;
  bind(&start);
  if (IsMipsArchVariant(kMips32r6) && bd == PROTECT) {
    if (cond == cc_always) {
      jialc(target, 0);
    } else {
      BRANCH_ARGS_CHECK(cond, rs, rt);
      Branch(2, NegateCondition(cond), rs, rt);
      jialc(target, 0);
    }
  } else {
    if (cond == cc_always) {
      jalr(target);
    } else {
      BRANCH_ARGS_CHECK(cond, rs, rt);
      Branch(2, NegateCondition(cond), rs, rt);
      jalr(target);
    }
    // Emit a nop in the branch delay slot if required.
    if (bd == PROTECT) nop();
  }

#ifdef DEBUG
  CHECK_EQ(size + CallSize(target, cond, rs, rt, bd),
           SizeOfCodeGeneratedSince(&start));
#endif
}

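// Usage sketch for the t9 convention above (illustrative, assuming `masm` is
// a MacroAssembler*): position-independent MIPS callees derive their own
// address from t9, so C functions are reached by loading the target into t9
// first, e.g.
//   masm->li(t9, Operand(ExternalReference(...)));  // hypothetical target
//   masm->Call(t9);
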
int MacroAssembler::CallSize(Address target,
                             RelocInfo::Mode rmode,
                             Condition cond,
                             Register rs,
                             const Operand& rt,
                             BranchDelaySlot bd) {
  int size = CallSize(t9, cond, rs, rt, bd);
  return size + 2 * kInstrSize;
}


void MacroAssembler::Call(Address target,
                          RelocInfo::Mode rmode,
                          Condition cond,
                          Register rs,
                          const Operand& rt,
                          BranchDelaySlot bd) {
  BlockTrampolinePoolScope block_trampoline_pool(this);
  Label start;
  bind(&start);
  int32_t target_int = reinterpret_cast<int32_t>(target);
  li(t9, Operand(target_int, rmode), CONSTANT_SIZE);
  Call(t9, cond, rs, rt, bd);
  DCHECK_EQ(CallSize(target, rmode, cond, rs, rt, bd),
            SizeOfCodeGeneratedSince(&start));
}


int MacroAssembler::CallSize(Handle<Code> code,
                             RelocInfo::Mode rmode,
                             TypeFeedbackId ast_id,
                             Condition cond,
                             Register rs,
                             const Operand& rt,
                             BranchDelaySlot bd) {
  AllowDeferredHandleDereference using_raw_address;
  return CallSize(reinterpret_cast<Address>(code.location()),
                  rmode, cond, rs, rt, bd);
}


void MacroAssembler::Call(Handle<Code> code,
                          RelocInfo::Mode rmode,
                          TypeFeedbackId ast_id,
                          Condition cond,
                          Register rs,
                          const Operand& rt,
                          BranchDelaySlot bd) {
  BlockTrampolinePoolScope block_trampoline_pool(this);
  Label start;
  bind(&start);
  DCHECK(RelocInfo::IsCodeTarget(rmode));
  if (rmode == RelocInfo::CODE_TARGET && !ast_id.IsNone()) {
    SetRecordedAstId(ast_id);
    rmode = RelocInfo::CODE_TARGET_WITH_ID;
  }
  AllowDeferredHandleDereference embedding_raw_address;
  Call(reinterpret_cast<Address>(code.location()), rmode, cond, rs, rt, bd);
  DCHECK_EQ(CallSize(code, rmode, ast_id, cond, rs, rt, bd),
            SizeOfCodeGeneratedSince(&start));
}


void MacroAssembler::Ret(Condition cond,
                         Register rs,
                         const Operand& rt,
                         BranchDelaySlot bd) {
  Jump(ra, cond, rs, rt, bd);
}


void MacroAssembler::BranchLong(Label* L, BranchDelaySlot bdslot) {
  if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT &&
      (!L->is_bound() || is_near_r6(L))) {
    BranchShortHelperR6(0, L);
  } else {
    BlockTrampolinePoolScope block_trampoline_pool(this);
    uint32_t imm32;
    imm32 = jump_address(L);
    if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) {
      uint32_t lui_offset, jic_offset;
      UnpackTargetAddressUnsigned(imm32, lui_offset, jic_offset);
      {
        BlockGrowBufferScope block_buf_growth(this);
        // Buffer growth (and relocation) must be blocked for internal
        // references until associated instructions are emitted and
        // available to be patched.
        RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED);
        lui(at, lui_offset);
        jic(at, jic_offset);
      }
      CheckBuffer();
    } else {
      {
        BlockGrowBufferScope block_buf_growth(this);
        // Buffer growth (and relocation) must be blocked for internal
        // references until associated instructions are emitted and
        // available to be patched.
        RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED);
        lui(at, (imm32 & kHiMask) >> kLuiShift);
        ori(at, at, (imm32 & kImm16Mask));
      }
      CheckBuffer();
      jr(at);
      // Emit a nop in the branch delay slot if required.
      if (bdslot == PROTECT) nop();
    }
  }
}

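// The long branch above embeds the absolute target address with lui/ori
// (or lui/jic on R6) under INTERNAL_REFERENCE_ENCODED reloc info so it can
// be patched later; the pre-R6 emitted sequence is roughly:
//   lui  at, target_hi       // high halfword of the label address
//   ori  at, at, target_lo   // low halfword
//   jr   at
//   nop                      // branch delay slot, PROTECT only
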
void MacroAssembler::BranchAndLinkLong(Label* L, BranchDelaySlot bdslot) {
  if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT &&
      (!L->is_bound() || is_near_r6(L))) {
    BranchAndLinkShortHelperR6(0, L);
  } else {
    BlockTrampolinePoolScope block_trampoline_pool(this);
    uint32_t imm32;
    imm32 = jump_address(L);
    if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) {
      uint32_t lui_offset, jic_offset;
      UnpackTargetAddressUnsigned(imm32, lui_offset, jic_offset);
      {
        BlockGrowBufferScope block_buf_growth(this);
        // Buffer growth (and relocation) must be blocked for internal
        // references until associated instructions are emitted and
        // available to be patched.
        RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED);
        lui(at, lui_offset);
        jialc(at, jic_offset);
      }
      CheckBuffer();
    } else {
      {
        BlockGrowBufferScope block_buf_growth(this);
        // Buffer growth (and relocation) must be blocked for internal
        // references until associated instructions are emitted and
        // available to be patched.
        RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED);
        lui(at, (imm32 & kHiMask) >> kLuiShift);
        ori(at, at, (imm32 & kImm16Mask));
      }
      CheckBuffer();
      jalr(at);
      // Emit a nop in the branch delay slot if required.
      if (bdslot == PROTECT) nop();
    }
  }
}


void MacroAssembler::DropAndRet(int drop) {
  DCHECK(is_int16(drop * kPointerSize));
  Ret(USE_DELAY_SLOT);
  addiu(sp, sp, drop * kPointerSize);
}

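// Note: Ret(USE_DELAY_SLOT) above deliberately leaves the delay slot open, so
// the addiu that pops the stack executes in the delay slot of the return
// jump; dropping and returning costs only two instructions.
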
void MacroAssembler::DropAndRet(int drop,
                                Condition cond,
                                Register r1,
                                const Operand& r2) {
  // Both Drop and Ret need to be conditional.
  Label skip;
  if (cond != cc_always) {
    Branch(&skip, NegateCondition(cond), r1, r2);
  }

  Drop(drop);
  Ret();

  if (cond != cc_always) {
    bind(&skip);
  }
}


void MacroAssembler::Drop(int count,
                          Condition cond,
                          Register reg,
                          const Operand& op) {
  if (count <= 0) {
    return;
  }

  Label skip;

  if (cond != al) {
    Branch(&skip, NegateCondition(cond), reg, op);
  }

  Addu(sp, sp, Operand(count * kPointerSize));

  if (cond != al) {
    bind(&skip);
  }
}


void MacroAssembler::Swap(Register reg1,
                          Register reg2,
                          Register scratch) {
  if (scratch.is(no_reg)) {
    Xor(reg1, reg1, Operand(reg2));
    Xor(reg2, reg2, Operand(reg1));
    Xor(reg1, reg1, Operand(reg2));
  } else {
    mov(scratch, reg1);
    mov(reg1, reg2);
    mov(reg2, scratch);
  }
}

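// Worked example of the scratch-free path above: with reg1 = A and reg2 = B,
// the three XORs compute
//   reg1 = A ^ B
//   reg2 = (A ^ B) ^ B = A
//   reg1 = (A ^ B) ^ A = B
// swapping the two registers without touching any third register.
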
void MacroAssembler::Call(Label* target) {
  BranchAndLink(target);
}


void MacroAssembler::Push(Handle<Object> handle) {
  li(at, Operand(handle));
  push(at);
}


void MacroAssembler::DebugBreak() {
  PrepareCEntryArgs(0);
  PrepareCEntryFunction(
      ExternalReference(Runtime::kHandleDebuggerStatement, isolate()));
  CEntryStub ces(isolate(), 1);
  DCHECK(AllowThisStubCall(&ces));
  Call(ces.GetCode(), RelocInfo::DEBUGGER_STATEMENT);
}


// ---------------------------------------------------------------------------
// Exception handling.

void MacroAssembler::PushStackHandler() {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize);

  // Link the current handler as the next handler.
  li(t2, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
  lw(t1, MemOperand(t2));
  push(t1);

  // Set this new handler as the current one.
  sw(sp, MemOperand(t2));
}


void MacroAssembler::PopStackHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  pop(a1);
  Addu(sp, sp, Operand(StackHandlerConstants::kSize - kPointerSize));
  li(at, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
  sw(a1, MemOperand(at));
}


void MacroAssembler::Allocate(int object_size,
                              Register result,
                              Register scratch1,
                              Register scratch2,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
  DCHECK((flags & ALLOCATION_FOLDED) == 0);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      li(result, 0x7091);
      li(scratch1, 0x7191);
      li(scratch2, 0x7291);
    }
    jmp(gc_required);
    return;
  }

  DCHECK(!AreAliased(result, scratch1, scratch2, t9, at));

  // Make object size into bytes.
  if ((flags & SIZE_IN_WORDS) != 0) {
    object_size *= kPointerSize;
  }
  DCHECK_EQ(0, object_size & kObjectAlignmentMask);

  // Check relative positions of allocation top and limit addresses.
  // ARM adds additional checks to make sure the ldm instruction can be
  // used. On MIPS we don't have ldm so we don't need additional checks either.
  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);
  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  intptr_t top = reinterpret_cast<intptr_t>(allocation_top.address());
  intptr_t limit = reinterpret_cast<intptr_t>(allocation_limit.address());
  DCHECK((limit - top) == kPointerSize);

  // Set up allocation top address and allocation limit registers.
  Register top_address = scratch1;
  // This code stores a temporary value in t9.
  Register alloc_limit = t9;
  Register result_end = scratch2;
  li(top_address, Operand(allocation_top));

  if ((flags & RESULT_CONTAINS_TOP) == 0) {
    // Load allocation top into result and allocation limit into alloc_limit.
    lw(result, MemOperand(top_address));
    lw(alloc_limit, MemOperand(top_address, kPointerSize));
  } else {
    if (emit_debug_code()) {
      // Assert that result actually contains top on entry.
      lw(alloc_limit, MemOperand(top_address));
      Check(eq, kUnexpectedAllocationTop, result, Operand(alloc_limit));
    }
    // Load allocation limit. Result already contains allocation top.
    lw(alloc_limit, MemOperand(top_address, limit - top));
  }

  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    // Align the next allocation. Storing the filler map without checking top
    // is safe in new-space because the limit of the heap is aligned there.
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    And(result_end, result, Operand(kDoubleAlignmentMask));
    Label aligned;
    Branch(&aligned, eq, result_end, Operand(zero_reg));
    if ((flags & PRETENURE) != 0) {
      Branch(gc_required, Ugreater_equal, result, Operand(alloc_limit));
    }
    li(result_end, Operand(isolate()->factory()->one_pointer_filler_map()));
    sw(result_end, MemOperand(result));
    Addu(result, result, Operand(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if new space is exhausted. Use result
  // to calculate the new top.
  Addu(result_end, result, Operand(object_size));
  Branch(gc_required, Ugreater, result_end, Operand(alloc_limit));

  if ((flags & ALLOCATION_FOLDING_DOMINATOR) == 0) {
    // The top pointer is not updated for allocation folding dominators.
    sw(result_end, MemOperand(top_address));
  }

  // Tag object.
  Addu(result, result, Operand(kHeapObjectTag));
}

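// Usage sketch (illustrative, assuming `masm` is a MacroAssembler*): allocate
// an 8-byte object in new space, receiving the tagged pointer in v0 and
// bailing out to a slow path when the space is exhausted.
//   Label gc_required;
//   masm->Allocate(8, v0, t0, t1, &gc_required, NO_ALLOCATION_FLAGS);
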
void MacroAssembler::Allocate(Register object_size, Register result,
                              Register result_end, Register scratch,
                              Label* gc_required, AllocationFlags flags) {
  DCHECK((flags & ALLOCATION_FOLDED) == 0);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      li(result, 0x7091);
      li(scratch, 0x7191);
      li(result_end, 0x7291);
    }
    jmp(gc_required);
    return;
  }

  // |object_size| and |result_end| may overlap if the DOUBLE_ALIGNMENT flag
  // is not specified. Other registers must not overlap.
  DCHECK(!AreAliased(object_size, result, scratch, t9, at));
  DCHECK(!AreAliased(result_end, result, scratch, t9, at));
  DCHECK((flags & DOUBLE_ALIGNMENT) == 0 || !object_size.is(result_end));

  // Check relative positions of allocation top and limit addresses.
  // ARM adds additional checks to make sure the ldm instruction can be
  // used. On MIPS we don't have ldm so we don't need additional checks either.
  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);
  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);
  intptr_t top = reinterpret_cast<intptr_t>(allocation_top.address());
  intptr_t limit = reinterpret_cast<intptr_t>(allocation_limit.address());
  DCHECK((limit - top) == kPointerSize);

  // Set up allocation top address and allocation limit registers.
  Register top_address = scratch;
  // This code stores a temporary value in t9.
  Register alloc_limit = t9;
  li(top_address, Operand(allocation_top));

  if ((flags & RESULT_CONTAINS_TOP) == 0) {
    // Load allocation top into result and allocation limit into alloc_limit.
    lw(result, MemOperand(top_address));
    lw(alloc_limit, MemOperand(top_address, kPointerSize));
  } else {
    if (emit_debug_code()) {
      // Assert that result actually contains top on entry.
      lw(alloc_limit, MemOperand(top_address));
      Check(eq, kUnexpectedAllocationTop, result, Operand(alloc_limit));
    }
    // Load allocation limit. Result already contains allocation top.
    lw(alloc_limit, MemOperand(top_address, limit - top));
  }

  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    // Align the next allocation. Storing the filler map without checking top
    // is safe in new-space because the limit of the heap is aligned there.
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    And(result_end, result, Operand(kDoubleAlignmentMask));
    Label aligned;
    Branch(&aligned, eq, result_end, Operand(zero_reg));
    if ((flags & PRETENURE) != 0) {
      Branch(gc_required, Ugreater_equal, result, Operand(alloc_limit));
    }
    li(result_end, Operand(isolate()->factory()->one_pointer_filler_map()));
    sw(result_end, MemOperand(result));
    Addu(result, result, Operand(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if new space is exhausted. Use result
  // to calculate the new top. Object size may be in words so a shift is
  // required to get the number of bytes.
  if ((flags & SIZE_IN_WORDS) != 0) {
    Lsa(result_end, result, object_size, kPointerSizeLog2);
  } else {
    Addu(result_end, result, Operand(object_size));
  }

  Branch(gc_required, Ugreater, result_end, Operand(alloc_limit));

  // Update allocation top. result_end temporarily holds the new top.
  if (emit_debug_code()) {
    And(alloc_limit, result_end, Operand(kObjectAlignmentMask));
    Check(eq, kUnalignedAllocationInNewSpace, alloc_limit, Operand(zero_reg));
  }

  if ((flags & ALLOCATION_FOLDING_DOMINATOR) == 0) {
    // The top pointer is not updated for allocation folding dominators.
    sw(result_end, MemOperand(top_address));
  }

  // Tag object.
  Addu(result, result, Operand(kHeapObjectTag));
}

void MacroAssembler::FastAllocate(int object_size, Register result,
                                  Register scratch1, Register scratch2,
                                  AllocationFlags flags) {
  DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
  DCHECK(!AreAliased(result, scratch1, scratch2, t9, at));

  // Make object size into bytes.
  if ((flags & SIZE_IN_WORDS) != 0) {
    object_size *= kPointerSize;
  }
  DCHECK_EQ(0, object_size & kObjectAlignmentMask);

  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Set up allocation top address and allocation limit registers.
  Register top_address = scratch1;
  // This code stores a temporary value in t9.
  Register result_end = scratch2;
  li(top_address, Operand(allocation_top));
  lw(result, MemOperand(top_address));

  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    // Align the next allocation. Storing the filler map without checking top
    // is safe in new-space because the limit of the heap is aligned there.
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    And(result_end, result, Operand(kDoubleAlignmentMask));
    Label aligned;
    Branch(&aligned, eq, result_end, Operand(zero_reg));
    li(result_end, Operand(isolate()->factory()->one_pointer_filler_map()));
    sw(result_end, MemOperand(result));
    Addu(result, result, Operand(kDoubleSize / 2));
    bind(&aligned);
  }

  Addu(result_end, result, Operand(object_size));

  // The top pointer is not updated for allocation folding dominators.
  sw(result_end, MemOperand(top_address));

  Addu(result, result, Operand(kHeapObjectTag));
}

void MacroAssembler::FastAllocate(Register object_size, Register result,
                                  Register result_end, Register scratch,
                                  AllocationFlags flags) {
  // |object_size| and |result_end| may overlap if the DOUBLE_ALIGNMENT flag
  // is not specified. Other registers must not overlap.
  DCHECK(!AreAliased(object_size, result, scratch, t9, at));
  DCHECK(!AreAliased(result_end, result, scratch, t9, at));
  DCHECK((flags & DOUBLE_ALIGNMENT) == 0 || !object_size.is(result_end));

  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Set up allocation top address and allocation limit registers.
  Register top_address = scratch;
  // This code stores a temporary value in t9.
  li(top_address, Operand(allocation_top));
  lw(result, MemOperand(top_address));

  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    // Align the next allocation. Storing the filler map without checking top
    // is safe in new-space because the limit of the heap is aligned there.
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    And(result_end, result, Operand(kDoubleAlignmentMask));
    Label aligned;
    Branch(&aligned, eq, result_end, Operand(zero_reg));
    li(result_end, Operand(isolate()->factory()->one_pointer_filler_map()));
    sw(result_end, MemOperand(result));
    Addu(result, result, Operand(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if new space is exhausted. Use result
  // to calculate the new top. Object size may be in words so a shift is
  // required to get the number of bytes.
  if ((flags & SIZE_IN_WORDS) != 0) {
    Lsa(result_end, result, object_size, kPointerSizeLog2);
  } else {
    Addu(result_end, result, Operand(object_size));
  }

  // The top pointer is not updated for allocation folding dominators.
  sw(result_end, MemOperand(top_address));

  Addu(result, result, Operand(kHeapObjectTag));
}

void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  DCHECK((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  sll(scratch1, length, 1);  // Length in bytes, not chars.
  addiu(scratch1, scratch1,
        kObjectAlignmentMask + SeqTwoByteString::kHeaderSize);
  And(scratch1, scratch1, Operand(~kObjectAlignmentMask));

  // Allocate two-byte string in new space.
  Allocate(scratch1, result, scratch2, scratch3, gc_required,
           NO_ALLOCATION_FLAGS);

  // Set the map, length and hash field.
  InitializeNewString(result,
                      length,
                      Heap::kStringMapRootIndex,
                      scratch1,
                      scratch2);
}

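// Worked example of the size computation above (illustrative, assuming the
// 32-bit layout with a 12-byte SeqTwoByteString header and 8-byte object
// alignment, i.e. kObjectAlignmentMask == 7): for length 5,
//   scratch1 = (5 << 1) + 7 + 12 = 29, and 29 & ~7 = 24,
// which is the 22 bytes of header plus characters rounded up to the next
// alignment boundary.
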
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004463void MacroAssembler::AllocateOneByteString(Register result, Register length,
4464 Register scratch1, Register scratch2,
4465 Register scratch3,
4466 Label* gc_required) {
Steve Block44f0eee2011-05-26 01:26:41 +01004467 // Calculate the number of bytes needed for the characters in the string
4468 // while observing object alignment.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004469 DCHECK((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
4470 DCHECK(kCharSize == 1);
4471 addiu(scratch1, length, kObjectAlignmentMask + SeqOneByteString::kHeaderSize);
Steve Block44f0eee2011-05-26 01:26:41 +01004472 And(scratch1, scratch1, Operand(~kObjectAlignmentMask));
4473
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004474 // Allocate one-byte string in new space.
Ben Murdochc5610432016-08-08 18:44:38 +01004475 Allocate(scratch1, result, scratch2, scratch3, gc_required,
4476 NO_ALLOCATION_FLAGS);
Steve Block44f0eee2011-05-26 01:26:41 +01004477
4478 // Set the map, length and hash field.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004479 InitializeNewString(result, length, Heap::kOneByteStringMapRootIndex,
4480 scratch1, scratch2);
Steve Block44f0eee2011-05-26 01:26:41 +01004481}


void MacroAssembler::AllocateTwoByteConsString(Register result,
                                               Register length,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
           NO_ALLOCATION_FLAGS);
  InitializeNewString(result,
                      length,
                      Heap::kConsStringMapRootIndex,
                      scratch1,
                      scratch2);
}


void MacroAssembler::AllocateOneByteConsString(Register result, Register length,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
           NO_ALLOCATION_FLAGS);

  InitializeNewString(result, length, Heap::kConsOneByteStringMapRootIndex,
                      scratch1, scratch2);
}


void MacroAssembler::AllocateTwoByteSlicedString(Register result,
                                                 Register length,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           NO_ALLOCATION_FLAGS);

  InitializeNewString(result,
                      length,
                      Heap::kSlicedStringMapRootIndex,
                      scratch1,
                      scratch2);
}


void MacroAssembler::AllocateOneByteSlicedString(Register result,
                                                 Register length,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           NO_ALLOCATION_FLAGS);

  InitializeNewString(result, length, Heap::kSlicedOneByteStringMapRootIndex,
                      scratch1, scratch2);
}


void MacroAssembler::JumpIfNotUniqueNameInstanceType(Register reg,
                                                     Label* not_unique_name) {
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  Label succeed;
  And(at, reg, Operand(kIsNotStringMask | kIsNotInternalizedMask));
  Branch(&succeed, eq, at, Operand(zero_reg));
  Branch(not_unique_name, ne, reg, Operand(SYMBOL_TYPE));

  bind(&succeed);
}
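
// A unique name is either an internalized string or a symbol. The masked test
// above relies on kInternalizedTag == 0: a zero result means the instance type
// is an internalized string, and any other value must equal SYMBOL_TYPE.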


// Allocates a heap number or jumps to the label if the young space is full and
// a scavenge is needed.
void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Register heap_number_map,
                                        Label* need_gc,
                                        MutableMode mode) {
  // Allocate an object in the heap for the heap number and tag it as a heap
  // object.
  Allocate(HeapNumber::kSize, result, scratch1, scratch2, need_gc,
           NO_ALLOCATION_FLAGS);

  Heap::RootListIndex map_index = mode == MUTABLE
      ? Heap::kMutableHeapNumberMapRootIndex
      : Heap::kHeapNumberMapRootIndex;
  AssertIsRoot(heap_number_map, map_index);

  // Store heap number map in the allocated object.
  sw(heap_number_map, FieldMemOperand(result, HeapObject::kMapOffset));
}


void MacroAssembler::AllocateHeapNumberWithValue(Register result,
                                                 FPURegister value,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  LoadRoot(t8, Heap::kHeapNumberMapRootIndex);
  AllocateHeapNumber(result, scratch1, scratch2, t8, gc_required);
  sdc1(value, FieldMemOperand(result, HeapNumber::kValueOffset));
}
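
// Minimal usage sketch (hypothetical registers, not from this file): box the
// double in f4 as a fresh HeapNumber in v0, bailing out to &gc when new space
// is exhausted.
//   Label gc;
//   AllocateHeapNumberWithValue(v0, f4, t0, t1, &gc);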


void MacroAssembler::AllocateJSValue(Register result, Register constructor,
                                     Register value, Register scratch1,
                                     Register scratch2, Label* gc_required) {
  DCHECK(!result.is(constructor));
  DCHECK(!result.is(scratch1));
  DCHECK(!result.is(scratch2));
  DCHECK(!result.is(value));

  // Allocate JSValue in new space.
  Allocate(JSValue::kSize, result, scratch1, scratch2, gc_required,
           NO_ALLOCATION_FLAGS);

  // Initialize the JSValue.
  LoadGlobalFunctionInitialMap(constructor, scratch1, scratch2);
  sw(scratch1, FieldMemOperand(result, HeapObject::kMapOffset));
  LoadRoot(scratch1, Heap::kEmptyFixedArrayRootIndex);
  sw(scratch1, FieldMemOperand(result, JSObject::kPropertiesOffset));
  sw(scratch1, FieldMemOperand(result, JSObject::kElementsOffset));
  sw(value, FieldMemOperand(result, JSValue::kValueOffset));
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
}
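
// The stores above lay out the four words of a JSValue in order: map,
// properties, elements, and the wrapped value, which is exactly what the
// STATIC_ASSERT on JSValue::kSize double-checks.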


void MacroAssembler::CopyBytes(Register src,
                               Register dst,
                               Register length,
                               Register scratch) {
  Label align_loop_1, word_loop, byte_loop, byte_loop_1, done;

  // Align src before copying in word size chunks.
  Branch(&byte_loop, le, length, Operand(kPointerSize));
  bind(&align_loop_1);
  And(scratch, src, kPointerSize - 1);
  Branch(&word_loop, eq, scratch, Operand(zero_reg));
  lbu(scratch, MemOperand(src));
  Addu(src, src, 1);
  sb(scratch, MemOperand(dst));
  Addu(dst, dst, 1);
  Subu(length, length, Operand(1));
  Branch(&align_loop_1, ne, length, Operand(zero_reg));

  // Copy bytes in word size chunks.
  bind(&word_loop);
  if (emit_debug_code()) {
    And(scratch, src, kPointerSize - 1);
    Assert(eq, kExpectingAlignmentForCopyBytes,
           scratch, Operand(zero_reg));
  }
  Branch(&byte_loop, lt, length, Operand(kPointerSize));
  lw(scratch, MemOperand(src));
  Addu(src, src, kPointerSize);

  // TODO(kalmard) check if this can be optimized to use sw in most cases.
  // Can't use unaligned access - copy byte by byte.
  if (kArchEndian == kLittle) {
    sb(scratch, MemOperand(dst, 0));
    srl(scratch, scratch, 8);
    sb(scratch, MemOperand(dst, 1));
    srl(scratch, scratch, 8);
    sb(scratch, MemOperand(dst, 2));
    srl(scratch, scratch, 8);
    sb(scratch, MemOperand(dst, 3));
  } else {
    sb(scratch, MemOperand(dst, 3));
    srl(scratch, scratch, 8);
    sb(scratch, MemOperand(dst, 2));
    srl(scratch, scratch, 8);
    sb(scratch, MemOperand(dst, 1));
    srl(scratch, scratch, 8);
    sb(scratch, MemOperand(dst, 0));
  }

  Addu(dst, dst, 4);

  Subu(length, length, Operand(kPointerSize));
  Branch(&word_loop);

  // Copy the last bytes if any left.
  bind(&byte_loop);
  Branch(&done, eq, length, Operand(zero_reg));
  bind(&byte_loop_1);
  lbu(scratch, MemOperand(src));
  Addu(src, src, 1);
  sb(scratch, MemOperand(dst));
  Addu(dst, dst, 1);
  Subu(length, length, Operand(1));
  Branch(&byte_loop_1, ne, length, Operand(zero_reg));
  bind(&done);
}
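
// A hedged example of a call (hypothetical registers, not from this file):
// copy a3 bytes from a1 to a0; all four registers are clobbered in the
// process.
//   CopyBytes(a1, a0, a3, t0);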


void MacroAssembler::InitializeFieldsWithFiller(Register current_address,
                                                Register end_address,
                                                Register filler) {
  Label loop, entry;
  Branch(&entry);
  bind(&loop);
  sw(filler, MemOperand(current_address));
  Addu(current_address, current_address, kPointerSize);
  bind(&entry);
  Branch(&loop, ult, current_address, Operand(end_address));
}
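
// Typical use, sketched (illustrative values): fill the untouched fields of a
// freshly allocated object with the undefined sentinel.
//   LoadRoot(t3, Heap::kUndefinedValueRootIndex);
//   InitializeFieldsWithFiller(t0, t1, t3);  // Fills [t0, t1) a word at a time.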


void MacroAssembler::CheckFastElements(Register map,
                                       Register scratch,
                                       Label* fail) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  lbu(scratch, FieldMemOperand(map, Map::kBitField2Offset));
  Branch(fail, hi, scratch,
         Operand(Map::kMaximumBitField2FastHoleyElementValue));
}


void MacroAssembler::CheckFastObjectElements(Register map,
                                             Register scratch,
                                             Label* fail) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  lbu(scratch, FieldMemOperand(map, Map::kBitField2Offset));
  Branch(fail, ls, scratch,
         Operand(Map::kMaximumBitField2FastHoleySmiElementValue));
  Branch(fail, hi, scratch,
         Operand(Map::kMaximumBitField2FastHoleyElementValue));
}


void MacroAssembler::CheckFastSmiElements(Register map,
                                          Register scratch,
                                          Label* fail) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  lbu(scratch, FieldMemOperand(map, Map::kBitField2Offset));
  Branch(fail, hi, scratch,
         Operand(Map::kMaximumBitField2FastHoleySmiElementValue));
}


void MacroAssembler::StoreNumberToDoubleElements(Register value_reg,
                                                 Register key_reg,
                                                 Register elements_reg,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Register scratch3,
                                                 Label* fail,
                                                 int elements_offset) {
  DCHECK(!AreAliased(value_reg, key_reg, elements_reg, scratch1, scratch2,
                     scratch3));
  Label smi_value, done;

  // Handle smi values specially.
  JumpIfSmi(value_reg, &smi_value);

  // Ensure that the object is a heap number.
  CheckMap(value_reg,
           scratch1,
           Heap::kHeapNumberMapRootIndex,
           fail,
           DONT_DO_SMI_CHECK);

  // Double value, turn potential sNaN into qNaN.
  DoubleRegister double_result = f0;
  DoubleRegister double_scratch = f2;

  ldc1(double_result, FieldMemOperand(value_reg, HeapNumber::kValueOffset));
  Branch(USE_DELAY_SLOT, &done);  // Canonicalization is one instruction.
  FPUCanonicalizeNaN(double_result, double_result);

  bind(&smi_value);
  Register untagged_value = scratch2;
  SmiUntag(untagged_value, value_reg);
  mtc1(untagged_value, double_scratch);
  cvt_d_w(double_result, double_scratch);

  bind(&done);
  Addu(scratch1, elements_reg,
       Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag -
               elements_offset));
  Lsa(scratch1, scratch1, key_reg, kDoubleSizeLog2 - kSmiTagSize);
  // scratch1 is now the effective address of the double element.
  sdc1(double_result, MemOperand(scratch1, 0));
}

void MacroAssembler::SubNanPreservePayloadAndSign_s(FloatRegister fd,
                                                    FloatRegister fs,
                                                    FloatRegister ft) {
  FloatRegister dest = fd.is(fs) || fd.is(ft) ? kLithiumScratchDouble : fd;
  Label check_nan, save_payload, done;
  Register scratch1 = t8;
  Register scratch2 = t9;

  sub_s(dest, fs, ft);
  // Check if the result of subtraction is NaN.
  BranchF32(nullptr, &check_nan, eq, fs, ft);
  Branch(USE_DELAY_SLOT, &done);
  dest.is(fd) ? nop() : mov_s(fd, dest);

  bind(&check_nan);
  // Check if first operand is a NaN.
  mfc1(scratch1, fs);
  BranchF32(nullptr, &save_payload, eq, fs, fs);
  // Second operand must be a NaN.
  mfc1(scratch1, ft);

  bind(&save_payload);
  // Preserve payload.
  And(scratch1, scratch1,
      Operand(kSingleSignMask | ((1 << kSingleNaNShift) - 1)));
  mfc1(scratch2, dest);
  And(scratch2, scratch2, Operand(kSingleNaNMask));
  Or(scratch2, scratch2, scratch1);
  mtc1(scratch2, fd);

  bind(&done);
}
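
// Mask arithmetic in the routine above (and its double twin below), unpacked:
// scratch1 keeps the sign bit and the low payload bits of the NaN operand,
// scratch2 keeps the exponent plus quiet bit of the freshly produced NaN, and
// the OR recombines them into a quiet NaN that still carries the original
// payload and sign.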

void MacroAssembler::SubNanPreservePayloadAndSign_d(DoubleRegister fd,
                                                    DoubleRegister fs,
                                                    DoubleRegister ft) {
  FloatRegister dest = fd.is(fs) || fd.is(ft) ? kLithiumScratchDouble : fd;
  Label check_nan, save_payload, done;
  Register scratch1 = t8;
  Register scratch2 = t9;

  sub_d(dest, fs, ft);
  // Check if the result of subtraction is NaN.
  BranchF64(nullptr, &check_nan, eq, fs, ft);
  Branch(USE_DELAY_SLOT, &done);
  dest.is(fd) ? nop() : mov_d(fd, dest);

  bind(&check_nan);
  // Check if first operand is a NaN.
  Mfhc1(scratch1, fs);
  mov_s(dest, fs);
  BranchF64(nullptr, &save_payload, eq, fs, fs);
  // Second operand must be a NaN.
  Mfhc1(scratch1, ft);
  mov_s(dest, ft);

  bind(&save_payload);
  // Preserve payload.
  And(scratch1, scratch1,
      Operand(kDoubleSignMask | ((1 << kDoubleNaNShift) - 1)));
  Mfhc1(scratch2, dest);
  And(scratch2, scratch2, Operand(kDoubleNaNMask));
  Or(scratch2, scratch2, scratch1);
  Move_s(fd, dest);
  Mthc1(scratch2, fd);

  bind(&done);
}

void MacroAssembler::CompareMapAndBranch(Register obj,
                                         Register scratch,
                                         Handle<Map> map,
                                         Label* early_success,
                                         Condition cond,
                                         Label* branch_to) {
  lw(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
  CompareMapAndBranch(scratch, map, early_success, cond, branch_to);
}


void MacroAssembler::CompareMapAndBranch(Register obj_map,
                                         Handle<Map> map,
                                         Label* early_success,
                                         Condition cond,
                                         Label* branch_to) {
  Branch(branch_to, cond, obj_map, Operand(map));
}


void MacroAssembler::CheckMap(Register obj,
                              Register scratch,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }
  Label success;
  CompareMapAndBranch(obj, scratch, map, &success, ne, fail);
  bind(&success);
}


void MacroAssembler::DispatchWeakMap(Register obj, Register scratch1,
                                     Register scratch2, Handle<WeakCell> cell,
                                     Handle<Code> success,
                                     SmiCheckType smi_check_type) {
  Label fail;
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, &fail);
  }
  lw(scratch1, FieldMemOperand(obj, HeapObject::kMapOffset));
  GetWeakValue(scratch2, cell);
  Jump(success, RelocInfo::CODE_TARGET, eq, scratch1, Operand(scratch2));
  bind(&fail);
}


void MacroAssembler::CheckMap(Register obj,
                              Register scratch,
                              Heap::RootListIndex index,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }
  lw(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
  LoadRoot(at, index);
  Branch(fail, ne, scratch, Operand(at));
}

void MacroAssembler::FPUCanonicalizeNaN(const DoubleRegister dst,
                                        const DoubleRegister src) {
  sub_d(dst, src, kDoubleRegZero);
}
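
// Subtracting zero is the usual canonicalization trick: arithmetic on a
// signaling NaN yields a quiet NaN, while every other value (including
// signed zeros and infinities) passes through unchanged.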

void MacroAssembler::GetWeakValue(Register value, Handle<WeakCell> cell) {
  li(value, Operand(cell));
  lw(value, FieldMemOperand(value, WeakCell::kValueOffset));
}


void MacroAssembler::LoadWeakValue(Register value, Handle<WeakCell> cell,
                                   Label* miss) {
  GetWeakValue(value, cell);
  JumpIfSmi(value, miss);
}


void MacroAssembler::MovFromFloatResult(DoubleRegister dst) {
  if (IsMipsSoftFloatABI) {
    if (kArchEndian == kLittle) {
      Move(dst, v0, v1);
    } else {
      Move(dst, v1, v0);
    }
  } else {
    Move(dst, f0);  // Reg f0 is o32 ABI FP return value.
  }
}


void MacroAssembler::MovFromFloatParameter(DoubleRegister dst) {
  if (IsMipsSoftFloatABI) {
    if (kArchEndian == kLittle) {
      Move(dst, a0, a1);
    } else {
      Move(dst, a1, a0);
    }
  } else {
    Move(dst, f12);  // Reg f12 is o32 ABI FP first argument value.
  }
}


void MacroAssembler::MovToFloatParameter(DoubleRegister src) {
  if (!IsMipsSoftFloatABI) {
    Move(f12, src);
  } else {
    if (kArchEndian == kLittle) {
      Move(a0, a1, src);
    } else {
      Move(a1, a0, src);
    }
  }
}


void MacroAssembler::MovToFloatResult(DoubleRegister src) {
  if (!IsMipsSoftFloatABI) {
    Move(f0, src);
  } else {
    if (kArchEndian == kLittle) {
      Move(v0, v1, src);
    } else {
      Move(v1, v0, src);
    }
  }
}


void MacroAssembler::MovToFloatParameters(DoubleRegister src1,
                                          DoubleRegister src2) {
  if (!IsMipsSoftFloatABI) {
    if (src2.is(f12)) {
      DCHECK(!src1.is(f14));
      Move(f14, src2);
      Move(f12, src1);
    } else {
      Move(f12, src1);
      Move(f14, src2);
    }
  } else {
    if (kArchEndian == kLittle) {
      Move(a0, a1, src1);
      Move(a2, a3, src2);
    } else {
      Move(a1, a0, src1);
      Move(a3, a2, src2);
    }
  }
}


// -----------------------------------------------------------------------------
// JavaScript invokes.

void MacroAssembler::PrepareForTailCall(const ParameterCount& callee_args_count,
                                        Register caller_args_count_reg,
                                        Register scratch0, Register scratch1) {
#if DEBUG
  if (callee_args_count.is_reg()) {
    DCHECK(!AreAliased(callee_args_count.reg(), caller_args_count_reg, scratch0,
                       scratch1));
  } else {
    DCHECK(!AreAliased(caller_args_count_reg, scratch0, scratch1));
  }
#endif

  // Calculate the end of the destination area where we will put the arguments
  // after we drop the current frame. We add kPointerSize to count the receiver
  // argument, which is not included in the formal parameter count.
  Register dst_reg = scratch0;
  Lsa(dst_reg, fp, caller_args_count_reg, kPointerSizeLog2);
  Addu(dst_reg, dst_reg,
       Operand(StandardFrameConstants::kCallerSPOffset + kPointerSize));

  Register src_reg = caller_args_count_reg;
  // Calculate the end of the source area. +kPointerSize is for the receiver.
  if (callee_args_count.is_reg()) {
    Lsa(src_reg, sp, callee_args_count.reg(), kPointerSizeLog2);
    Addu(src_reg, src_reg, Operand(kPointerSize));
  } else {
    Addu(src_reg, sp,
         Operand((callee_args_count.immediate() + 1) * kPointerSize));
  }

  if (FLAG_debug_code) {
    Check(lo, kStackAccessBelowStackPointer, src_reg, Operand(dst_reg));
  }

  // Restore caller's frame pointer and return address now as they will be
  // overwritten by the copying loop.
  lw(ra, MemOperand(fp, StandardFrameConstants::kCallerPCOffset));
  lw(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));

  // Now copy callee arguments to the caller frame going backwards to avoid
  // callee arguments corruption (source and destination areas could overlap).

  // Both src_reg and dst_reg are pointing to the word after the one to copy,
  // so they must be pre-decremented in the loop.
  Register tmp_reg = scratch1;
  Label loop, entry;
  Branch(&entry);
  bind(&loop);
  Subu(src_reg, src_reg, Operand(kPointerSize));
  Subu(dst_reg, dst_reg, Operand(kPointerSize));
  lw(tmp_reg, MemOperand(src_reg));
  sw(tmp_reg, MemOperand(dst_reg));
  bind(&entry);
  Branch(&loop, ne, sp, Operand(src_reg));

  // Leave current frame.
  mov(sp, dst_reg);
}
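
// Frame effect, sketched for a concrete (illustrative) case: with two caller
// arguments and one callee argument, the receiver and the callee argument are
// copied down over the dropped frame, ra/fp have already been restored, and sp
// ends up at the base of the new argument area, ready for the tail call.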

void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Label* done,
                                    bool* definitely_mismatches,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  bool definitely_matches = false;
  *definitely_mismatches = false;
  Label regular_invoke;

  // Check whether the expected and actual argument counts match. If not,
  // set up registers according to the contract with ArgumentsAdaptorTrampoline:
  //  a0: actual arguments count
  //  a1: function (passed through to callee)
  //  a2: expected arguments count

  // The code below is made a lot easier because the calling code already sets
  // up actual and expected registers according to the contract if values are
  // passed in registers.
  DCHECK(actual.is_immediate() || actual.reg().is(a0));
  DCHECK(expected.is_immediate() || expected.reg().is(a2));

  if (expected.is_immediate()) {
    DCHECK(actual.is_immediate());
    li(a0, Operand(actual.immediate()));
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip the adaptation code by making it look
        // like we have a match between the expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        *definitely_mismatches = true;
        li(a2, Operand(expected.immediate()));
      }
    }
  } else if (actual.is_immediate()) {
    li(a0, Operand(actual.immediate()));
    Branch(&regular_invoke, eq, expected.reg(), Operand(a0));
  } else {
    Branch(&regular_invoke, eq, expected.reg(), Operand(actual.reg()));
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor));
      Call(adaptor);
      call_wrapper.AfterCall();
      if (!*definitely_mismatches) {
        Branch(done);
      }
    } else {
      Jump(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&regular_invoke);
  }
}


void MacroAssembler::FloodFunctionIfStepping(Register fun, Register new_target,
                                             const ParameterCount& expected,
                                             const ParameterCount& actual) {
  Label skip_flooding;
  ExternalReference last_step_action =
      ExternalReference::debug_last_step_action_address(isolate());
  STATIC_ASSERT(StepFrame > StepIn);
  li(t0, Operand(last_step_action));
  lb(t0, MemOperand(t0));
  Branch(&skip_flooding, lt, t0, Operand(StepIn));
  {
    FrameScope frame(this,
                     has_frame() ? StackFrame::NONE : StackFrame::INTERNAL);
    if (expected.is_reg()) {
      SmiTag(expected.reg());
      Push(expected.reg());
    }
    if (actual.is_reg()) {
      SmiTag(actual.reg());
      Push(actual.reg());
    }
    if (new_target.is_valid()) {
      Push(new_target);
    }
    Push(fun);
    Push(fun);
    CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
    Pop(fun);
    if (new_target.is_valid()) {
      Pop(new_target);
    }
    if (actual.is_reg()) {
      Pop(actual.reg());
      SmiUntag(actual.reg());
    }
    if (expected.is_reg()) {
      Pop(expected.reg());
      SmiUntag(expected.reg());
    }
  }
  bind(&skip_flooding);
}


void MacroAssembler::InvokeFunctionCode(Register function, Register new_target,
                                        const ParameterCount& expected,
                                        const ParameterCount& actual,
                                        InvokeFlag flag,
                                        const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());
  DCHECK(function.is(a1));
  DCHECK_IMPLIES(new_target.is_valid(), new_target.is(a3));

  if (call_wrapper.NeedsDebugStepCheck()) {
    FloodFunctionIfStepping(function, new_target, expected, actual);
  }

  // Clear the new.target register if not given.
  if (!new_target.is_valid()) {
    LoadRoot(a3, Heap::kUndefinedValueRootIndex);
  }

  Label done;
  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, &done, &definitely_mismatches, flag,
                 call_wrapper);
  if (!definitely_mismatches) {
    // We call indirectly through the code field in the function to
    // allow recompilation to take effect without changing any of the
    // call sites.
    Register code = t0;
    lw(code, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(code));
      Call(code);
      call_wrapper.AfterCall();
    } else {
      DCHECK(flag == JUMP_FUNCTION);
      Jump(code);
    }
    // Continue here if InvokePrologue does handle the invocation due to
    // mismatched parameter counts.
    bind(&done);
  }
}
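
// Expected register contract, illustrated (hypothetical call site, not from
// this file; assumes `expected` and `call_wrapper` are in scope): the callee
// goes in a1, new.target in a3, and the actual argument count in a0.
//   ParameterCount actual(a0);
//   InvokeFunctionCode(a1, a3, expected, actual, CALL_FUNCTION, call_wrapper);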


void MacroAssembler::InvokeFunction(Register function,
                                    Register new_target,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  // Contract with called JS functions requires that function is passed in a1.
  DCHECK(function.is(a1));
  Register expected_reg = a2;
  Register temp_reg = t0;

  lw(temp_reg, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
  lw(expected_reg,
     FieldMemOperand(temp_reg,
                     SharedFunctionInfo::kFormalParameterCountOffset));
  sra(expected_reg, expected_reg, kSmiTagSize);

  ParameterCount expected(expected_reg);
  InvokeFunctionCode(function, new_target, expected, actual, flag,
                     call_wrapper);
}


void MacroAssembler::InvokeFunction(Register function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  // Contract with called JS functions requires that function is passed in a1.
  DCHECK(function.is(a1));

  // Get the function and set up the context.
  lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

  InvokeFunctionCode(a1, no_reg, expected, actual, flag, call_wrapper);
}


void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  li(a1, function);
  InvokeFunction(a1, expected, actual, flag, call_wrapper);
}


void MacroAssembler::IsObjectJSStringType(Register object,
                                          Register scratch,
                                          Label* fail) {
  DCHECK(kNotStringTag != 0);

  lw(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
  lbu(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  And(scratch, scratch, Operand(kIsNotStringMask));
  Branch(fail, ne, scratch, Operand(zero_reg));
}


void MacroAssembler::IsObjectNameType(Register object,
                                      Register scratch,
                                      Label* fail) {
  lw(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
  lbu(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  Branch(fail, hi, scratch, Operand(LAST_NAME_TYPE));
}
// ---------------------------------------------------------------------------
// Support functions.


void MacroAssembler::GetMapConstructor(Register result, Register map,
                                       Register temp, Register temp2) {
  Label done, loop;
  lw(result, FieldMemOperand(map, Map::kConstructorOrBackPointerOffset));
  bind(&loop);
  JumpIfSmi(result, &done);
  GetObjectType(result, temp, temp2);
  Branch(&done, ne, temp2, Operand(MAP_TYPE));
  lw(result, FieldMemOperand(result, Map::kConstructorOrBackPointerOffset));
  Branch(&loop);
  bind(&done);
}


void MacroAssembler::TryGetFunctionPrototype(Register function, Register result,
                                             Register scratch, Label* miss) {
  // Get the prototype or initial map from the function.
  lw(result,
     FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  LoadRoot(t8, Heap::kTheHoleValueRootIndex);
  Branch(miss, eq, result, Operand(t8));

  // If the function does not have an initial map, we're done.
  Label done;
  GetObjectType(result, scratch, scratch);
  Branch(&done, ne, scratch, Operand(MAP_TYPE));

  // Get the prototype from the initial map.
  lw(result, FieldMemOperand(result, Map::kPrototypeOffset));

  // All done.
  bind(&done);
}


void MacroAssembler::GetObjectType(Register object,
                                   Register map,
                                   Register type_reg) {
  lw(map, FieldMemOperand(object, HeapObject::kMapOffset));
  lbu(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
}


// -----------------------------------------------------------------------------
// Runtime calls.

void MacroAssembler::CallStub(CodeStub* stub,
                              TypeFeedbackId ast_id,
                              Condition cond,
                              Register r1,
                              const Operand& r2,
                              BranchDelaySlot bd) {
  DCHECK(AllowThisStubCall(stub));  // Stub calls are not allowed in some stubs.
  Call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id,
       cond, r1, r2, bd);
}


void MacroAssembler::TailCallStub(CodeStub* stub,
                                  Condition cond,
                                  Register r1,
                                  const Operand& r2,
                                  BranchDelaySlot bd) {
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond, r1, r2, bd);
}


bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
  return has_frame_ || !stub->SometimesSetsUpAFrame();
}


void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // If the hash field contains an array index, pick it out. The assert checks
  // that the constants for the maximum number of digits for an array index
  // cached in the hash field and the number of bits reserved for it do not
  // conflict.
  DCHECK(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  DecodeFieldToSmi<String::ArrayIndexValueBits>(index, hash);
}


void MacroAssembler::ObjectToDoubleFPURegister(Register object,
                                               FPURegister result,
                                               Register scratch1,
                                               Register scratch2,
                                               Register heap_number_map,
                                               Label* not_number,
                                               ObjectToDoubleFlags flags) {
  Label done;
  if ((flags & OBJECT_NOT_SMI) == 0) {
    Label not_smi;
    JumpIfNotSmi(object, &not_smi);
    // Remove smi tag and convert to double.
    sra(scratch1, object, kSmiTagSize);
    mtc1(scratch1, result);
    cvt_d_w(result, result);
    Branch(&done);
    bind(&not_smi);
  }
  // Check for heap number and load double value from it.
  lw(scratch1, FieldMemOperand(object, HeapObject::kMapOffset));
  Branch(not_number, ne, scratch1, Operand(heap_number_map));

  if ((flags & AVOID_NANS_AND_INFINITIES) != 0) {
    // If exponent is all ones the number is either a NaN or +/-Infinity.
    Register exponent = scratch1;
    Register mask_reg = scratch2;
    lw(exponent, FieldMemOperand(object, HeapNumber::kExponentOffset));
    li(mask_reg, HeapNumber::kExponentMask);

    And(exponent, exponent, mask_reg);
    Branch(not_number, eq, exponent, Operand(mask_reg));
  }
  ldc1(result, FieldMemOperand(object, HeapNumber::kValueOffset));
  bind(&done);
}


void MacroAssembler::SmiToDoubleFPURegister(Register smi,
                                            FPURegister value,
                                            Register scratch1) {
  sra(scratch1, smi, kSmiTagSize);
  mtc1(scratch1, value);
  cvt_d_w(value, value);
}


static inline void BranchOvfHelper(MacroAssembler* masm, Register overflow_dst,
                                   Label* overflow_label,
                                   Label* no_overflow_label) {
  DCHECK(overflow_label || no_overflow_label);
  if (!overflow_label) {
    DCHECK(no_overflow_label);
    masm->Branch(no_overflow_label, ge, overflow_dst, Operand(zero_reg));
  } else {
    masm->Branch(overflow_label, lt, overflow_dst, Operand(zero_reg));
    if (no_overflow_label) masm->Branch(no_overflow_label);
  }
}
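
// The Add/SubBranchOvf routines below reduce signed-overflow detection to the
// sign bit of a scratch register. For dst = left + right, overflow occurred
// iff both operands share a sign and the sum's sign differs, i.e.
// (dst ^ left) & (dst ^ right) is negative, which is the value BranchOvfHelper
// branches on.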


void MacroAssembler::AddBranchOvf(Register dst, Register left,
                                  const Operand& right, Label* overflow_label,
                                  Label* no_overflow_label, Register scratch) {
  if (right.is_reg()) {
    AddBranchOvf(dst, left, right.rm(), overflow_label, no_overflow_label,
                 scratch);
  } else {
    if (IsMipsArchVariant(kMips32r6)) {
      Register right_reg = t9;
      DCHECK(!left.is(right_reg));
      li(right_reg, Operand(right));
      AddBranchOvf(dst, left, right_reg, overflow_label, no_overflow_label);
    } else {
      Register overflow_dst = t9;
      DCHECK(!dst.is(scratch));
      DCHECK(!dst.is(overflow_dst));
      DCHECK(!scratch.is(overflow_dst));
      DCHECK(!left.is(overflow_dst));
      if (dst.is(left)) {
        mov(scratch, left);                  // Preserve left.
        Addu(dst, left, right.immediate());  // Left is overwritten.
        xor_(scratch, dst, scratch);         // Original left.
        // Load right since xori takes uint16 as immediate.
        Addu(overflow_dst, zero_reg, right);
        xor_(overflow_dst, dst, overflow_dst);
        and_(overflow_dst, overflow_dst, scratch);
      } else {
        Addu(dst, left, right.immediate());
        xor_(overflow_dst, dst, left);
        // Load right since xori takes uint16 as immediate.
        Addu(scratch, zero_reg, right);
        xor_(scratch, dst, scratch);
        and_(overflow_dst, scratch, overflow_dst);
      }
      BranchOvfHelper(this, overflow_dst, overflow_label, no_overflow_label);
    }
  }
}


void MacroAssembler::AddBranchOvf(Register dst, Register left, Register right,
                                  Label* overflow_label,
                                  Label* no_overflow_label, Register scratch) {
  if (IsMipsArchVariant(kMips32r6)) {
    if (!overflow_label) {
      DCHECK(no_overflow_label);
      DCHECK(!dst.is(scratch));
      Register left_reg = left.is(dst) ? scratch : left;
      Register right_reg = right.is(dst) ? t9 : right;
      DCHECK(!dst.is(left_reg));
      DCHECK(!dst.is(right_reg));
      Move(left_reg, left);
      Move(right_reg, right);
      addu(dst, left, right);
      bnvc(left_reg, right_reg, no_overflow_label);
    } else {
      bovc(left, right, overflow_label);
      addu(dst, left, right);
      if (no_overflow_label) bc(no_overflow_label);
    }
  } else {
    Register overflow_dst = t9;
    DCHECK(!dst.is(scratch));
    DCHECK(!dst.is(overflow_dst));
    DCHECK(!scratch.is(overflow_dst));
    DCHECK(!left.is(overflow_dst));
    DCHECK(!right.is(overflow_dst));
    DCHECK(!left.is(scratch));
    DCHECK(!right.is(scratch));

    if (left.is(right) && dst.is(left)) {
      mov(overflow_dst, right);
      right = overflow_dst;
    }

    if (dst.is(left)) {
      mov(scratch, left);           // Preserve left.
      addu(dst, left, right);       // Left is overwritten.
      xor_(scratch, dst, scratch);  // Original left.
      xor_(overflow_dst, dst, right);
      and_(overflow_dst, overflow_dst, scratch);
    } else if (dst.is(right)) {
      mov(scratch, right);          // Preserve right.
      addu(dst, left, right);       // Right is overwritten.
      xor_(scratch, dst, scratch);  // Original right.
      xor_(overflow_dst, dst, left);
      and_(overflow_dst, overflow_dst, scratch);
    } else {
      addu(dst, left, right);
      xor_(overflow_dst, dst, left);
      xor_(scratch, dst, right);
      and_(overflow_dst, scratch, overflow_dst);
    }
    BranchOvfHelper(this, overflow_dst, overflow_label, no_overflow_label);
  }
}


void MacroAssembler::SubBranchOvf(Register dst, Register left,
                                  const Operand& right, Label* overflow_label,
                                  Label* no_overflow_label, Register scratch) {
  DCHECK(overflow_label || no_overflow_label);
  if (right.is_reg()) {
    SubBranchOvf(dst, left, right.rm(), overflow_label, no_overflow_label,
                 scratch);
  } else {
    Register overflow_dst = t9;
    DCHECK(!dst.is(scratch));
    DCHECK(!dst.is(overflow_dst));
    DCHECK(!scratch.is(overflow_dst));
    DCHECK(!left.is(overflow_dst));
    DCHECK(!left.is(scratch));
    if (dst.is(left)) {
      mov(scratch, left);                  // Preserve left.
      Subu(dst, left, right.immediate());  // Left is overwritten.
      // Load right since xori takes uint16 as immediate.
      Addu(overflow_dst, zero_reg, right);
      xor_(overflow_dst, scratch, overflow_dst);  // scratch is original left.
      xor_(scratch, dst, scratch);                // scratch is original left.
      and_(overflow_dst, scratch, overflow_dst);
    } else {
      Subu(dst, left, right);
      xor_(overflow_dst, dst, left);
      // Load right since xori takes uint16 as immediate.
      Addu(scratch, zero_reg, right);
      xor_(scratch, left, scratch);
      and_(overflow_dst, scratch, overflow_dst);
    }
    BranchOvfHelper(this, overflow_dst, overflow_label, no_overflow_label);
  }
}


void MacroAssembler::SubBranchOvf(Register dst, Register left, Register right,
                                  Label* overflow_label,
                                  Label* no_overflow_label, Register scratch) {
  DCHECK(overflow_label || no_overflow_label);
  Register overflow_dst = t9;
  DCHECK(!dst.is(scratch));
  DCHECK(!dst.is(overflow_dst));
  DCHECK(!scratch.is(overflow_dst));
  DCHECK(!overflow_dst.is(left));
  DCHECK(!overflow_dst.is(right));
  DCHECK(!scratch.is(left));
  DCHECK(!scratch.is(right));

  // This happens with some crankshaft code. Since Subu works fine if
  // left == right, let's not make that restriction here.
  if (left.is(right)) {
    mov(dst, zero_reg);
    if (no_overflow_label) {
      Branch(no_overflow_label);
    }
  }

  if (dst.is(left)) {
    mov(scratch, left);                // Preserve left.
    subu(dst, left, right);            // Left is overwritten.
    xor_(overflow_dst, dst, scratch);  // scratch is original left.
    xor_(scratch, scratch, right);     // scratch is original left.
    and_(overflow_dst, scratch, overflow_dst);
  } else if (dst.is(right)) {
    mov(scratch, right);     // Preserve right.
    subu(dst, left, right);  // Right is overwritten.
    xor_(overflow_dst, dst, left);
    xor_(scratch, left, scratch);  // Original right.
    and_(overflow_dst, scratch, overflow_dst);
  } else {
    subu(dst, left, right);
    xor_(overflow_dst, dst, left);
    xor_(scratch, left, right);
    and_(overflow_dst, scratch, overflow_dst);
  }
  BranchOvfHelper(this, overflow_dst, overflow_label, no_overflow_label);
}
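
// Subtraction mirrors the addition trick: dst = left - right overflows iff
// the operands have different signs and the result's sign differs from
// left's, i.e. (dst ^ left) & (left ^ right) is negative.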


void MacroAssembler::CallRuntime(const Runtime::Function* f, int num_arguments,
                                 SaveFPRegsMode save_doubles,
                                 BranchDelaySlot bd) {
  // All parameters are on the stack. v0 has the return value after call.

  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  CHECK(f->nargs < 0 || f->nargs == num_arguments);

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  PrepareCEntryArgs(num_arguments);
  PrepareCEntryFunction(ExternalReference(f, isolate()));
  CEntryStub stub(isolate(), 1, save_doubles);
  CallStub(&stub, TypeFeedbackId::None(), al, zero_reg, Operand(zero_reg), bd);
}


void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments,
                                           BranchDelaySlot bd) {
  PrepareCEntryArgs(num_arguments);
  PrepareCEntryFunction(ext);

  CEntryStub stub(isolate(), 1);
  CallStub(&stub, TypeFeedbackId::None(), al, zero_reg, Operand(zero_reg), bd);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid) {
  const Runtime::Function* function = Runtime::FunctionForId(fid);
  DCHECK_EQ(1, function->result_size);
  if (function->nargs >= 0) {
    PrepareCEntryArgs(function->nargs);
  }
  JumpToExternalReference(ExternalReference(fid, isolate()));
}


void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin,
                                             BranchDelaySlot bd) {
  PrepareCEntryFunction(builtin);
  CEntryStub stub(isolate(), 1);
  Jump(stub.GetCode(),
       RelocInfo::CODE_TARGET,
       al,
       zero_reg,
       Operand(zero_reg),
       bd);
}


void MacroAssembler::SetCounter(StatsCounter* counter, int value,
                                Register scratch1, Register scratch2) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    li(scratch1, Operand(value));
    li(scratch2, Operand(ExternalReference(counter)));
    sw(scratch1, MemOperand(scratch2));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value,
                                      Register scratch1, Register scratch2) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    li(scratch2, Operand(ExternalReference(counter)));
    lw(scratch1, MemOperand(scratch2));
    Addu(scratch1, scratch1, Operand(value));
    sw(scratch1, MemOperand(scratch2));
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value,
                                      Register scratch1, Register scratch2) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    li(scratch2, Operand(ExternalReference(counter)));
    lw(scratch1, MemOperand(scratch2));
    Subu(scratch1, scratch1, Operand(value));
    sw(scratch1, MemOperand(scratch2));
  }
}
5703
5704
Steve Block6ded16b2010-05-10 14:33:55 +01005705// -----------------------------------------------------------------------------
Ben Murdoch257744e2011-11-30 15:57:28 +00005706// Debugging.
Andrei Popescu31002712010-02-23 13:46:05 +00005707
void MacroAssembler::Assert(Condition cc, BailoutReason reason,
                            Register rs, Operand rt) {
  if (emit_debug_code())
    Check(cc, reason, rs, rt);
}


void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    DCHECK(!elements.is(at));
    Label ok;
    push(elements);
    lw(elements, FieldMemOperand(elements, HeapObject::kMapOffset));
    LoadRoot(at, Heap::kFixedArrayMapRootIndex);
    Branch(&ok, eq, elements, Operand(at));
    LoadRoot(at, Heap::kFixedDoubleArrayMapRootIndex);
    Branch(&ok, eq, elements, Operand(at));
    LoadRoot(at, Heap::kFixedCOWArrayMapRootIndex);
    Branch(&ok, eq, elements, Operand(at));
    Abort(kJSObjectWithFastElementsMapHasSlowElements);
    bind(&ok);
    pop(elements);
  }
}


void MacroAssembler::Check(Condition cc, BailoutReason reason,
                           Register rs, Operand rt) {
  Label L;
  Branch(&L, cc, rs, rt);
  Abort(reason);
  // Will not return here.
  bind(&L);
}


void MacroAssembler::Abort(BailoutReason reason) {
  Label abort_start;
  bind(&abort_start);
#ifdef DEBUG
  const char* msg = GetBailoutReason(reason);
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }

  if (FLAG_trap_on_abort) {
    stop(msg);
    return;
  }
#endif

  li(a0, Operand(Smi::FromInt(reason)));
  push(a0);
  // Disable stub call restrictions to always allow calls to abort.
  if (!has_frame_) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    CallRuntime(Runtime::kAbort);
  } else {
    CallRuntime(Runtime::kAbort);
  }
  // Will not return here.
  if (is_trampoline_pool_blocked()) {
    // If the calling code cares about the exact number of
    // instructions generated, we insert padding here to keep the size
    // of the Abort macro constant.
    // Currently in debug mode with debug_code enabled the number of
    // generated instructions is 10, so we use this as a maximum value.
    static const int kExpectedAbortInstructions = 10;
    int abort_instructions = InstructionsGeneratedSince(&abort_start);
    DCHECK(abort_instructions <= kExpectedAbortInstructions);
    while (abort_instructions++ < kExpectedAbortInstructions) {
      nop();
    }
  }
}

void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    lw(dst, MemOperand(cp, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    for (int i = 1; i < context_chain_length; i++) {
      lw(dst, MemOperand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    }
  } else {
    // Slot is in the current function context. Move it into the
    // destination register in case we store into it (the write barrier
    // cannot be allowed to destroy the context register cp).
    Move(dst, cp);
  }
}

void MacroAssembler::LoadTransitionedArrayMapConditional(
    ElementsKind expected_kind,
    ElementsKind transitioned_kind,
    Register map_in_out,
    Register scratch,
    Label* no_map_match) {
  DCHECK(IsFastElementsKind(expected_kind));
  DCHECK(IsFastElementsKind(transitioned_kind));

  // Check that the function's map is the same as the expected cached map.
  lw(scratch, NativeContextMemOperand());
  lw(at, ContextMemOperand(scratch, Context::ArrayMapIndex(expected_kind)));
  Branch(no_map_match, ne, map_in_out, Operand(at));

  // Use the transitioned cached map.
  lw(map_in_out,
     ContextMemOperand(scratch, Context::ArrayMapIndex(transitioned_kind)));
}


void MacroAssembler::LoadNativeContextSlot(int index, Register dst) {
  lw(dst, NativeContextMemOperand());
  lw(dst, ContextMemOperand(dst, index));
}


void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map,
                                                  Register scratch) {
  // Load the initial map. The global functions all have initial maps.
  lw(map, FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    CheckMap(map, scratch, Heap::kMetaMapRootIndex, &fail, DO_SMI_CHECK);
    Branch(&ok);
    bind(&fail);
    Abort(kGlobalFunctionsMustHaveInitialMap);
    bind(&ok);
  }
}

void MacroAssembler::StubPrologue(StackFrame::Type type) {
  li(at, Operand(Smi::FromInt(type)));
  PushCommonFrame(at);
}


void MacroAssembler::Prologue(bool code_pre_aging) {
  PredictableCodeSizeScope predictable_code_size_scope(
      this, kNoCodeAgeSequenceLength);
  // The following three instructions must remain together and unmodified
  // for code aging to work properly.
  if (code_pre_aging) {
    // Pre-age the code.
    Code* stub = Code::GetPreAgedCodeAgeStub(isolate());
    nop(Assembler::CODE_AGE_MARKER_NOP);
    // Load the stub address to t9 and call it;
    // GetCodeAgeAndParity() extracts the stub address from this instruction.
    li(t9,
       Operand(reinterpret_cast<uint32_t>(stub->instruction_start())),
       CONSTANT_SIZE);
    nop();  // Prevent jalr to jal optimization.
    jalr(t9, a0);
    nop();  // Branch delay slot nop.
    nop();  // Pad the empty space.
  } else {
    PushStandardFrame(a1);
    nop(Assembler::CODE_AGE_SEQUENCE_NOP);
  }
}

void MacroAssembler::EmitLoadTypeFeedbackVector(Register vector) {
  lw(vector, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  lw(vector, FieldMemOperand(vector, JSFunction::kLiteralsOffset));
  lw(vector, FieldMemOperand(vector, LiteralsArray::kFeedbackVectorOffset));
}


void MacroAssembler::EnterFrame(StackFrame::Type type,
                                bool load_constant_pool_pointer_reg) {
  // Out-of-line constant pool not implemented on mips.
  UNREACHABLE();
}


void MacroAssembler::EnterFrame(StackFrame::Type type) {
  int stack_offset, fp_offset;
  if (type == StackFrame::INTERNAL) {
    stack_offset = -4 * kPointerSize;
    fp_offset = 2 * kPointerSize;
  } else {
    stack_offset = -3 * kPointerSize;
    fp_offset = 1 * kPointerSize;
  }
  addiu(sp, sp, stack_offset);
  stack_offset = -stack_offset - kPointerSize;
  sw(ra, MemOperand(sp, stack_offset));
  stack_offset -= kPointerSize;
  sw(fp, MemOperand(sp, stack_offset));
  stack_offset -= kPointerSize;
  li(t9, Operand(Smi::FromInt(type)));
  sw(t9, MemOperand(sp, stack_offset));
  if (type == StackFrame::INTERNAL) {
    DCHECK_EQ(stack_offset, kPointerSize);
    li(t9, Operand(CodeObject()));
    sw(t9, MemOperand(sp, 0));
  } else {
    DCHECK_EQ(stack_offset, 0);
  }
  // Adjust FP to point to saved FP.
  Addu(fp, sp, Operand(fp_offset));
}
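
// The frame built by EnterFrame above, with word offsets relative to the new
// fp (kPointerSize == 4 on mips32):
//   [fp + 4]  saved ra
//   [fp + 0]  saved caller fp
//   [fp - 4]  frame type, as a Smi
//   [fp - 8]  code object (StackFrame::INTERNAL frames only)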


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  addiu(sp, fp, 2 * kPointerSize);
  lw(ra, MemOperand(fp, 1 * kPointerSize));
  lw(fp, MemOperand(fp, 0 * kPointerSize));
}

void MacroAssembler::EnterExitFrame(bool save_doubles, int stack_space) {
  // Set up the frame structure on the stack.
  STATIC_ASSERT(2 * kPointerSize == ExitFrameConstants::kCallerSPDisplacement);
  STATIC_ASSERT(1 * kPointerSize == ExitFrameConstants::kCallerPCOffset);
  STATIC_ASSERT(0 * kPointerSize == ExitFrameConstants::kCallerFPOffset);

  // This is how the stack will look:
  // fp + 2 (==kCallerSPDisplacement) - old stack's end
  // [fp + 1 (==kCallerPCOffset)] - saved old ra
  // [fp + 0 (==kCallerFPOffset)] - saved old fp
  // [fp - 1] - StackFrame::EXIT Smi
  // [fp - 2 (==kSPOffset)] - sp of the called function
  // [fp - 3 (==kCodeOffset)] - CodeObject
  // fp - (2 + stack_space + alignment) == sp == [fp - kSPOffset] - top of the
  //   new stack (will contain saved ra)

  // Save registers and reserve room for saved entry sp and code object.
  addiu(sp, sp, -2 * kPointerSize - ExitFrameConstants::kFixedFrameSizeFromFp);
  sw(ra, MemOperand(sp, 4 * kPointerSize));
  sw(fp, MemOperand(sp, 3 * kPointerSize));
  li(at, Operand(Smi::FromInt(StackFrame::EXIT)));
  sw(at, MemOperand(sp, 2 * kPointerSize));
  // Set up new frame pointer.
  addiu(fp, sp, ExitFrameConstants::kFixedFrameSizeFromFp);

  if (emit_debug_code()) {
    sw(zero_reg, MemOperand(fp, ExitFrameConstants::kSPOffset));
  }

  // Accessed from ExitFrame::code_slot.
  li(t8, Operand(CodeObject()), CONSTANT_SIZE);
  sw(t8, MemOperand(fp, ExitFrameConstants::kCodeOffset));

  // Save the frame pointer and the context in top.
  li(t8, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
  sw(fp, MemOperand(t8));
  li(t8, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
  sw(cp, MemOperand(t8));

  const int frame_alignment = MacroAssembler::ActivationFrameAlignment();
  if (save_doubles) {
    // The stack must be aligned to 0 modulo 8 for stores with sdc1.
    DCHECK(kDoubleSize == frame_alignment);
    if (frame_alignment > 0) {
      DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
      And(sp, sp, Operand(-frame_alignment));  // Align stack.
    }
    int space = FPURegister::kMaxNumRegisters * kDoubleSize;
    Subu(sp, sp, Operand(space));
    // Remember: we only need to save every 2nd double FPU value.
    for (int i = 0; i < FPURegister::kMaxNumRegisters; i += 2) {
      FPURegister reg = FPURegister::from_code(i);
      sdc1(reg, MemOperand(sp, i * kDoubleSize));
    }
  }

  // Reserve place for the return address, stack space and an optional slot
  // (used by the DirectCEntryStub to hold the return value if a struct is
  // returned) and align the frame preparing for calling the runtime function.
  DCHECK(stack_space >= 0);
  Subu(sp, sp, Operand((stack_space + 2) * kPointerSize));
  if (frame_alignment > 0) {
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    And(sp, sp, Operand(-frame_alignment));  // Align stack.
  }

  // Set the exit frame sp value to point just before the return address
  // location.
  addiu(at, sp, kPointerSize);
  sw(at, MemOperand(fp, ExitFrameConstants::kSPOffset));
}

void MacroAssembler::LeaveExitFrame(bool save_doubles, Register argument_count,
                                    bool restore_context, bool do_return,
                                    bool argument_count_is_length) {
  // Optionally restore all double registers.
  if (save_doubles) {
    // Remember: we only need to restore every 2nd double FPU value.
    lw(t8, MemOperand(fp, ExitFrameConstants::kSPOffset));
    for (int i = 0; i < FPURegister::kMaxNumRegisters; i += 2) {
      FPURegister reg = FPURegister::from_code(i);
      ldc1(reg, MemOperand(t8, i * kDoubleSize + kPointerSize));
    }
  }

  // Clear top frame.
  li(t8, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
  sw(zero_reg, MemOperand(t8));

  // Restore current context from top and clear it in debug mode.
  if (restore_context) {
    li(t8, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
    lw(cp, MemOperand(t8));
  }
#ifdef DEBUG
  li(t8, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
  sw(a3, MemOperand(t8));
#endif

  // Pop the arguments, restore registers, and return.
  mov(sp, fp);  // Respect ABI stack constraint.
  lw(fp, MemOperand(sp, ExitFrameConstants::kCallerFPOffset));
  lw(ra, MemOperand(sp, ExitFrameConstants::kCallerPCOffset));

  if (argument_count.is_valid()) {
    if (argument_count_is_length) {
      addu(sp, sp, argument_count);
    } else {
      Lsa(sp, sp, argument_count, kPointerSizeLog2, t8);
    }
  }

  if (do_return) {
    Ret(USE_DELAY_SLOT);
    // If returning, the instruction in the delay slot will be the addiu below.
  }
  addiu(sp, sp, 8);
}

void MacroAssembler::InitializeNewString(Register string,
                                         Register length,
                                         Heap::RootListIndex map_index,
                                         Register scratch1,
                                         Register scratch2) {
  sll(scratch1, length, kSmiTagSize);
  LoadRoot(scratch2, map_index);
  sw(scratch1, FieldMemOperand(string, String::kLengthOffset));
  li(scratch1, Operand(String::kEmptyHashField));
  sw(scratch2, FieldMemOperand(string, HeapObject::kMapOffset));
  sw(scratch1, FieldMemOperand(string, String::kHashFieldOffset));
}

int MacroAssembler::ActivationFrameAlignment() {
#if V8_HOST_ARCH_MIPS
  // Running on the real platform. Use the alignment as mandated by the local
  // environment.
  // Note: This will break if we ever start generating snapshots on one Mips
  // platform for another Mips platform with a different alignment.
  return base::OS::ActivationFrameAlignment();
#else  // V8_HOST_ARCH_MIPS
  // If we are using the simulator then we should always align to the expected
  // alignment. As the simulator is used to generate snapshots we do not know
  // if the target platform will need alignment, so this is controlled from a
  // flag.
  return FLAG_sim_stack_alignment;
#endif  // V8_HOST_ARCH_MIPS
}


void MacroAssembler::AssertStackIsAligned() {
  if (emit_debug_code()) {
    const int frame_alignment = ActivationFrameAlignment();
    const int frame_alignment_mask = frame_alignment - 1;

    if (frame_alignment > kPointerSize) {
      Label alignment_as_expected;
      DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
      andi(at, sp, frame_alignment_mask);
      Branch(&alignment_as_expected, eq, at, Operand(zero_reg));
      // Don't use Check here, as it will call Runtime_Abort re-entering here.
      stop("Unexpected stack alignment");
      bind(&alignment_as_expected);
    }
  }
}


void MacroAssembler::JumpIfNotPowerOfTwoOrZero(
    Register reg,
    Register scratch,
    Label* not_power_of_two_or_zero) {
  Subu(scratch, reg, Operand(1));
  Branch(USE_DELAY_SLOT, not_power_of_two_or_zero, lt,
         scratch, Operand(zero_reg));
  and_(at, scratch, reg);  // In the delay slot.
  Branch(not_power_of_two_or_zero, ne, at, Operand(zero_reg));
}
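
// The test above is the classic x & (x - 1) trick: for x > 0 the AND clears
// the lowest set bit, so the result is zero exactly when x has a single bit
// set, e.g. 8 & 7 == 0 (a power of two) but 6 & 5 == 4 (not one). Zero and
// negative values are caught first by the signed x - 1 < 0 branch.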


void MacroAssembler::SmiTagCheckOverflow(Register reg, Register overflow) {
  DCHECK(!reg.is(overflow));
  mov(overflow, reg);  // Save original value.
  SmiTag(reg);
  xor_(overflow, overflow, reg);  // Overflow if (value ^ 2 * value) < 0.
}
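
// Smi tagging is a one-bit left shift, i.e. a doubling; it overflows exactly
// when the shift changes the sign bit, and 'value ^ (value << 1)' has its own
// sign bit set precisely in that case. E.g. 0x40000000 tags to 0x80000000, and
// 0x40000000 ^ 0x80000000 == 0xC0000000 < 0, flagging the overflow.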


void MacroAssembler::SmiTagCheckOverflow(Register dst,
                                         Register src,
                                         Register overflow) {
  if (dst.is(src)) {
    // Fall back to slower case.
    SmiTagCheckOverflow(dst, overflow);
  } else {
    DCHECK(!dst.is(src));
    DCHECK(!dst.is(overflow));
    DCHECK(!src.is(overflow));
    SmiTag(dst, src);
    xor_(overflow, dst, src);  // Overflow if (value ^ 2 * value) < 0.
  }
}


void MacroAssembler::UntagAndJumpIfSmi(Register dst,
                                       Register src,
                                       Label* smi_case) {
  JumpIfSmi(src, smi_case, at, USE_DELAY_SLOT);
  SmiUntag(dst, src);
}


void MacroAssembler::UntagAndJumpIfNotSmi(Register dst,
                                          Register src,
                                          Label* non_smi_case) {
  JumpIfNotSmi(src, non_smi_case, at, USE_DELAY_SLOT);
  SmiUntag(dst, src);
}

void MacroAssembler::JumpIfSmi(Register value,
                               Label* smi_label,
                               Register scratch,
                               BranchDelaySlot bd) {
  DCHECK_EQ(0, kSmiTag);
  andi(scratch, value, kSmiTagMask);
  Branch(bd, smi_label, eq, scratch, Operand(zero_reg));
}

void MacroAssembler::JumpIfNotSmi(Register value,
                                  Label* not_smi_label,
                                  Register scratch,
                                  BranchDelaySlot bd) {
  DCHECK_EQ(0, kSmiTag);
  andi(scratch, value, kSmiTagMask);
  Branch(bd, not_smi_label, ne, scratch, Operand(zero_reg));
}


void MacroAssembler::JumpIfNotBothSmi(Register reg1,
                                      Register reg2,
                                      Label* on_not_both_smi) {
  STATIC_ASSERT(kSmiTag == 0);
  DCHECK_EQ(1, kSmiTagMask);
  or_(at, reg1, reg2);
  JumpIfNotSmi(at, on_not_both_smi);
}


void MacroAssembler::JumpIfEitherSmi(Register reg1,
                                     Register reg2,
                                     Label* on_either_smi) {
  STATIC_ASSERT(kSmiTag == 0);
  DCHECK_EQ(1, kSmiTagMask);
  // Both tag bits must be 1 (i.e. neither value is a Smi) for the jump to be
  // skipped.
  and_(at, reg1, reg2);
  JumpIfSmi(at, on_either_smi);
}

void MacroAssembler::AssertNotNumber(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    andi(at, object, kSmiTagMask);
    Check(ne, kOperandIsANumber, at, Operand(zero_reg));
    GetObjectType(object, t8, t8);
    Check(ne, kOperandIsNotANumber, t8, Operand(HEAP_NUMBER_TYPE));
  }
}

void MacroAssembler::AssertNotSmi(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    andi(at, object, kSmiTagMask);
    Check(ne, kOperandIsASmi, at, Operand(zero_reg));
  }
}


void MacroAssembler::AssertSmi(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    andi(at, object, kSmiTagMask);
    Check(eq, kOperandIsASmi, at, Operand(zero_reg));
  }
}


void MacroAssembler::AssertString(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    SmiTst(object, t8);
    Check(ne, kOperandIsASmiAndNotAString, t8, Operand(zero_reg));
    GetObjectType(object, t8, t8);
    Check(lo, kOperandIsNotAString, t8, Operand(FIRST_NONSTRING_TYPE));
  }
}


void MacroAssembler::AssertName(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    SmiTst(object, t8);
    Check(ne, kOperandIsASmiAndNotAName, t8, Operand(zero_reg));
    GetObjectType(object, t8, t8);
    Check(le, kOperandIsNotAName, t8, Operand(LAST_NAME_TYPE));
  }
}

void MacroAssembler::AssertFunction(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    SmiTst(object, t8);
    Check(ne, kOperandIsASmiAndNotAFunction, t8, Operand(zero_reg));
    GetObjectType(object, t8, t8);
    Check(eq, kOperandIsNotAFunction, t8, Operand(JS_FUNCTION_TYPE));
  }
}


void MacroAssembler::AssertBoundFunction(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    SmiTst(object, t8);
    Check(ne, kOperandIsASmiAndNotABoundFunction, t8, Operand(zero_reg));
    GetObjectType(object, t8, t8);
    Check(eq, kOperandIsNotABoundFunction, t8, Operand(JS_BOUND_FUNCTION_TYPE));
  }
}

void MacroAssembler::AssertGeneratorObject(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    SmiTst(object, t8);
    Check(ne, kOperandIsASmiAndNotAGeneratorObject, t8, Operand(zero_reg));
    GetObjectType(object, t8, t8);
    Check(eq, kOperandIsNotAGeneratorObject, t8,
          Operand(JS_GENERATOR_OBJECT_TYPE));
  }
}

void MacroAssembler::AssertReceiver(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    SmiTst(object, t8);
    Check(ne, kOperandIsASmiAndNotAReceiver, t8, Operand(zero_reg));
    GetObjectType(object, t8, t8);
    Check(ge, kOperandIsNotAReceiver, t8, Operand(FIRST_JS_RECEIVER_TYPE));
  }
}

void MacroAssembler::AssertUndefinedOrAllocationSite(Register object,
                                                     Register scratch) {
  if (emit_debug_code()) {
    Label done_checking;
    AssertNotSmi(object);
    LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
    Branch(&done_checking, eq, object, Operand(scratch));
    lw(t8, FieldMemOperand(object, HeapObject::kMapOffset));
    LoadRoot(scratch, Heap::kAllocationSiteMapRootIndex);
    Assert(eq, kExpectedUndefinedOrCell, t8, Operand(scratch));
    bind(&done_checking);
  }
}


void MacroAssembler::AssertIsRoot(Register reg, Heap::RootListIndex index) {
  if (emit_debug_code()) {
    DCHECK(!reg.is(at));
    LoadRoot(at, index);
    Check(eq, kHeapNumberMapRegisterClobbered, reg, Operand(at));
  }
}

void MacroAssembler::JumpIfNotHeapNumber(Register object,
                                         Register heap_number_map,
                                         Register scratch,
                                         Label* on_not_heap_number) {
  lw(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
  AssertIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
  Branch(on_not_heap_number, ne, scratch, Operand(heap_number_map));
}


void MacroAssembler::JumpIfNonSmisNotBothSequentialOneByteStrings(
    Register first, Register second, Register scratch1, Register scratch2,
    Label* failure) {
  // Test that both first and second are sequential one-byte strings.
  // Assume that they are non-smis.
  lw(scratch1, FieldMemOperand(first, HeapObject::kMapOffset));
  lw(scratch2, FieldMemOperand(second, HeapObject::kMapOffset));
  lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  lbu(scratch2, FieldMemOperand(scratch2, Map::kInstanceTypeOffset));

  JumpIfBothInstanceTypesAreNotSequentialOneByte(scratch1, scratch2, scratch1,
                                                 scratch2, failure);
}

void MacroAssembler::JumpIfNotBothSequentialOneByteStrings(Register first,
                                                           Register second,
                                                           Register scratch1,
                                                           Register scratch2,
                                                           Label* failure) {
  // Check that neither is a smi.
  STATIC_ASSERT(kSmiTag == 0);
  And(scratch1, first, Operand(second));
  JumpIfSmi(scratch1, failure);
  JumpIfNonSmisNotBothSequentialOneByteStrings(first, second, scratch1,
                                               scratch2, failure);
}

void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialOneByte(
    Register first, Register second, Register scratch1, Register scratch2,
    Label* failure) {
  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask;
  const int kFlatOneByteStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;
  DCHECK(kFlatOneByteStringTag <= 0xffff);  // Ensure this fits 16-bit immed.
  andi(scratch1, first, kFlatOneByteStringMask);
  Branch(failure, ne, scratch1, Operand(kFlatOneByteStringTag));
  andi(scratch2, second, kFlatOneByteStringMask);
  Branch(failure, ne, scratch2, Operand(kFlatOneByteStringTag));
}
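
// The composite mask lets one andi/Branch pair test three instance-type
// properties at once: the value is a string (kIsNotStringMask), it is one-byte
// encoded (kStringEncodingMask), and it is sequential
// (kStringRepresentationMask). Any other combination of those bits differs
// from kFlatOneByteStringTag and takes the failure branch.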


void MacroAssembler::JumpIfInstanceTypeIsNotSequentialOneByte(Register type,
                                                              Register scratch,
                                                              Label* failure) {
  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask;
  const int kFlatOneByteStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;
  And(scratch, type, Operand(kFlatOneByteStringMask));
  Branch(failure, ne, scratch, Operand(kFlatOneByteStringTag));
}


static const int kRegisterPassedArguments = 4;

int MacroAssembler::CalculateStackPassedWords(int num_reg_arguments,
                                              int num_double_arguments) {
  int stack_passed_words = 0;
  num_reg_arguments += 2 * num_double_arguments;

  // Up to four simple arguments are passed in registers a0..a3.
  if (num_reg_arguments > kRegisterPassedArguments) {
    stack_passed_words += num_reg_arguments - kRegisterPassedArguments;
  }
  stack_passed_words += kCArgSlotCount;
  return stack_passed_words;
}
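
// Worked example: 3 integer arguments plus 1 double count as 3 + 2 = 5
// register-sized words, so one word spills past the four argument registers;
// together with the kCArgSlotCount shadow slots the O32 ABI reserves for the
// register arguments, the call needs 1 + kCArgSlotCount stack words.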


void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
                                               Register index,
                                               Register value,
                                               Register scratch,
                                               uint32_t encoding_mask) {
  Label is_object;
  SmiTst(string, at);
  Check(ne, kNonObject, at, Operand(zero_reg));

  lw(at, FieldMemOperand(string, HeapObject::kMapOffset));
  lbu(at, FieldMemOperand(at, Map::kInstanceTypeOffset));

  andi(at, at, kStringRepresentationMask | kStringEncodingMask);
  li(scratch, Operand(encoding_mask));
  Check(eq, kUnexpectedStringType, at, Operand(scratch));

  // The index is assumed to be untagged coming in, tag it to compare with the
  // string length without using a temp register; it is restored at the end of
  // this function.
  Label index_tag_ok, index_tag_bad;
  TrySmiTag(index, scratch, &index_tag_bad);
  Branch(&index_tag_ok);
  bind(&index_tag_bad);
  Abort(kIndexIsTooLarge);
  bind(&index_tag_ok);

  lw(at, FieldMemOperand(string, String::kLengthOffset));
  Check(lt, kIndexIsTooLarge, index, Operand(at));

  DCHECK(Smi::FromInt(0) == 0);
  Check(ge, kIndexIsNegative, index, Operand(zero_reg));

  SmiUntag(index, index);
}

void MacroAssembler::PrepareCallCFunction(int num_reg_arguments,
                                          int num_double_arguments,
                                          Register scratch) {
  int frame_alignment = ActivationFrameAlignment();

  // Up to four simple arguments are passed in registers a0..a3.
  // Those four arguments must have reserved argument slots on the stack for
  // mips, even though those argument slots are not normally used.
  // Remaining arguments are pushed on the stack, above (higher address than)
  // the argument slots.
  int stack_passed_arguments = CalculateStackPassedWords(
      num_reg_arguments, num_double_arguments);
  if (frame_alignment > kPointerSize) {
    // Make stack end at alignment and make room for num_arguments - 4 words
    // and the original value of sp.
    mov(scratch, sp);
    Subu(sp, sp, Operand((stack_passed_arguments + 1) * kPointerSize));
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    And(sp, sp, Operand(-frame_alignment));
    sw(scratch, MemOperand(sp, stack_passed_arguments * kPointerSize));
  } else {
    Subu(sp, sp, Operand(stack_passed_arguments * kPointerSize));
  }
}
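
// When extra alignment is needed, the pre-call sp is saved in the slot just
// above the outgoing arguments; CallCFunctionHelper reloads it from there
// after the call, since the amount dropped by the And-based rounding is not a
// compile-time constant.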


void MacroAssembler::PrepareCallCFunction(int num_reg_arguments,
                                          Register scratch) {
  PrepareCallCFunction(num_reg_arguments, 0, scratch);
}


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_reg_arguments,
                                   int num_double_arguments) {
  li(t8, Operand(function));
  CallCFunctionHelper(t8, num_reg_arguments, num_double_arguments);
}


void MacroAssembler::CallCFunction(Register function,
                                   int num_reg_arguments,
                                   int num_double_arguments) {
  CallCFunctionHelper(function, num_reg_arguments, num_double_arguments);
}


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  CallCFunction(function, num_arguments, 0);
}


void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  CallCFunction(function, num_arguments, 0);
}

void MacroAssembler::CallCFunctionHelper(Register function,
                                         int num_reg_arguments,
                                         int num_double_arguments) {
  DCHECK(has_frame());
  // Make sure that the stack is aligned before calling a C function unless
  // running in the simulator. The simulator has its own alignment check which
  // provides more information.
  // The argument slots are presumed to have been set up by
  // PrepareCallCFunction. The C function must be called via t9, for mips ABI.

#if V8_HOST_ARCH_MIPS
  if (emit_debug_code()) {
    int frame_alignment = base::OS::ActivationFrameAlignment();
    int frame_alignment_mask = frame_alignment - 1;
    if (frame_alignment > kPointerSize) {
      DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
      Label alignment_as_expected;
      And(at, sp, Operand(frame_alignment_mask));
      Branch(&alignment_as_expected, eq, at, Operand(zero_reg));
      // Don't use Check here, as it will call Runtime_Abort possibly
      // re-entering here.
      stop("Unexpected alignment in CallCFunction");
      bind(&alignment_as_expected);
    }
  }
#endif  // V8_HOST_ARCH_MIPS

  // Just call directly. The function called cannot cause a GC, or
  // allow preemption, so the return address in the link register
  // stays correct.

  if (!function.is(t9)) {
    mov(t9, function);
    function = t9;
  }

  Call(function);

  int stack_passed_arguments = CalculateStackPassedWords(
      num_reg_arguments, num_double_arguments);

  if (base::OS::ActivationFrameAlignment() > kPointerSize) {
    lw(sp, MemOperand(sp, stack_passed_arguments * kPointerSize));
  } else {
    Addu(sp, sp, Operand(stack_passed_arguments * kPointerSize));
  }
}

#undef BRANCH_ARGS_CHECK


void MacroAssembler::CheckPageFlag(
    Register object,
    Register scratch,
    int mask,
    Condition cc,
    Label* condition_met) {
  And(scratch, object, Operand(~Page::kPageAlignmentMask));
  lw(scratch, MemOperand(scratch, MemoryChunk::kFlagsOffset));
  And(scratch, scratch, Operand(mask));
  Branch(condition_met, cc, scratch, Operand(zero_reg));
}

void MacroAssembler::JumpIfBlack(Register object,
                                 Register scratch0,
                                 Register scratch1,
                                 Label* on_black) {
  HasColor(object, scratch0, scratch1, on_black, 1, 1);  // kBlackBitPattern.
  DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
}

void MacroAssembler::HasColor(Register object,
                              Register bitmap_scratch,
                              Register mask_scratch,
                              Label* has_color,
                              int first_bit,
                              int second_bit) {
  DCHECK(!AreAliased(object, bitmap_scratch, mask_scratch, t8));
  DCHECK(!AreAliased(object, bitmap_scratch, mask_scratch, t9));

  GetMarkBits(object, bitmap_scratch, mask_scratch);

  Label other_color, word_boundary;
  lw(t9, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize));
  And(t8, t9, Operand(mask_scratch));
  Branch(&other_color, first_bit == 1 ? eq : ne, t8, Operand(zero_reg));
  // Shift left 1 by adding.
  Addu(mask_scratch, mask_scratch, Operand(mask_scratch));
  Branch(&word_boundary, eq, mask_scratch, Operand(zero_reg));
  And(t8, t9, Operand(mask_scratch));
  Branch(has_color, second_bit == 1 ? ne : eq, t8, Operand(zero_reg));
  jmp(&other_color);

  bind(&word_boundary);
  lw(t9, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize));
  And(t9, t9, Operand(1));
  Branch(has_color, second_bit == 1 ? ne : eq, t9, Operand(zero_reg));
  bind(&other_color);
}

void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  DCHECK(!AreAliased(addr_reg, bitmap_reg, mask_reg, no_reg));
  And(bitmap_reg, addr_reg, Operand(~Page::kPageAlignmentMask));
  Ext(mask_reg, addr_reg, kPointerSizeLog2, Bitmap::kBitsPerCellLog2);
  const int kLowBits = kPointerSizeLog2 + Bitmap::kBitsPerCellLog2;
  Ext(t8, addr_reg, kLowBits, kPageSizeBits - kLowBits);
  Lsa(bitmap_reg, bitmap_reg, t8, kPointerSizeLog2, t8);
  li(t8, Operand(1));
  sllv(mask_reg, t8, mask_reg);
}
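
// The address is split into marking-bitmap coordinates: bits
// [kPointerSizeLog2, kPointerSizeLog2 + Bitmap::kBitsPerCellLog2) pick the bit
// inside a bitmap cell (turned into a mask by the sllv), while the bits above
// them, up to kPageSizeBits, pick the cell's word index, which Lsa scales by
// the pointer size and adds to the page start. Callers then address the cell
// itself at MemoryChunk::kHeaderSize past bitmap_reg.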


void MacroAssembler::JumpIfWhite(Register value, Register bitmap_scratch,
                                 Register mask_scratch, Register load_scratch,
                                 Label* value_is_white) {
  DCHECK(!AreAliased(value, bitmap_scratch, mask_scratch, t8));
  GetMarkBits(value, bitmap_scratch, mask_scratch);

  // If the value is black or grey we don't need to do anything.
  DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0);
  DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
  DCHECK(strcmp(Marking::kGreyBitPattern, "10") == 0);
  DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

  // Since both black and grey have a 1 in the first position and white does
  // not have a 1 there we only need to check one bit.
  lw(load_scratch, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize));
  And(t8, mask_scratch, load_scratch);
  Branch(value_is_white, eq, t8, Operand(zero_reg));
}

void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  lw(descriptors, FieldMemOperand(map, Map::kDescriptorsOffset));
}


void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
  lw(dst, FieldMemOperand(map, Map::kBitField3Offset));
  DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
}


void MacroAssembler::EnumLength(Register dst, Register map) {
  STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
  lw(dst, FieldMemOperand(map, Map::kBitField3Offset));
  And(dst, dst, Operand(Map::EnumLengthBits::kMask));
  SmiTag(dst);
}


void MacroAssembler::LoadAccessor(Register dst, Register holder,
                                  int accessor_index,
                                  AccessorComponent accessor) {
  lw(dst, FieldMemOperand(holder, HeapObject::kMapOffset));
  LoadInstanceDescriptors(dst, dst);
  lw(dst,
     FieldMemOperand(dst, DescriptorArray::GetValueOffset(accessor_index)));
  int offset = accessor == ACCESSOR_GETTER ? AccessorPair::kGetterOffset
                                           : AccessorPair::kSetterOffset;
  lw(dst, FieldMemOperand(dst, offset));
}

void MacroAssembler::CheckEnumCache(Label* call_runtime) {
  Register null_value = t1;
  Register empty_fixed_array_value = t2;
  LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
  Label next, start;
  mov(a2, a0);

  // Check if the enum length field is properly initialized, indicating that
  // there is an enum cache.
  lw(a1, FieldMemOperand(a2, HeapObject::kMapOffset));

  EnumLength(a3, a1);
  Branch(
      call_runtime, eq, a3, Operand(Smi::FromInt(kInvalidEnumCacheSentinel)));

  LoadRoot(null_value, Heap::kNullValueRootIndex);
  jmp(&start);

  bind(&next);
  lw(a1, FieldMemOperand(a2, HeapObject::kMapOffset));

  // For all objects but the receiver, check that the cache is empty.
  EnumLength(a3, a1);
  Branch(call_runtime, ne, a3, Operand(Smi::FromInt(0)));

  bind(&start);

  // Check that there are no elements. Register a2 contains the current JS
  // object we've reached through the prototype chain.
  Label no_elements;
  lw(a2, FieldMemOperand(a2, JSObject::kElementsOffset));
  Branch(&no_elements, eq, a2, Operand(empty_fixed_array_value));

  // Second chance, the object may be using the empty slow element dictionary.
  LoadRoot(at, Heap::kEmptySlowElementDictionaryRootIndex);
  Branch(call_runtime, ne, a2, Operand(at));

  bind(&no_elements);
  lw(a2, FieldMemOperand(a1, Map::kPrototypeOffset));
  Branch(&next, ne, a2, Operand(null_value));
}

void MacroAssembler::ClampUint8(Register output_reg, Register input_reg) {
  DCHECK(!output_reg.is(input_reg));
  Label done;
  li(output_reg, Operand(255));
  // Normal branch: nop in delay slot.
  Branch(&done, gt, input_reg, Operand(output_reg));
  // Use delay slot in this branch.
  Branch(USE_DELAY_SLOT, &done, lt, input_reg, Operand(zero_reg));
  mov(output_reg, zero_reg);  // In delay slot.
  mov(output_reg, input_reg);  // Value is in range 0..255.
  bind(&done);
}
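
// Two branches yield three outcomes here: input > 255 takes the first branch
// with output_reg already holding 255; input < 0 takes the second branch with
// the delay-slot mov having set output_reg to zero. The delay-slot mov also
// executes when that branch falls through, but the final
// mov(output_reg, input_reg) then overwrites it with the in-range value.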


void MacroAssembler::ClampDoubleToUint8(Register result_reg,
                                        DoubleRegister input_reg,
                                        DoubleRegister temp_double_reg) {
  Label above_zero;
  Label done;
  Label in_bounds;

  Move(temp_double_reg, 0.0);
  BranchF(&above_zero, NULL, gt, input_reg, temp_double_reg);

  // Double value is less than or equal to zero, or NaN: return 0.
  mov(result_reg, zero_reg);
  Branch(&done);

  // Double value is positive; clamp values above 255 to 255.
  bind(&above_zero);
  Move(temp_double_reg, 255.0);
  BranchF(&in_bounds, NULL, le, input_reg, temp_double_reg);
  li(result_reg, Operand(255));
  Branch(&done);

  // In 0-255 range, round and truncate.
  bind(&in_bounds);
  cvt_w_d(temp_double_reg, input_reg);
  mfc1(result_reg, temp_double_reg);
  bind(&done);
}

void MacroAssembler::TestJSArrayForAllocationMemento(Register receiver_reg,
                                                     Register scratch_reg,
                                                     Label* no_memento_found) {
  Label map_check;
  Label top_check;
  ExternalReference new_space_allocation_top_adr =
      ExternalReference::new_space_allocation_top_address(isolate());
  const int kMementoMapOffset = JSArray::kSize - kHeapObjectTag;
  const int kMementoEndOffset = kMementoMapOffset + AllocationMemento::kSize;

  // Bail out if the object is not in new space.
  JumpIfNotInNewSpace(receiver_reg, scratch_reg, no_memento_found);
  // If the object is in new space, we need to check whether it is on the same
  // page as the current top.
  Addu(scratch_reg, receiver_reg, Operand(kMementoEndOffset));
  li(at, Operand(new_space_allocation_top_adr));
  lw(at, MemOperand(at));
  Xor(scratch_reg, scratch_reg, Operand(at));
  And(scratch_reg, scratch_reg, Operand(~Page::kPageAlignmentMask));
  Branch(&top_check, eq, scratch_reg, Operand(zero_reg));
  // The object is on a different page than allocation top. Bail out if the
  // object sits on the page boundary as no memento can follow and we cannot
  // touch the memory following it.
  Addu(scratch_reg, receiver_reg, Operand(kMementoEndOffset));
  Xor(scratch_reg, scratch_reg, Operand(receiver_reg));
  And(scratch_reg, scratch_reg, Operand(~Page::kPageAlignmentMask));
  Branch(no_memento_found, ne, scratch_reg, Operand(zero_reg));
  // Continue with the actual map check.
  jmp(&map_check);
  // If top is on the same page as the current object, we need to check whether
  // we are below top.
  bind(&top_check);
  Addu(scratch_reg, receiver_reg, Operand(kMementoEndOffset));
  li(at, Operand(new_space_allocation_top_adr));
  lw(at, MemOperand(at));
  Branch(no_memento_found, gt, scratch_reg, Operand(at));
  // Memento map check.
  bind(&map_check);
  lw(scratch_reg, MemOperand(receiver_reg, kMementoMapOffset));
  Branch(no_memento_found, ne, scratch_reg,
         Operand(isolate()->factory()->allocation_memento_map()));
}
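
// Same-page test used twice above: (a ^ b) & ~Page::kPageAlignmentMask is
// zero exactly when a and b lie on the same page, because the XOR cancels a
// shared page-aligned prefix and the mask discards the in-page offset bits.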


Register GetRegisterThatIsNotOneOf(Register reg1,
                                   Register reg2,
                                   Register reg3,
                                   Register reg4,
                                   Register reg5,
                                   Register reg6) {
  RegList regs = 0;
  if (reg1.is_valid()) regs |= reg1.bit();
  if (reg2.is_valid()) regs |= reg2.bit();
  if (reg3.is_valid()) regs |= reg3.bit();
  if (reg4.is_valid()) regs |= reg4.bit();
  if (reg5.is_valid()) regs |= reg5.bit();
  if (reg6.is_valid()) regs |= reg6.bit();

  const RegisterConfiguration* config = RegisterConfiguration::Crankshaft();
  for (int i = 0; i < config->num_allocatable_general_registers(); ++i) {
    int code = config->GetAllocatableGeneralCode(i);
    Register candidate = Register::from_code(code);
    if (regs & candidate.bit()) continue;
    return candidate;
  }
  UNREACHABLE();
  return no_reg;
}

void MacroAssembler::JumpIfDictionaryInPrototypeChain(
    Register object,
    Register scratch0,
    Register scratch1,
    Label* found) {
  DCHECK(!scratch1.is(scratch0));
  Factory* factory = isolate()->factory();
  Register current = scratch0;
  Label loop_again, end;

  // Scratch contained elements pointer.
  Move(current, object);
  lw(current, FieldMemOperand(current, HeapObject::kMapOffset));
  lw(current, FieldMemOperand(current, Map::kPrototypeOffset));
  Branch(&end, eq, current, Operand(factory->null_value()));

  // Loop based on the map going up the prototype chain.
  bind(&loop_again);
  lw(current, FieldMemOperand(current, HeapObject::kMapOffset));
  lbu(scratch1, FieldMemOperand(current, Map::kInstanceTypeOffset));
  STATIC_ASSERT(JS_VALUE_TYPE < JS_OBJECT_TYPE);
  STATIC_ASSERT(JS_PROXY_TYPE < JS_OBJECT_TYPE);
  Branch(found, lo, scratch1, Operand(JS_OBJECT_TYPE));
  lb(scratch1, FieldMemOperand(current, Map::kBitField2Offset));
  DecodeField<Map::ElementsKindBits>(scratch1);
  Branch(found, eq, scratch1, Operand(DICTIONARY_ELEMENTS));
  lw(current, FieldMemOperand(current, Map::kPrototypeOffset));
  Branch(&loop_again, ne, current, Operand(factory->null_value()));

  bind(&end);
}

bool AreAliased(Register reg1, Register reg2, Register reg3, Register reg4,
                Register reg5, Register reg6, Register reg7, Register reg8,
                Register reg9, Register reg10) {
  int n_of_valid_regs = reg1.is_valid() + reg2.is_valid() + reg3.is_valid() +
                        reg4.is_valid() + reg5.is_valid() + reg6.is_valid() +
                        reg7.is_valid() + reg8.is_valid() + reg9.is_valid() +
                        reg10.is_valid();

  RegList regs = 0;
  if (reg1.is_valid()) regs |= reg1.bit();
  if (reg2.is_valid()) regs |= reg2.bit();
  if (reg3.is_valid()) regs |= reg3.bit();
  if (reg4.is_valid()) regs |= reg4.bit();
  if (reg5.is_valid()) regs |= reg5.bit();
  if (reg6.is_valid()) regs |= reg6.bit();
  if (reg7.is_valid()) regs |= reg7.bit();
  if (reg8.is_valid()) regs |= reg8.bit();
  if (reg9.is_valid()) regs |= reg9.bit();
  if (reg10.is_valid()) regs |= reg10.bit();
  int n_of_non_aliasing_regs = NumRegs(regs);

  return n_of_valid_regs != n_of_non_aliasing_regs;
}

CodePatcher::CodePatcher(Isolate* isolate, byte* address, int instructions,
                         FlushICache flush_cache)
    : address_(address),
      size_(instructions * Assembler::kInstrSize),
      masm_(isolate, address, size_ + Assembler::kGap, CodeObjectRequired::kNo),
      flush_cache_(flush_cache) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  if (flush_cache_ == FLUSH) {
    Assembler::FlushICache(masm_.isolate(), address_, size_);
  }

  // Check that the code was patched as expected.
  DCHECK(masm_.pc_ == address_ + size_);
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}

void CodePatcher::Emit(Instr instr) {
  masm()->emit(instr);
}


void CodePatcher::Emit(Address addr) {
  masm()->emit(reinterpret_cast<Instr>(addr));
}


void CodePatcher::ChangeBranchCondition(Instr current_instr,
                                        uint32_t new_opcode) {
  current_instr = (current_instr & ~kOpcodeMask) | new_opcode;
  masm_.emit(current_instr);
}

void MacroAssembler::TruncatingDiv(Register result,
                                   Register dividend,
                                   int32_t divisor) {
  DCHECK(!dividend.is(result));
  DCHECK(!dividend.is(at));
  DCHECK(!result.is(at));
  base::MagicNumbersForDivision<uint32_t> mag =
      base::SignedDivisionByConstant(static_cast<uint32_t>(divisor));
  li(at, Operand(mag.multiplier));
  Mulh(result, dividend, Operand(at));
  bool neg = (mag.multiplier & (static_cast<uint32_t>(1) << 31)) != 0;
  if (divisor > 0 && neg) {
    Addu(result, result, Operand(dividend));
  }
  if (divisor < 0 && !neg && mag.multiplier > 0) {
    Subu(result, result, Operand(dividend));
  }
  if (mag.shift > 0) sra(result, result, mag.shift);
  srl(at, dividend, 31);
  Addu(result, result, Operand(at));
}
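
// Division by multiplication, in the style of Hacker's Delight: for
// divisor == 7, for example, SignedDivisionByConstant yields multiplier
// 0x92492493 with shift 2. That multiplier is negative as a signed word, so
// the dividend is added back after the Mulh, the product is shifted right
// arithmetically by 2, and the final srl/Addu adds one for negative dividends
// so that the quotient truncates toward zero.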


}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_MIPS