blob: e3544c5eec9afdf65237843f334ca336e42977b9 [file] [log] [blame]
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002// Copyright 2012 the V8 project authors. All rights reserved.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003// Use of this source code is governed by a BSD-style license that can be
4// found in the LICENSE file.
Andrei Popescu31002712010-02-23 13:46:05 +00005
Ben Murdoch257744e2011-11-30 15:57:28 +00006#include <limits.h> // For LONG_MIN, LONG_MAX.
Andrei Popescu31002712010-02-23 13:46:05 +00007
Ben Murdochb8a8cc12014-11-26 15:28:44 +00008#if V8_TARGET_ARCH_MIPS
Leon Clarkef7060e22010-06-03 12:02:55 +01009
Ben Murdochb8a8cc12014-11-26 15:28:44 +000010#include "src/base/bits.h"
11#include "src/base/division-by-constant.h"
12#include "src/bootstrapper.h"
13#include "src/codegen.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000014#include "src/debug/debug.h"
15#include "src/mips/macro-assembler-mips.h"
16#include "src/register-configuration.h"
Emily Bernierd0a1eb72015-03-24 16:35:39 -040017#include "src/runtime/runtime.h"
Andrei Popescu31002712010-02-23 13:46:05 +000018
19namespace v8 {
20namespace internal {
21
// Construct a MacroAssembler that emits into |buffer| (of |size| bytes).
// When |create_code_object| is kYes, code_object_ is initialized to a fresh
// handle holding the undefined value.
MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size,
                               CodeObjectRequired create_code_object)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      has_frame_(false),
      has_double_zero_reg_set_(false) {
  if (create_code_object == CodeObjectRequired::kYes) {
    code_object_ =
        Handle<Object>::New(isolate()->heap()->undefined_value(), isolate());
  }
}
33
34
Ben Murdochb8a8cc12014-11-26 15:28:44 +000035void MacroAssembler::Load(Register dst,
36 const MemOperand& src,
37 Representation r) {
38 DCHECK(!r.IsDouble());
39 if (r.IsInteger8()) {
40 lb(dst, src);
41 } else if (r.IsUInteger8()) {
42 lbu(dst, src);
43 } else if (r.IsInteger16()) {
44 lh(dst, src);
45 } else if (r.IsUInteger16()) {
46 lhu(dst, src);
47 } else {
48 lw(dst, src);
49 }
50}
51
52
53void MacroAssembler::Store(Register src,
54 const MemOperand& dst,
55 Representation r) {
56 DCHECK(!r.IsDouble());
57 if (r.IsInteger8() || r.IsUInteger8()) {
58 sb(src, dst);
59 } else if (r.IsInteger16() || r.IsUInteger16()) {
60 sh(src, dst);
61 } else {
62 if (r.IsHeapObject()) {
63 AssertNotSmi(src);
64 } else if (r.IsSmi()) {
65 AssertSmi(src);
66 }
67 sw(src, dst);
68 }
69}
70
71
Andrei Popescu31002712010-02-23 13:46:05 +000072void MacroAssembler::LoadRoot(Register destination,
73 Heap::RootListIndex index) {
Steve Block6ded16b2010-05-10 14:33:55 +010074 lw(destination, MemOperand(s6, index << kPointerSizeLog2));
Andrei Popescu31002712010-02-23 13:46:05 +000075}
76
Steve Block44f0eee2011-05-26 01:26:41 +010077
Andrei Popescu31002712010-02-23 13:46:05 +000078void MacroAssembler::LoadRoot(Register destination,
79 Heap::RootListIndex index,
80 Condition cond,
81 Register src1, const Operand& src2) {
Steve Block44f0eee2011-05-26 01:26:41 +010082 Branch(2, NegateCondition(cond), src1, src2);
Steve Block6ded16b2010-05-10 14:33:55 +010083 lw(destination, MemOperand(s6, index << kPointerSizeLog2));
Andrei Popescu31002712010-02-23 13:46:05 +000084}
85
86
Steve Block44f0eee2011-05-26 01:26:41 +010087void MacroAssembler::StoreRoot(Register source,
88 Heap::RootListIndex index) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000089 DCHECK(Heap::RootCanBeWrittenAfterInitialization(index));
Steve Block44f0eee2011-05-26 01:26:41 +010090 sw(source, MemOperand(s6, index << kPointerSizeLog2));
91}
92
93
94void MacroAssembler::StoreRoot(Register source,
95 Heap::RootListIndex index,
96 Condition cond,
97 Register src1, const Operand& src2) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000098 DCHECK(Heap::RootCanBeWrittenAfterInitialization(index));
Steve Block44f0eee2011-05-26 01:26:41 +010099 Branch(2, NegateCondition(cond), src1, src2);
100 sw(source, MemOperand(s6, index << kPointerSizeLog2));
101}
102
103
Ben Murdoch257744e2011-11-30 15:57:28 +0000104// Push and pop all registers that can hold pointers.
105void MacroAssembler::PushSafepointRegisters() {
106 // Safepoints expect a block of kNumSafepointRegisters values on the
107 // stack, so adjust the stack for unsaved registers.
108 const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000109 DCHECK(num_unsaved >= 0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100110 if (num_unsaved > 0) {
111 Subu(sp, sp, Operand(num_unsaved * kPointerSize));
112 }
Ben Murdoch257744e2011-11-30 15:57:28 +0000113 MultiPush(kSafepointSavedRegisters);
114}
115
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000116
Ben Murdoch257744e2011-11-30 15:57:28 +0000117void MacroAssembler::PopSafepointRegisters() {
118 const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters;
119 MultiPop(kSafepointSavedRegisters);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100120 if (num_unsaved > 0) {
121 Addu(sp, sp, Operand(num_unsaved * kPointerSize));
122 }
Ben Murdoch257744e2011-11-30 15:57:28 +0000123}
124
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000125
Ben Murdoch257744e2011-11-30 15:57:28 +0000126void MacroAssembler::StoreToSafepointRegisterSlot(Register src, Register dst) {
127 sw(src, SafepointRegisterSlot(dst));
128}
129
130
131void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
132 lw(dst, SafepointRegisterSlot(src));
133}
134
135
136int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
137 // The registers are pushed starting with the highest encoding,
138 // which means that lowest encodings are closest to the stack pointer.
139 return kSafepointRegisterStackIndexMap[reg_code];
140}
141
142
143MemOperand MacroAssembler::SafepointRegisterSlot(Register reg) {
144 return MemOperand(sp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
145}
146
147
// Slot for |reg| when both general-purpose and double registers have been
// pushed.  Not implemented on MIPS: UNIMPLEMENTED_MIPS() fires first, so
// the computation below is effectively dead code kept for reference.
MemOperand MacroAssembler::SafepointRegistersAndDoublesSlot(Register reg) {
  UNIMPLEMENTED_MIPS();
  // General purpose registers are pushed last on the stack.
  int doubles_size = DoubleRegister::kMaxNumRegisters * kDoubleSize;
  int register_offset = SafepointRegisterStackIndex(reg.code()) * kPointerSize;
  return MemOperand(sp, doubles_size + register_offset);
}
155
156
Steve Block44f0eee2011-05-26 01:26:41 +0100157void MacroAssembler::InNewSpace(Register object,
158 Register scratch,
159 Condition cc,
160 Label* branch) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000161 DCHECK(cc == eq || cc == ne);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100162 const int mask =
163 1 << MemoryChunk::IN_FROM_SPACE | 1 << MemoryChunk::IN_TO_SPACE;
164 CheckPageFlag(object, scratch, mask, cc, branch);
Steve Block44f0eee2011-05-26 01:26:41 +0100165}
166
167
// Clobbers object, dst, value, and ra, if (ra_status == kRAHasBeenSaved)
// The register 'object' contains a heap object pointer.  The heap object
// tag is shifted away.
//
// Write barrier for a store of |value| into the field at |offset| inside
// |object|.  Computes the field address into |dst| and delegates to
// RecordWrite, skipping the smi check there (it is handled here).
void MacroAssembler::RecordWriteField(
    Register object,
    int offset,
    Register value,
    Register dst,
    RAStatus ra_status,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  DCHECK(!AreAliased(value, dst, t8, object));
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done);
  }

  // Although the object register is tagged, the offset is relative to the
  // start of the object, so the offset must be a multiple of kPointerSize.
  DCHECK(IsAligned(offset, kPointerSize));

  // dst <- untagged address of the written field.
  Addu(dst, object, Operand(offset - kHeapObjectTag));
  if (emit_debug_code()) {
    Label ok;
    And(t8, dst, Operand((1 << kPointerSizeLog2) - 1));
    Branch(&ok, eq, t8, Operand(zero_reg));
    stop("Unaligned cell in write barrier");
    bind(&ok);
  }

  RecordWrite(object,
              dst,
              value,
              ra_status,
              save_fp,
              remembered_set_action,
              OMIT_SMI_CHECK,
              pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    li(value, Operand(bit_cast<int32_t>(kZapValue + 4)));
    li(dst, Operand(bit_cast<int32_t>(kZapValue + 8)));
  }
}
222
223
// Clobbers object, dst, map, and ra, if (ra_status == kRAHasBeenSaved)
//
// Write barrier for storing |map| into |object|'s map slot.  Omits the
// remembered-set update (maps can never be in new space — see the page
// flag comment below) and only informs the incremental marker.
void MacroAssembler::RecordWriteForMap(Register object,
                                       Register map,
                                       Register dst,
                                       RAStatus ra_status,
                                       SaveFPRegsMode fp_mode) {
  // Debug check: |map|'s own map must be the meta map, i.e. |map| really
  // is a Map object.
  if (emit_debug_code()) {
    DCHECK(!dst.is(at));
    lw(dst, FieldMemOperand(map, HeapObject::kMapOffset));
    Check(eq,
          kWrongAddressOrValuePassedToRecordWrite,
          dst,
          Operand(isolate()->factory()->meta_map()));
  }

  // Without incremental marking there is nothing to record.
  if (!FLAG_incremental_marking) {
    return;
  }

  // Debug check: |map| is the value actually stored in |object|'s map slot.
  if (emit_debug_code()) {
    lw(at, FieldMemOperand(object, HeapObject::kMapOffset));
    Check(eq,
          kWrongAddressOrValuePassedToRecordWrite,
          map,
          Operand(at));
  }

  Label done;

  // A single check of the map's pages interesting flag suffices, since it is
  // only set during incremental collection, and then it's also guaranteed that
  // the from object's page's interesting flag is also set. This optimization
  // relies on the fact that maps can never be in new space.
  CheckPageFlag(map,
                map,  // Used as scratch.
                MemoryChunk::kPointersToHereAreInterestingMask,
                eq,
                &done);

  // dst <- untagged address of the map slot; verify its alignment.
  Addu(dst, object, Operand(HeapObject::kMapOffset - kHeapObjectTag));
  if (emit_debug_code()) {
    Label ok;
    And(at, dst, Operand((1 << kPointerSizeLog2) - 1));
    Branch(&ok, eq, at, Operand(zero_reg));
    stop("Unaligned cell in write barrier");
    bind(&ok);
  }

  // Record the actual write.
  if (ra_status == kRAHasNotBeenSaved) {
    push(ra);
  }
  RecordWriteStub stub(isolate(), object, map, dst, OMIT_REMEMBERED_SET,
                       fp_mode);
  CallStub(&stub);
  if (ra_status == kRAHasNotBeenSaved) {
    pop(ra);
  }

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1, at, dst);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    li(dst, Operand(bit_cast<int32_t>(kZapValue + 12)));
    li(map, Operand(bit_cast<int32_t>(kZapValue + 16)));
  }
}
296
297
// Clobbers object, address, value, and ra, if (ra_status == kRAHasBeenSaved)
// The register 'object' contains a heap object pointer.  The heap object
// tag is shifted away.
//
// Generic write barrier: records that |value| was stored into the slot at
// |address| inside |object|, updating the remembered set and/or notifying
// the incremental marker as requested.
void MacroAssembler::RecordWrite(
    Register object,
    Register address,
    Register value,
    RAStatus ra_status,
    SaveFPRegsMode fp_mode,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  // t8/t9 are used internally by macro instructions and the stub below, so
  // the inputs must not alias them.
  DCHECK(!AreAliased(object, address, value, t8));
  DCHECK(!AreAliased(object, address, value, t9));

  // Debug check: |address| currently holds exactly |value|.
  if (emit_debug_code()) {
    lw(at, MemOperand(address));
    Assert(
        eq, kWrongAddressOrValuePassedToRecordWrite, at, Operand(value));
  }

  // Nothing to record if neither the remembered set nor the incremental
  // marker needs it.
  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    DCHECK_EQ(0, kSmiTag);
    JumpIfSmi(value, &done);
  }

  if (pointers_to_here_check_for_value != kPointersToHereAreAlwaysInteresting) {
    CheckPageFlag(value,
                  value,  // Used as scratch.
                  MemoryChunk::kPointersToHereAreInterestingMask,
                  eq,
                  &done);
  }
  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                eq,
                &done);

  // Record the actual write.
  if (ra_status == kRAHasNotBeenSaved) {
    push(ra);
  }
  RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
                       fp_mode);
  CallStub(&stub);
  if (ra_status == kRAHasNotBeenSaved) {
    pop(ra);
  }

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1, at,
                   value);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    li(address, Operand(bit_cast<int32_t>(kZapValue + 12)));
    li(value, Operand(bit_cast<int32_t>(kZapValue + 16)));
  }
}
371
// Write barrier for storing |code_entry| into |js_function|'s code-entry
// field.  Notifies the incremental marker via a C call rather than a
// RecordWriteStub.
void MacroAssembler::RecordWriteCodeEntryField(Register js_function,
                                               Register code_entry,
                                               Register scratch) {
  const int offset = JSFunction::kCodeEntryOffset;

  // Since a code entry (value) is always in old space, we don't need to update
  // remembered set. If incremental marking is off, there is nothing for us to
  // do.
  if (!FLAG_incremental_marking) return;

  // The calling convention below hard-codes a0-a2, so the inputs are pinned
  // to specific registers.
  DCHECK(js_function.is(a1));
  DCHECK(code_entry.is(t0));
  DCHECK(scratch.is(t1));
  AssertNotSmi(js_function);

  // Debug check: the code-entry slot currently holds |code_entry|.
  if (emit_debug_code()) {
    Addu(scratch, js_function, Operand(offset - kHeapObjectTag));
    lw(at, MemOperand(scratch));
    Assert(eq, kWrongAddressOrValuePassedToRecordWrite, at,
           Operand(code_entry));
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  CheckPageFlag(code_entry, scratch,
                MemoryChunk::kPointersToHereAreInterestingMask, eq, &done);
  CheckPageFlag(js_function, scratch,
                MemoryChunk::kPointersFromHereAreInterestingMask, eq, &done);

  // dst <- untagged address of the code-entry slot.
  const Register dst = scratch;
  Addu(dst, js_function, Operand(offset - kHeapObjectTag));

  // Save caller-saved registers. js_function and code_entry are in the
  // caller-saved register list.
  DCHECK(kJSCallerSaved & js_function.bit());
  DCHECK(kJSCallerSaved & code_entry.bit());
  MultiPush(kJSCallerSaved | ra.bit());

  int argument_count = 3;

  PrepareCallCFunction(argument_count, 0, code_entry);

  // C-call arguments: (function, slot address, isolate).
  mov(a0, js_function);
  mov(a1, dst);
  li(a2, Operand(ExternalReference::isolate_address(isolate())));

  {
    AllowExternalCallThatCantCauseGC scope(this);
    CallCFunction(
        ExternalReference::incremental_marking_record_write_code_entry_function(
            isolate()),
        argument_count);
  }

  // Restore caller-saved registers.
  MultiPop(kJSCallerSaved | ra.bit());

  bind(&done);
}
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100433
// Append |address| to the store buffer.  If the buffer overflows, call the
// StoreBufferOverflowStub.  Depending on |and_then| the emitted code either
// falls through at the end or returns.  Clobbers t8 and |scratch|.
void MacroAssembler::RememberedSetHelper(Register object,  // For debug tests.
                                         Register address,
                                         Register scratch,
                                         SaveFPRegsMode fp_mode,
                                         RememberedSetFinalAction and_then) {
  Label done;
  // Debug check: remembered-set entries must never point into new space.
  if (emit_debug_code()) {
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok);
    stop("Remembered set pointer is in new space");
    bind(&ok);
  }
  // Load store buffer top.
  ExternalReference store_buffer =
      ExternalReference::store_buffer_top(isolate());
  li(t8, Operand(store_buffer));
  lw(scratch, MemOperand(t8));
  // Store pointer to buffer and increment buffer top.
  sw(address, MemOperand(scratch));
  Addu(scratch, scratch, kPointerSize);
  // Write back new top of buffer.
  sw(scratch, MemOperand(t8));
  // Call stub on end of buffer.
  // Check for end of buffer.
  And(t8, scratch, Operand(StoreBuffer::kStoreBufferOverflowBit));
  if (and_then == kFallThroughAtEnd) {
    Branch(&done, eq, t8, Operand(zero_reg));
  } else {
    DCHECK(and_then == kReturnAtEnd);
    // No overflow: return immediately.
    Ret(eq, t8, Operand(zero_reg));
  }
  // Overflow path: flush via the stub, preserving ra across the call.
  push(ra);
  StoreBufferOverflowStub store_buffer_overflow(isolate(), fp_mode);
  CallStub(&store_buffer_overflow);
  pop(ra);
  bind(&done);
  if (and_then == kReturnAtEnd) {
    Ret();
  }
}
474
475
476// -----------------------------------------------------------------------------
Ben Murdoch257744e2011-11-30 15:57:28 +0000477// Allocation support.
Steve Block44f0eee2011-05-26 01:26:41 +0100478
479
// Check that the global proxy in |holder_reg| shares a security token with
// the current lexical context; branch to |miss| when the tokens differ.
// Clobbers |scratch| and at.  NOTE(review): the second debug block relies on
// 'at' being reloaded before the final token comparison.
void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  DCHECK(!holder_reg.is(scratch));
  DCHECK(!holder_reg.is(at));
  DCHECK(!scratch.is(at));

  // Load current lexical context from the stack frame.
  lw(scratch, MemOperand(fp, StandardFrameConstants::kContextOffset));
  // In debug mode, make sure the lexical context is set.
#ifdef DEBUG
  Check(ne, kWeShouldNotHaveAnEmptyLexicalContext,
      scratch, Operand(zero_reg));
#endif

  // Load the native context of the current context.
  lw(scratch, ContextMemOperand(scratch, Context::NATIVE_CONTEXT_INDEX));

  // Check the context is a native context.
  if (emit_debug_code()) {
    push(holder_reg);  // Temporarily save holder on the stack.
    // Read the first word and compare to the native_context_map.
    lw(holder_reg, FieldMemOperand(scratch, HeapObject::kMapOffset));
    LoadRoot(at, Heap::kNativeContextMapRootIndex);
    Check(eq, kJSGlobalObjectNativeContextShouldBeANativeContext,
          holder_reg, Operand(at));
    pop(holder_reg);  // Restore holder.
  }

  // Check if both contexts are the same.
  lw(at, FieldMemOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  Branch(&same_contexts, eq, scratch, Operand(at));

  // Check the context is a native context.
  if (emit_debug_code()) {
    push(holder_reg);  // Temporarily save holder on the stack.
    mov(holder_reg, at);  // Move at to its holding place.
    LoadRoot(at, Heap::kNullValueRootIndex);
    Check(ne, kJSGlobalProxyContextShouldNotBeNull,
          holder_reg, Operand(at));

    lw(holder_reg, FieldMemOperand(holder_reg, HeapObject::kMapOffset));
    LoadRoot(at, Heap::kNativeContextMapRootIndex);
    Check(eq, kJSGlobalObjectNativeContextShouldBeANativeContext,
          holder_reg, Operand(at));
    // Restore at is not needed. at is reloaded below.
    pop(holder_reg);  // Restore holder.
    // Restore at to holder's context.
    lw(at, FieldMemOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  }

  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;

  lw(scratch, FieldMemOperand(scratch, token_offset));
  lw(at, FieldMemOperand(at, token_offset));
  Branch(miss, ne, scratch, Operand(at));

  bind(&same_contexts);
}
545
546
// Compute the hash code from the untagged key. This must be kept in sync with
// ComputeIntegerHash in utils.h and KeyedLoadGenericStub in
// code-stub-hydrogen.cc
//
// |reg0| holds the untagged key on entry and the 30-bit hash on exit;
// |scratch| and at are clobbered.
void MacroAssembler::GetNumberHash(Register reg0, Register scratch) {
  // First of all we assign the hash seed to scratch.
  LoadRoot(scratch, Heap::kHashSeedRootIndex);
  SmiUntag(scratch);

  // Xor original key with a seed.
  xor_(reg0, reg0, scratch);

  // Compute the hash code from the untagged key. This must be kept in sync
  // with ComputeIntegerHash in utils.h.
  //
  // hash = ~hash + (hash << 15);
  nor(scratch, reg0, zero_reg);
  Lsa(reg0, scratch, reg0, 15);

  // hash = hash ^ (hash >> 12);
  srl(at, reg0, 12);
  xor_(reg0, reg0, at);

  // hash = hash + (hash << 2);
  Lsa(reg0, reg0, reg0, 2);

  // hash = hash ^ (hash >> 4);
  srl(at, reg0, 4);
  xor_(reg0, reg0, at);

  // hash = hash * 2057;  (expressed as hash + (hash << 3) + (hash << 11)).
  sll(scratch, reg0, 11);
  Lsa(reg0, reg0, reg0, 3);
  addu(reg0, reg0, scratch);

  // hash = hash ^ (hash >> 16);
  srl(at, reg0, 16);
  xor_(reg0, reg0, at);
  // Keep only the low 30 bits of the hash.
  And(reg0, reg0, Operand(0x3fffffff));
}
586
587
// Look up |key| in the SeededNumberDictionary held in |elements|, storing
// the found value in |result| or branching to |miss|.
void MacroAssembler::LoadFromNumberDictionary(Label* miss,
                                              Register elements,
                                              Register key,
                                              Register result,
                                              Register reg0,
                                              Register reg1,
                                              Register reg2) {
  // Register use:
  //
  // elements - holds the slow-case elements of the receiver on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // key      - holds the smi key on entry.
  //            Unchanged unless 'result' is the same register.
  //
  //
  // result   - holds the result on exit if the load succeeded.
  //            Allowed to be the same as 'key' or 'result'.
  //            Unchanged on bailout so 'key' or 'result' can be used
  //            in further computation.
  //
  // Scratch registers:
  //
  // reg0 - holds the untagged key on entry and holds the hash once computed.
  //
  // reg1 - Used to hold the capacity mask of the dictionary.
  //
  // reg2 - Used for the index into the dictionary.
  // at   - Temporary (avoid MacroAssembler instructions also using 'at').
  Label done;

  GetNumberHash(reg0, reg1);

  // Compute the capacity mask (capacity - 1; capacity is a power of two).
  lw(reg1, FieldMemOperand(elements, SeededNumberDictionary::kCapacityOffset));
  sra(reg1, reg1, kSmiTagSize);
  Subu(reg1, reg1, Operand(1));

  // Generate an unrolled loop that performs a few probes before giving up.
  for (int i = 0; i < kNumberDictionaryProbes; i++) {
    // Use reg2 for index calculations and keep the hash intact in reg0.
    mov(reg2, reg0);
    // Compute the masked index: (hash + i + i * i) & mask.
    if (i > 0) {
      Addu(reg2, reg2, Operand(SeededNumberDictionary::GetProbeOffset(i)));
    }
    and_(reg2, reg2, reg1);

    // Scale the index by multiplying by the element size.
    DCHECK(SeededNumberDictionary::kEntrySize == 3);
    Lsa(reg2, reg2, reg2, 1);  // reg2 = reg2 * 3.

    // Check if the key is identical to the name.
    Lsa(reg2, elements, reg2, kPointerSizeLog2);

    lw(at, FieldMemOperand(reg2, SeededNumberDictionary::kElementsStartOffset));
    if (i != kNumberDictionaryProbes - 1) {
      Branch(&done, eq, key, Operand(at));
    } else {
      // Last probe: a mismatch means the key is absent.
      Branch(miss, ne, key, Operand(at));
    }
  }

  bind(&done);
  // Check that the value is a field property.
  // reg2: elements + (index * kPointerSize).
  const int kDetailsOffset =
      SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  lw(reg1, FieldMemOperand(reg2, kDetailsOffset));
  DCHECK_EQ(DATA, 0);
  And(at, reg1, Operand(Smi::FromInt(PropertyDetails::TypeField::kMask)));
  Branch(miss, ne, at, Operand(zero_reg));

  // Get the value at the masked, scaled index and return.
  const int kValueOffset =
      SeededNumberDictionary::kElementsStartOffset + kPointerSize;
  lw(result, FieldMemOperand(reg2, kValueOffset));
}
666
667
Andrei Popescu31002712010-02-23 13:46:05 +0000668// ---------------------------------------------------------------------------
Ben Murdoch257744e2011-11-30 15:57:28 +0000669// Instruction macros.
Andrei Popescu31002712010-02-23 13:46:05 +0000670
Andrei Popescu31002712010-02-23 13:46:05 +0000671void MacroAssembler::Addu(Register rd, Register rs, const Operand& rt) {
672 if (rt.is_reg()) {
673 addu(rd, rs, rt.rm());
674 } else {
Steve Block44f0eee2011-05-26 01:26:41 +0100675 if (is_int16(rt.imm32_) && !MustUseReg(rt.rmode_)) {
Andrei Popescu31002712010-02-23 13:46:05 +0000676 addiu(rd, rs, rt.imm32_);
677 } else {
678 // li handles the relocation.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000679 DCHECK(!rs.is(at));
Andrei Popescu31002712010-02-23 13:46:05 +0000680 li(at, rt);
681 addu(rd, rs, at);
682 }
683 }
684}
685
686
Steve Block44f0eee2011-05-26 01:26:41 +0100687void MacroAssembler::Subu(Register rd, Register rs, const Operand& rt) {
688 if (rt.is_reg()) {
689 subu(rd, rs, rt.rm());
690 } else {
691 if (is_int16(rt.imm32_) && !MustUseReg(rt.rmode_)) {
692 addiu(rd, rs, -rt.imm32_); // No subiu instr, use addiu(x, y, -imm).
693 } else {
694 // li handles the relocation.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000695 DCHECK(!rs.is(at));
Steve Block44f0eee2011-05-26 01:26:41 +0100696 li(at, rt);
697 subu(rd, rs, at);
698 }
699 }
700}
701
702
Andrei Popescu31002712010-02-23 13:46:05 +0000703void MacroAssembler::Mul(Register rd, Register rs, const Operand& rt) {
704 if (rt.is_reg()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000705 if (IsMipsArchVariant(kLoongson)) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100706 mult(rs, rt.rm());
707 mflo(rd);
708 } else {
709 mul(rd, rs, rt.rm());
710 }
Andrei Popescu31002712010-02-23 13:46:05 +0000711 } else {
712 // li handles the relocation.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000713 DCHECK(!rs.is(at));
Andrei Popescu31002712010-02-23 13:46:05 +0000714 li(at, rt);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000715 if (IsMipsArchVariant(kLoongson)) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100716 mult(rs, at);
717 mflo(rd);
718 } else {
719 mul(rd, rs, at);
720 }
Andrei Popescu31002712010-02-23 13:46:05 +0000721 }
722}
723
724
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000725void MacroAssembler::Mul(Register rd_hi, Register rd_lo,
726 Register rs, const Operand& rt) {
727 if (rt.is_reg()) {
728 if (!IsMipsArchVariant(kMips32r6)) {
729 mult(rs, rt.rm());
730 mflo(rd_lo);
731 mfhi(rd_hi);
732 } else {
733 if (rd_lo.is(rs)) {
734 DCHECK(!rd_hi.is(rs));
735 DCHECK(!rd_hi.is(rt.rm()) && !rd_lo.is(rt.rm()));
736 muh(rd_hi, rs, rt.rm());
737 mul(rd_lo, rs, rt.rm());
738 } else {
739 DCHECK(!rd_hi.is(rt.rm()) && !rd_lo.is(rt.rm()));
740 mul(rd_lo, rs, rt.rm());
741 muh(rd_hi, rs, rt.rm());
742 }
743 }
744 } else {
745 // li handles the relocation.
746 DCHECK(!rs.is(at));
747 li(at, rt);
748 if (!IsMipsArchVariant(kMips32r6)) {
749 mult(rs, at);
750 mflo(rd_lo);
751 mfhi(rd_hi);
752 } else {
753 if (rd_lo.is(rs)) {
754 DCHECK(!rd_hi.is(rs));
755 DCHECK(!rd_hi.is(at) && !rd_lo.is(at));
756 muh(rd_hi, rs, at);
757 mul(rd_lo, rs, at);
758 } else {
759 DCHECK(!rd_hi.is(at) && !rd_lo.is(at));
760 mul(rd_lo, rs, at);
761 muh(rd_hi, rs, at);
762 }
763 }
764 }
765}
766
767
// Emits code computing rd = high 32 bits of the signed product rs * rt
// (MULT + MFHI before r6, single MUH on r6).
void MacroAssembler::Mulh(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    if (!IsMipsArchVariant(kMips32r6)) {
      mult(rs, rt.rm());
      mfhi(rd);
    } else {
      muh(rd, rs, rt.rm());
    }
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    if (!IsMipsArchVariant(kMips32r6)) {
      mult(rs, at);
      mfhi(rd);
    } else {
      muh(rd, rs, at);
    }
  }
}
788
789
Andrei Popescu31002712010-02-23 13:46:05 +0000790void MacroAssembler::Mult(Register rs, const Operand& rt) {
791 if (rt.is_reg()) {
792 mult(rs, rt.rm());
793 } else {
794 // li handles the relocation.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000795 DCHECK(!rs.is(at));
Andrei Popescu31002712010-02-23 13:46:05 +0000796 li(at, rt);
797 mult(rs, at);
798 }
799}
800
801
// Emits code computing rd = high 32 bits of the unsigned product rs * rt
// (MULTU + MFHI before r6, single MUHU on r6).
void MacroAssembler::Mulhu(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    if (!IsMipsArchVariant(kMips32r6)) {
      multu(rs, rt.rm());
      mfhi(rd);
    } else {
      muhu(rd, rs, rt.rm());
    }
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    if (!IsMipsArchVariant(kMips32r6)) {
      multu(rs, at);
      mfhi(rd);
    } else {
      muhu(rd, rs, at);
    }
  }
}
822
823
Andrei Popescu31002712010-02-23 13:46:05 +0000824void MacroAssembler::Multu(Register rs, const Operand& rt) {
825 if (rt.is_reg()) {
826 multu(rs, rt.rm());
827 } else {
828 // li handles the relocation.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000829 DCHECK(!rs.is(at));
Andrei Popescu31002712010-02-23 13:46:05 +0000830 li(at, rt);
831 multu(rs, at);
832 }
833}
834
835
836void MacroAssembler::Div(Register rs, const Operand& rt) {
837 if (rt.is_reg()) {
838 div(rs, rt.rm());
839 } else {
840 // li handles the relocation.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000841 DCHECK(!rs.is(at));
Andrei Popescu31002712010-02-23 13:46:05 +0000842 li(at, rt);
843 div(rs, at);
844 }
845}
846
847
// Emits code computing both the quotient (res) and the remainder (rem) of
// the signed division rs / rt. Pre-r6 uses DIV + MFLO/MFHI; r6 uses the
// separate three-operand DIV/MOD instructions.
void MacroAssembler::Div(Register rem, Register res,
                         Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    if (!IsMipsArchVariant(kMips32r6)) {
      div(rs, rt.rm());
      mflo(res);
      mfhi(rem);
    } else {
      div(res, rs, rt.rm());
      mod(rem, rs, rt.rm());
    }
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    if (!IsMipsArchVariant(kMips32r6)) {
      div(rs, at);
      mflo(res);
      mfhi(rem);
    } else {
      div(res, rs, at);
      mod(rem, rs, at);
    }
  }
}
873
874
// Emits code computing res = quotient of the signed division rs / rt.
void MacroAssembler::Div(Register res, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    if (!IsMipsArchVariant(kMips32r6)) {
      div(rs, rt.rm());
      mflo(res);
    } else {
      div(res, rs, rt.rm());
    }
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    if (!IsMipsArchVariant(kMips32r6)) {
      div(rs, at);
      mflo(res);
    } else {
      div(res, rs, at);
    }
  }
}
895
896
// Emits code computing rd = remainder of the signed division rs / rt
// (DIV + MFHI before r6, single MOD on r6).
void MacroAssembler::Mod(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    if (!IsMipsArchVariant(kMips32r6)) {
      div(rs, rt.rm());
      mfhi(rd);
    } else {
      mod(rd, rs, rt.rm());
    }
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    if (!IsMipsArchVariant(kMips32r6)) {
      div(rs, at);
      mfhi(rd);
    } else {
      mod(rd, rs, at);
    }
  }
}
917
918
// Emits code computing rd = remainder of the unsigned division rs / rt
// (DIVU + MFHI before r6, single MODU on r6).
void MacroAssembler::Modu(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    if (!IsMipsArchVariant(kMips32r6)) {
      divu(rs, rt.rm());
      mfhi(rd);
    } else {
      modu(rd, rs, rt.rm());
    }
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    if (!IsMipsArchVariant(kMips32r6)) {
      divu(rs, at);
      mfhi(rd);
    } else {
      modu(rd, rs, at);
    }
  }
}
939
940
Andrei Popescu31002712010-02-23 13:46:05 +0000941void MacroAssembler::Divu(Register rs, const Operand& rt) {
942 if (rt.is_reg()) {
943 divu(rs, rt.rm());
944 } else {
945 // li handles the relocation.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000946 DCHECK(!rs.is(at));
Andrei Popescu31002712010-02-23 13:46:05 +0000947 li(at, rt);
948 divu(rs, at);
949 }
950}
951
952
// Emits code computing res = quotient of the unsigned division rs / rt.
void MacroAssembler::Divu(Register res, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    if (!IsMipsArchVariant(kMips32r6)) {
      divu(rs, rt.rm());
      mflo(res);
    } else {
      divu(res, rs, rt.rm());
    }
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    if (!IsMipsArchVariant(kMips32r6)) {
      divu(rs, at);
      mflo(res);
    } else {
      divu(res, rs, at);
    }
  }
}
973
974
// Emits rd = rs & rt. An immediate that fits ANDI's zero-extended 16-bit
// field and needs no relocation is encoded inline; anything else is first
// materialized in 'at'.
void MacroAssembler::And(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    and_(rd, rs, rt.rm());
  } else {
    if (is_uint16(rt.imm32_) && !MustUseReg(rt.rmode_)) {
      andi(rd, rs, rt.imm32_);
    } else {
      // li handles the relocation.
      DCHECK(!rs.is(at));
      li(at, rt);
      and_(rd, rs, at);
    }
  }
}
989
990
// Emits rd = rs | rt. An immediate that fits ORI's zero-extended 16-bit
// field and needs no relocation is encoded inline; anything else is first
// materialized in 'at'.
void MacroAssembler::Or(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    or_(rd, rs, rt.rm());
  } else {
    if (is_uint16(rt.imm32_) && !MustUseReg(rt.rmode_)) {
      ori(rd, rs, rt.imm32_);
    } else {
      // li handles the relocation.
      DCHECK(!rs.is(at));
      li(at, rt);
      or_(rd, rs, at);
    }
  }
}
1005
1006
// Emits rd = rs ^ rt. An immediate that fits XORI's zero-extended 16-bit
// field and needs no relocation is encoded inline; anything else is first
// materialized in 'at'.
void MacroAssembler::Xor(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    xor_(rd, rs, rt.rm());
  } else {
    if (is_uint16(rt.imm32_) && !MustUseReg(rt.rmode_)) {
      xori(rd, rs, rt.imm32_);
    } else {
      // li handles the relocation.
      DCHECK(!rs.is(at));
      li(at, rt);
      xor_(rd, rs, at);
    }
  }
}
1021
1022
1023void MacroAssembler::Nor(Register rd, Register rs, const Operand& rt) {
1024 if (rt.is_reg()) {
1025 nor(rd, rs, rt.rm());
1026 } else {
1027 // li handles the relocation.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001028 DCHECK(!rs.is(at));
Andrei Popescu31002712010-02-23 13:46:05 +00001029 li(at, rt);
1030 nor(rd, rs, at);
1031 }
1032}
1033
1034
// Emits rs = rt ^ -1.
// NOTE(review): despite the name, XOR with an all-ones mask computes the
// bitwise complement (~rt), not the arithmetic negation (-rt, which would
// be subu(rs, zero_reg, rt)). Confirm callers expect one's complement
// before changing or renaming this.
void MacroAssembler::Neg(Register rs, const Operand& rt) {
  DCHECK(rt.is_reg());
  DCHECK(!at.is(rs));
  DCHECK(!at.is(rt.rm()));
  li(at, -1);
  xor_(rs, rt.rm(), at);
}
1042
1043
// Emits rd = (rs < rt) ? 1 : 0 using a signed comparison. SLTI's 16-bit
// immediate is sign-extended by the hardware, so is_int16 guards the
// inline encoding.
void MacroAssembler::Slt(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    slt(rd, rs, rt.rm());
  } else {
    if (is_int16(rt.imm32_) && !MustUseReg(rt.rmode_)) {
      slti(rd, rs, rt.imm32_);
    } else {
      // li handles the relocation.
      DCHECK(!rs.is(at));
      li(at, rt);
      slt(rd, rs, at);
    }
  }
}
1058
1059
1060void MacroAssembler::Sltu(Register rd, Register rs, const Operand& rt) {
1061 if (rt.is_reg()) {
1062 sltu(rd, rs, rt.rm());
1063 } else {
Steve Block44f0eee2011-05-26 01:26:41 +01001064 if (is_uint16(rt.imm32_) && !MustUseReg(rt.rmode_)) {
Andrei Popescu31002712010-02-23 13:46:05 +00001065 sltiu(rd, rs, rt.imm32_);
1066 } else {
1067 // li handles the relocation.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001068 DCHECK(!rs.is(at));
Andrei Popescu31002712010-02-23 13:46:05 +00001069 li(at, rt);
1070 sltu(rd, rs, at);
1071 }
1072 }
1073}
1074
1075
// Emits rd = rs rotated right by rt bit positions (register or immediate).
// r2/r6 have native ROTR/ROTRV; older variants synthesize the rotate from
// two shifts OR-ed together, clobbering 'at'.
void MacroAssembler::Ror(Register rd, Register rs, const Operand& rt) {
  if (IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r6)) {
    if (rt.is_reg()) {
      rotrv(rd, rs, rt.rm());
    } else {
      rotr(rd, rs, rt.imm32_);
    }
  } else {
    if (rt.is_reg()) {
      // rd = (rs >> rt) | (rs << (32 - rt)); SLLV only uses the low five
      // bits of its shift amount, so negating rt yields 32 - rt.
      subu(at, zero_reg, rt.rm());
      sllv(at, rs, at);
      srlv(rd, rs, rt.rm());
      or_(rd, rd, at);
    } else {
      if (rt.imm32_ == 0) {
        srl(rd, rs, 0);  // Rotate by zero: acts as a plain move.
      } else {
        srl(at, rs, rt.imm32_);
        sll(rd, rs, (0x20 - rt.imm32_) & 0x1f);
        or_(rd, rd, at);
      }
    }
  }
}
1100
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001101
1102void MacroAssembler::Pref(int32_t hint, const MemOperand& rs) {
1103 if (IsMipsArchVariant(kLoongson)) {
1104 lw(zero_reg, rs);
1105 } else {
1106 pref(hint, rs);
1107 }
1108}
1109
1110
// Emits rd = rt + (rs << sa). Uses the native LSA instruction on r6 when
// the shift amount fits; otherwise falls back to SLL + ADDU.
void MacroAssembler::Lsa(Register rd, Register rt, Register rs, uint8_t sa,
                         Register scratch) {
  if (IsMipsArchVariant(kMips32r6) && sa <= 4) {
    lsa(rd, rt, rs, sa);
  } else {
    // If rd aliases rt, shifting into rd would destroy the addend before
    // the ADDU reads it, so shift into the caller-provided scratch instead.
    Register tmp = rd.is(rt) ? scratch : rd;
    DCHECK(!tmp.is(rt));
    sll(tmp, rs, sa);
    Addu(rd, rt, tmp);
  }
}
1122
1123
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001124// ------------Pseudo-instructions-------------
1125
// Unaligned word load: the LWR/LWL pair over rs..rs+3 assembles a full
// word in rd from a possibly unaligned address.
// NOTE(review): the offsets (LWR at +0, LWL at +3) match a little-endian
// target — confirm against the build configuration.
void MacroAssembler::Ulw(Register rd, const MemOperand& rs) {
  lwr(rd, rs);
  lwl(rd, MemOperand(rs.rm(), rs.offset() + 3));
}
1130
1131
// Unaligned word store: the SWR/SWL pair over rs..rs+3 writes rd to a
// possibly unaligned address.
// NOTE(review): the offsets (SWR at +0, SWL at +3) match a little-endian
// target — confirm against the build configuration.
void MacroAssembler::Usw(Register rd, const MemOperand& rs) {
  swr(rd, rs);
  swl(rd, MemOperand(rs.rm(), rs.offset() + 3));
}
1136
1137
// Loads a Handle<Object> into dst. Smis are loaded as plain immediates.
// New-space heap objects are loaded indirectly through a freshly allocated
// Cell (presumably so the GC can relocate the value; the cell's current
// contents are read at runtime); other objects are embedded directly.
void MacroAssembler::li(Register dst, Handle<Object> value, LiFlags mode) {
  AllowDeferredHandleDereference smi_check;
  if (value->IsSmi()) {
    li(dst, Operand(value), mode);
  } else {
    DCHECK(value->IsHeapObject());
    if (isolate()->heap()->InNewSpace(*value)) {
      Handle<Cell> cell = isolate()->factory()->NewCell(value);
      li(dst, Operand(cell));
      lw(dst, FieldMemOperand(dst, Cell::kValueOffset));
    } else {
      li(dst, Operand(value));
    }
  }
}
1153
Steve Block44f0eee2011-05-26 01:26:41 +01001154
// Loads the 32-bit immediate operand j into rd. With OPTIMIZE_SIZE and no
// relocation the shortest encoding (one or two instructions) is chosen;
// otherwise a fixed two-instruction LUI/ORI pair is emitted so the site
// can later be patched with an arbitrary 32-bit value.
void MacroAssembler::li(Register rd, Operand j, LiFlags mode) {
  DCHECK(!j.is_reg());
  BlockTrampolinePoolScope block_trampoline_pool(this);
  if (!MustUseReg(j.rmode_) && mode == OPTIMIZE_SIZE) {
    // Normal load of an immediate value which does not need Relocation Info.
    if (is_int16(j.imm32_)) {
      addiu(rd, zero_reg, j.imm32_);  // Sign-extended 16-bit immediate.
    } else if (!(j.imm32_ & kHiMask)) {
      ori(rd, zero_reg, j.imm32_);  // Zero-extended 16-bit immediate.
    } else if (!(j.imm32_ & kImm16Mask)) {
      lui(rd, (j.imm32_ >> kLuiShift) & kImm16Mask);  // Upper half only.
    } else {
      lui(rd, (j.imm32_ >> kLuiShift) & kImm16Mask);
      ori(rd, rd, (j.imm32_ & kImm16Mask));
    }
  } else {
    if (MustUseReg(j.rmode_)) {
      RecordRelocInfo(j.rmode_, j.imm32_);
    }
    // We always need the same number of instructions as we may need to patch
    // this code to load another value which may need 2 instructions to load.
    lui(rd, (j.imm32_ >> kLuiShift) & kImm16Mask);
    ori(rd, rd, (j.imm32_ & kImm16Mask));
  }
}
1180
1181
Andrei Popescu31002712010-02-23 13:46:05 +00001182void MacroAssembler::MultiPush(RegList regs) {
Ben Murdoch589d6972011-11-30 16:04:58 +00001183 int16_t num_to_push = NumberOfBitsSet(regs);
1184 int16_t stack_offset = num_to_push * kPointerSize;
Andrei Popescu31002712010-02-23 13:46:05 +00001185
Ben Murdoch589d6972011-11-30 16:04:58 +00001186 Subu(sp, sp, Operand(stack_offset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001187 for (int16_t i = kNumRegisters - 1; i >= 0; i--) {
Andrei Popescu31002712010-02-23 13:46:05 +00001188 if ((regs & (1 << i)) != 0) {
Ben Murdoch589d6972011-11-30 16:04:58 +00001189 stack_offset -= kPointerSize;
1190 sw(ToRegister(i), MemOperand(sp, stack_offset));
Andrei Popescu31002712010-02-23 13:46:05 +00001191 }
1192 }
1193}
1194
1195
1196void MacroAssembler::MultiPushReversed(RegList regs) {
Ben Murdoch589d6972011-11-30 16:04:58 +00001197 int16_t num_to_push = NumberOfBitsSet(regs);
1198 int16_t stack_offset = num_to_push * kPointerSize;
Andrei Popescu31002712010-02-23 13:46:05 +00001199
Ben Murdoch589d6972011-11-30 16:04:58 +00001200 Subu(sp, sp, Operand(stack_offset));
Steve Block6ded16b2010-05-10 14:33:55 +01001201 for (int16_t i = 0; i < kNumRegisters; i++) {
Andrei Popescu31002712010-02-23 13:46:05 +00001202 if ((regs & (1 << i)) != 0) {
Ben Murdoch589d6972011-11-30 16:04:58 +00001203 stack_offset -= kPointerSize;
1204 sw(ToRegister(i), MemOperand(sp, stack_offset));
Andrei Popescu31002712010-02-23 13:46:05 +00001205 }
1206 }
1207}
1208
1209
1210void MacroAssembler::MultiPop(RegList regs) {
Ben Murdoch589d6972011-11-30 16:04:58 +00001211 int16_t stack_offset = 0;
Andrei Popescu31002712010-02-23 13:46:05 +00001212
Steve Block6ded16b2010-05-10 14:33:55 +01001213 for (int16_t i = 0; i < kNumRegisters; i++) {
Andrei Popescu31002712010-02-23 13:46:05 +00001214 if ((regs & (1 << i)) != 0) {
Ben Murdoch589d6972011-11-30 16:04:58 +00001215 lw(ToRegister(i), MemOperand(sp, stack_offset));
1216 stack_offset += kPointerSize;
Andrei Popescu31002712010-02-23 13:46:05 +00001217 }
1218 }
Ben Murdoch589d6972011-11-30 16:04:58 +00001219 addiu(sp, sp, stack_offset);
Andrei Popescu31002712010-02-23 13:46:05 +00001220}
1221
1222
1223void MacroAssembler::MultiPopReversed(RegList regs) {
Ben Murdoch589d6972011-11-30 16:04:58 +00001224 int16_t stack_offset = 0;
Andrei Popescu31002712010-02-23 13:46:05 +00001225
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001226 for (int16_t i = kNumRegisters - 1; i >= 0; i--) {
Andrei Popescu31002712010-02-23 13:46:05 +00001227 if ((regs & (1 << i)) != 0) {
Ben Murdoch589d6972011-11-30 16:04:58 +00001228 lw(ToRegister(i), MemOperand(sp, stack_offset));
1229 stack_offset += kPointerSize;
Andrei Popescu31002712010-02-23 13:46:05 +00001230 }
1231 }
Ben Murdoch589d6972011-11-30 16:04:58 +00001232 addiu(sp, sp, stack_offset);
1233}
1234
1235
// Pushes every FPU register named in |regs| with a single sp adjustment;
// each register occupies a kDoubleSize slot, lower-numbered registers at
// lower addresses.
void MacroAssembler::MultiPushFPU(RegList regs) {
  int16_t num_to_push = NumberOfBitsSet(regs);
  int16_t stack_offset = num_to_push * kDoubleSize;

  Subu(sp, sp, Operand(stack_offset));
  for (int16_t i = kNumRegisters - 1; i >= 0; i--) {
    if ((regs & (1 << i)) != 0) {
      stack_offset -= kDoubleSize;
      sdc1(FPURegister::from_code(i), MemOperand(sp, stack_offset));
    }
  }
}
1248
1249
// Like MultiPushFPU, but stores the registers in the opposite order so
// higher-numbered registers end up at lower stack addresses.
void MacroAssembler::MultiPushReversedFPU(RegList regs) {
  int16_t num_to_push = NumberOfBitsSet(regs);
  int16_t stack_offset = num_to_push * kDoubleSize;

  Subu(sp, sp, Operand(stack_offset));
  for (int16_t i = 0; i < kNumRegisters; i++) {
    if ((regs & (1 << i)) != 0) {
      stack_offset -= kDoubleSize;
      sdc1(FPURegister::from_code(i), MemOperand(sp, stack_offset));
    }
  }
}
1262
1263
// Pops the FPU registers named in |regs| (inverse of MultiPushFPU), then
// releases the stack space in one step.
void MacroAssembler::MultiPopFPU(RegList regs) {
  int16_t stack_offset = 0;

  for (int16_t i = 0; i < kNumRegisters; i++) {
    if ((regs & (1 << i)) != 0) {
      ldc1(FPURegister::from_code(i), MemOperand(sp, stack_offset));
      stack_offset += kDoubleSize;
    }
  }
  addiu(sp, sp, stack_offset);
}
1275
1276
// Pops the FPU registers named in |regs| in the opposite order (inverse of
// MultiPushReversedFPU), then releases the stack space in one step.
void MacroAssembler::MultiPopReversedFPU(RegList regs) {
  int16_t stack_offset = 0;

  for (int16_t i = kNumRegisters - 1; i >= 0; i--) {
    if ((regs & (1 << i)) != 0) {
      ldc1(FPURegister::from_code(i), MemOperand(sp, stack_offset));
      stack_offset += kDoubleSize;
    }
  }
  addiu(sp, sp, stack_offset);
}
1288
1289
// Extracts a |size|-bit field starting at bit |pos| of rs into the low
// bits of rt, zero-filling the upper bits. Uses the native EXT on r2/r6;
// otherwise a left-then-right shift pair isolates the field.
void MacroAssembler::Ext(Register rt,
                         Register rs,
                         uint16_t pos,
                         uint16_t size) {
  DCHECK(pos < 32);
  DCHECK(pos + size < 33);

  if (IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r6)) {
    ext_(rt, rs, pos, size);
  } else {
    // Move rs to rt and shift it left then right to get the
    // desired bitfield on the right side and zeroes on the left.
    int shift_left = 32 - (pos + size);
    sll(rt, rs, shift_left);  // Acts as a move if shift_left == 0.

    int shift_right = 32 - size;
    if (shift_right > 0) {
      srl(rt, rt, shift_right);
    }
  }
}
1311
1312
// Inserts the low |size| bits of rs into rt at bit position |pos|, leaving
// the other bits of rt unchanged. Uses the native INS on r2/r6; the
// fallback builds a field mask in 'at' and merges through t8 (both
// scratch registers are clobbered).
void MacroAssembler::Ins(Register rt,
                         Register rs,
                         uint16_t pos,
                         uint16_t size) {
  DCHECK(pos < 32);
  DCHECK(pos + size <= 32);
  DCHECK(size != 0);

  if (IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r6)) {
    ins_(rt, rs, pos, size);
  } else {
    DCHECK(!rt.is(t8) && !rs.is(t8));
    Subu(at, zero_reg, Operand(1));  // at = all ones.
    srl(at, at, 32 - size);          // at = low |size| bits set.
    and_(t8, rs, at);                // t8 = source field.
    sll(t8, t8, pos);                // Shift field into position.
    sll(at, at, pos);                // Mask into position...
    nor(at, at, zero_reg);           // ...then invert it.
    and_(at, rt, at);                // Clear the field in rt.
    or_(rt, t8, at);                 // Merge the new field in.
  }
}
1335
1336
// Converts the unsigned 32-bit integer in rs to a double-precision value
// in fd.
void MacroAssembler::Cvt_d_uw(FPURegister fd, Register rs,
                              FPURegister scratch) {
  // In FP64 mode we do the conversion from a 64-bit integer: zero-extend
  // rs into the full register and use cvt.d.l.
  if (IsFp64Mode()) {
    mtc1(rs, scratch);
    Mthc1(zero_reg, scratch);
    cvt_d_l(fd, scratch);
  } else {
    // Convert rs to a FP value in fd.
    DCHECK(!fd.is(scratch));
    DCHECK(!rs.is(at));

    Label msb_clear, conversion_done;
    // For a value which is < 2^31, regard it as a signed positive word.
    Branch(&msb_clear, ge, rs, Operand(zero_reg), USE_DELAY_SLOT);
    mtc1(rs, fd);  // Executed in the delay slot for both paths.

    li(at, 0x41F00000);  // High word of the double 2^32.

    // For unsigned inputs > 2^31, we convert to double as a signed int32,
    // then add 2^32 to move it back to the unsigned value range.
    mtc1(zero_reg, scratch);
    Mthc1(at, scratch);  // scratch = 2^32 as a double.

    cvt_d_w(fd, fd);

    Branch(USE_DELAY_SLOT, &conversion_done);
    add_d(fd, fd, scratch);  // Delay slot: bias the result by 2^32.

    bind(&msb_clear);
    cvt_d_w(fd, fd);

    bind(&conversion_done);
  }
}
1372
1373
// Truncates the double in fs to an unsigned 32-bit integer; the integer
// result is left as a bit pattern in fd. Clobbers t8 and scratch.
void MacroAssembler::Trunc_uw_d(FPURegister fd,
                                FPURegister fs,
                                FPURegister scratch) {
  Trunc_uw_d(fs, t8, scratch);
  mtc1(t8, fd);
}
1380
// Truncates the single-precision value in fs to an unsigned 32-bit
// integer; the result is left as a bit pattern in fd. Clobbers t8 and
// scratch.
void MacroAssembler::Trunc_uw_s(FPURegister fd, FPURegister fs,
                                FPURegister scratch) {
  Trunc_uw_s(fs, t8, scratch);
  mtc1(t8, fd);
}
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001386
// Truncates the double in fs toward zero to a 32-bit integer in fd.
// On Loongson, when fd aliases fs the high word of fs is saved in t8 and
// restored afterwards — presumably a workaround for the instruction
// clobbering the source's high word on that core (confirm against errata).
void MacroAssembler::Trunc_w_d(FPURegister fd, FPURegister fs) {
  if (IsMipsArchVariant(kLoongson) && fd.is(fs)) {
    Mfhc1(t8, fs);
    trunc_w_d(fd, fs);
    Mthc1(t8, fs);
  } else {
    trunc_w_d(fd, fs);
  }
}
1396
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001397
// Rounds the double in fs to the nearest 32-bit integer in fd. Same
// Loongson save/restore of the source's high word as in Trunc_w_d.
void MacroAssembler::Round_w_d(FPURegister fd, FPURegister fs) {
  if (IsMipsArchVariant(kLoongson) && fd.is(fs)) {
    Mfhc1(t8, fs);
    round_w_d(fd, fs);
    Mthc1(t8, fs);
  } else {
    round_w_d(fd, fs);
  }
}
1407
1408
// Rounds the double in fs toward -infinity to a 32-bit integer in fd.
// Same Loongson save/restore of the source's high word as in Trunc_w_d.
void MacroAssembler::Floor_w_d(FPURegister fd, FPURegister fs) {
  if (IsMipsArchVariant(kLoongson) && fd.is(fs)) {
    Mfhc1(t8, fs);
    floor_w_d(fd, fs);
    Mthc1(t8, fs);
  } else {
    floor_w_d(fd, fs);
  }
}
1418
1419
// Rounds the double in fs toward +infinity to a 32-bit integer in fd.
// Same Loongson save/restore of the source's high word as in Trunc_w_d.
void MacroAssembler::Ceil_w_d(FPURegister fd, FPURegister fs) {
  if (IsMipsArchVariant(kLoongson) && fd.is(fs)) {
    Mfhc1(t8, fs);
    ceil_w_d(fd, fs);
    Mthc1(t8, fs);
  } else {
    ceil_w_d(fd, fs);
  }
}
1429
Steve Block44f0eee2011-05-26 01:26:41 +01001430
// Truncates the double in fd to an unsigned 32-bit integer in rs.
// Values >= 2^31 (which trunc.w.d cannot represent as a signed int32) are
// handled by subtracting 2^31 first and OR-ing the sign bit back in.
// Clobbers 'at' and scratch.
void MacroAssembler::Trunc_uw_d(FPURegister fd,
                                Register rs,
                                FPURegister scratch) {
  DCHECK(!fd.is(scratch));
  DCHECK(!rs.is(at));

  // Load 2^31 into scratch as a double (0x41E00000 is its high word).
  li(at, 0x41E00000);
  mtc1(zero_reg, scratch);
  Mthc1(at, scratch);
  // Test if scratch > fd.
  // If fd < 2^31 we can convert it normally.
  Label simple_convert;
  BranchF(&simple_convert, NULL, lt, fd, scratch);

  // First we subtract 2^31 from fd, then trunc it to rs
  // and add 2^31 to rs.
  sub_d(scratch, fd, scratch);
  trunc_w_d(scratch, scratch);
  mfc1(rs, scratch);
  Or(rs, rs, 1 << 31);  // Re-apply the subtracted 2^31.

  Label done;
  Branch(&done);
  // Simple conversion.
  bind(&simple_convert);
  trunc_w_d(scratch, fd);
  mfc1(rs, scratch);

  bind(&done);
}
1462
// Truncates the single-precision value in fd to an unsigned 32-bit
// integer in rs. Same 2^31 bias trick as Trunc_uw_d, using the float
// constant 0x4F000000 (= 2^31 as a single). Clobbers 'at' and scratch.
void MacroAssembler::Trunc_uw_s(FPURegister fd, Register rs,
                                FPURegister scratch) {
  DCHECK(!fd.is(scratch));
  DCHECK(!rs.is(at));

  // Load 2^31 into scratch as its float representation.
  li(at, 0x4F000000);
  mtc1(at, scratch);
  // Test if scratch > fd.
  // If fd < 2^31 we can convert it normally.
  Label simple_convert;
  BranchF32(&simple_convert, NULL, lt, fd, scratch);

  // First we subtract 2^31 from fd, then trunc it to rs
  // and add 2^31 to rs.
  sub_s(scratch, fd, scratch);
  trunc_w_s(scratch, scratch);
  mfc1(rs, scratch);
  Or(rs, rs, 1 << 31);  // Re-apply the subtracted 2^31.

  Label done;
  Branch(&done);
  // Simple conversion.
  bind(&simple_convert);
  trunc_w_s(scratch, fd);
  mfc1(rs, scratch);

  bind(&done);
}
Steve Block44f0eee2011-05-26 01:26:41 +01001492
// Moves rt into the upper 32 bits of the double-precision register fs.
// In FP32 mode the upper half lives in the paired (odd) register; FP64 and
// FPXX modes use the MTHC1 instruction, which requires r2/r6.
void MacroAssembler::Mthc1(Register rt, FPURegister fs) {
  if (IsFp32Mode()) {
    mtc1(rt, fs.high());
  } else {
    DCHECK(IsFp64Mode() || IsFpxxMode());
    DCHECK(IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r6));
    mthc1(rt, fs);
  }
}
1502
1503
// Moves the upper 32 bits of the double-precision register fs into rt.
// In FP32 mode the upper half lives in the paired (odd) register; FP64 and
// FPXX modes use the MFHC1 instruction, which requires r2/r6.
void MacroAssembler::Mfhc1(Register rt, FPURegister fs) {
  if (IsFp32Mode()) {
    mfc1(rt, fs.high());
  } else {
    DCHECK(IsFp64Mode() || IsFpxxMode());
    DCHECK(IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r6));
    mfhc1(rt, fs);
  }
}
1513
1514
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001515void MacroAssembler::BranchFCommon(SecondaryField sizeField, Label* target,
1516 Label* nan, Condition cond, FPURegister cmp1,
1517 FPURegister cmp2, BranchDelaySlot bd) {
1518 {
1519 BlockTrampolinePoolScope block_trampoline_pool(this);
1520 if (cond == al) {
1521 Branch(bd, target);
1522 return;
1523 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001524
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001525 if (IsMipsArchVariant(kMips32r6)) {
1526 sizeField = sizeField == D ? L : W;
1527 }
1528 DCHECK(nan || target);
1529 // Check for unordered (NaN) cases.
1530 if (nan) {
1531 bool long_branch =
1532 nan->is_bound() ? is_near(nan) : is_trampoline_emitted();
1533 if (!IsMipsArchVariant(kMips32r6)) {
1534 if (long_branch) {
1535 Label skip;
1536 c(UN, sizeField, cmp1, cmp2);
1537 bc1f(&skip);
1538 nop();
1539 BranchLong(nan, bd);
1540 bind(&skip);
1541 } else {
1542 c(UN, sizeField, cmp1, cmp2);
1543 bc1t(nan);
1544 if (bd == PROTECT) {
1545 nop();
1546 }
1547 }
1548 } else {
1549 // Use kDoubleCompareReg for comparison result. It has to be unavailable
1550 // to lithium register allocator.
1551 DCHECK(!cmp1.is(kDoubleCompareReg) && !cmp2.is(kDoubleCompareReg));
1552 if (long_branch) {
1553 Label skip;
1554 cmp(UN, sizeField, kDoubleCompareReg, cmp1, cmp2);
1555 bc1eqz(&skip, kDoubleCompareReg);
1556 nop();
1557 BranchLong(nan, bd);
1558 bind(&skip);
1559 } else {
1560 cmp(UN, sizeField, kDoubleCompareReg, cmp1, cmp2);
1561 bc1nez(nan, kDoubleCompareReg);
1562 if (bd == PROTECT) {
1563 nop();
1564 }
1565 }
1566 }
1567 }
1568
1569 if (target) {
1570 bool long_branch =
1571 target->is_bound() ? is_near(target) : is_trampoline_emitted();
1572 if (long_branch) {
1573 Label skip;
1574 Condition neg_cond = NegateFpuCondition(cond);
1575 BranchShortF(sizeField, &skip, neg_cond, cmp1, cmp2, bd);
1576 BranchLong(target, bd);
1577 bind(&skip);
1578 } else {
1579 BranchShortF(sizeField, target, cond, cmp1, cmp2, bd);
1580 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001581 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001582 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001583}
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001584
// Emits a short-range floating-point conditional branch to |target| on
// condition |cc| comparing cmp1 with cmp2. Pre-r6 uses C.cond.fmt plus
// BC1T/BC1F on the FP condition flag; r6 uses CMP.cond.fmt writing its
// result into kDoubleCompareReg plus BC1NEZ/BC1EQZ. Conditions such as
// gt/ge are encoded as the inverse of ULE/ULT so a branch is not taken on
// NaN inputs; callers are expected to have handled unordered cases.
void MacroAssembler::BranchShortF(SecondaryField sizeField, Label* target,
                                  Condition cc, FPURegister cmp1,
                                  FPURegister cmp2, BranchDelaySlot bd) {
  if (!IsMipsArchVariant(kMips32r6)) {
    BlockTrampolinePoolScope block_trampoline_pool(this);
    if (target) {
      // Here NaN cases were either handled by this function or are assumed to
      // have been handled by the caller.
      switch (cc) {
        case lt:
          c(OLT, sizeField, cmp1, cmp2);
          bc1t(target);
          break;
        case ult:
          c(ULT, sizeField, cmp1, cmp2);
          bc1t(target);
          break;
        case gt:  // Branch when NOT (unordered or <=).
          c(ULE, sizeField, cmp1, cmp2);
          bc1f(target);
          break;
        case ugt:
          c(OLE, sizeField, cmp1, cmp2);
          bc1f(target);
          break;
        case ge:  // Branch when NOT (unordered or <).
          c(ULT, sizeField, cmp1, cmp2);
          bc1f(target);
          break;
        case uge:
          c(OLT, sizeField, cmp1, cmp2);
          bc1f(target);
          break;
        case le:
          c(OLE, sizeField, cmp1, cmp2);
          bc1t(target);
          break;
        case ule:
          c(ULE, sizeField, cmp1, cmp2);
          bc1t(target);
          break;
        case eq:
          c(EQ, sizeField, cmp1, cmp2);
          bc1t(target);
          break;
        case ueq:
          c(UEQ, sizeField, cmp1, cmp2);
          bc1t(target);
          break;
        case ne:  // Unordered or not equal.
          c(EQ, sizeField, cmp1, cmp2);
          bc1f(target);
          break;
        case ogl:  // Ordered and not equal.
          c(UEQ, sizeField, cmp1, cmp2);
          bc1f(target);
          break;
        default:
          CHECK(0);
      }
    }
  } else {
    BlockTrampolinePoolScope block_trampoline_pool(this);
    if (target) {
      // Here NaN cases were either handled by this function or are assumed to
      // have been handled by the caller.
      // Unsigned conditions are treated as their signed counterpart.
      // Use kDoubleCompareReg for comparison result, it is
      // valid in fp64 (FR = 1) mode which is implied for mips32r6.
      DCHECK(!cmp1.is(kDoubleCompareReg) && !cmp2.is(kDoubleCompareReg));
      switch (cc) {
        case lt:
          cmp(OLT, sizeField, kDoubleCompareReg, cmp1, cmp2);
          bc1nez(target, kDoubleCompareReg);
          break;
        case ult:
          cmp(ULT, sizeField, kDoubleCompareReg, cmp1, cmp2);
          bc1nez(target, kDoubleCompareReg);
          break;
        case gt:  // Branch when NOT (unordered or <=).
          cmp(ULE, sizeField, kDoubleCompareReg, cmp1, cmp2);
          bc1eqz(target, kDoubleCompareReg);
          break;
        case ugt:
          cmp(OLE, sizeField, kDoubleCompareReg, cmp1, cmp2);
          bc1eqz(target, kDoubleCompareReg);
          break;
        case ge:  // Branch when NOT (unordered or <).
          cmp(ULT, sizeField, kDoubleCompareReg, cmp1, cmp2);
          bc1eqz(target, kDoubleCompareReg);
          break;
        case uge:
          cmp(OLT, sizeField, kDoubleCompareReg, cmp1, cmp2);
          bc1eqz(target, kDoubleCompareReg);
          break;
        case le:
          cmp(OLE, sizeField, kDoubleCompareReg, cmp1, cmp2);
          bc1nez(target, kDoubleCompareReg);
          break;
        case ule:
          cmp(ULE, sizeField, kDoubleCompareReg, cmp1, cmp2);
          bc1nez(target, kDoubleCompareReg);
          break;
        case eq:
          cmp(EQ, sizeField, kDoubleCompareReg, cmp1, cmp2);
          bc1nez(target, kDoubleCompareReg);
          break;
        case ueq:
          cmp(UEQ, sizeField, kDoubleCompareReg, cmp1, cmp2);
          bc1nez(target, kDoubleCompareReg);
          break;
        case ne:  // Unordered or not equal.
          cmp(EQ, sizeField, kDoubleCompareReg, cmp1, cmp2);
          bc1eqz(target, kDoubleCompareReg);
          break;
        case ogl:  // Ordered and not equal.
          cmp(UEQ, sizeField, kDoubleCompareReg, cmp1, cmp2);
          bc1eqz(target, kDoubleCompareReg);
          break;
        default:
          CHECK(0);
      }
    }
  }
  if (bd == PROTECT) {
    nop();
  }
}
1713
1714
// Writes |src_low| into the low 32 bits of double register |dst|, leaving the
// high 32 bits unchanged.
void MacroAssembler::FmoveLow(FPURegister dst, Register src_low) {
  if (IsFp32Mode()) {
    // FR=0 mode: a plain mtc1 writes only the low word of the pair.
    mtc1(src_low, dst);
  } else {
    DCHECK(IsFp64Mode() || IsFpxxMode());
    DCHECK(IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r6));
    DCHECK(!src_low.is(at));
    // FR=1 (or fpxx): mtc1 would clobber the full 64-bit register, so save
    // the high word in at, write the low word, then restore the high word.
    mfhc1(at, dst);
    mtc1(src_low, dst);
    mthc1(at, dst);
  }
}
1727
1728
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001729void MacroAssembler::Move(FPURegister dst, float imm) {
1730 li(at, Operand(bit_cast<int32_t>(imm)));
1731 mtc1(at, dst);
1732}
1733
1734
// Loads the double-precision immediate |imm| into FPU register |dst|.
// Reuses kDoubleRegZero for +/-0.0 once it is known to hold 0.0, and avoids
// loading zero words through at when a half of the double is all-zero.
void MacroAssembler::Move(FPURegister dst, double imm) {
  static const DoubleRepresentation minus_zero(-0.0);
  static const DoubleRepresentation zero(0.0);
  DoubleRepresentation value_rep(imm);
  // Handle special values first.
  if (value_rep == zero && has_double_zero_reg_set_) {
    mov_d(dst, kDoubleRegZero);
  } else if (value_rep == minus_zero && has_double_zero_reg_set_) {
    // -0.0 is produced by negating the cached +0.0 register.
    neg_d(dst, kDoubleRegZero);
  } else {
    uint32_t lo, hi;
    DoubleAsTwoUInt32(imm, &lo, &hi);
    // Move the low part of the double into the lower of the corresponding FPU
    // register of FPU register pair.
    if (lo != 0) {
      li(at, Operand(lo));
      mtc1(at, dst);
    } else {
      // Zero word: write zero_reg directly, no li needed.
      mtc1(zero_reg, dst);
    }
    // Move the high part of the double into the higher of the corresponding FPU
    // register of FPU register pair.
    if (hi != 0) {
      li(at, Operand(hi));
      Mthc1(at, dst);
    } else {
      Mthc1(zero_reg, dst);
    }
    // Remember when kDoubleRegZero has been materialized so later Move(0.0)
    // calls can take the fast path above.
    if (dst.is(kDoubleRegZero)) has_double_zero_reg_set_ = true;
  }
}
1766
1767
1768void MacroAssembler::Movz(Register rd, Register rs, Register rt) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001769 if (IsMipsArchVariant(kLoongson) || IsMipsArchVariant(kMips32r6)) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001770 Label done;
1771 Branch(&done, ne, rt, Operand(zero_reg));
1772 mov(rd, rs);
1773 bind(&done);
1774 } else {
1775 movz(rd, rs, rt);
1776 }
1777}
1778
1779
1780void MacroAssembler::Movn(Register rd, Register rs, Register rt) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001781 if (IsMipsArchVariant(kLoongson) || IsMipsArchVariant(kMips32r6)) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001782 Label done;
1783 Branch(&done, eq, rt, Operand(zero_reg));
1784 mov(rd, rs);
1785 bind(&done);
1786 } else {
1787 movn(rd, rs, rt);
1788 }
1789}
1790
1791
// Conditional move: rd <- rs iff FPU condition code |cc| is set (movt).
// Loongson lacks movt, so the FCSR is read and the cc bit tested manually.
void MacroAssembler::Movt(Register rd, Register rs, uint16_t cc) {
  if (IsMipsArchVariant(kLoongson)) {
    // Tests an FP condition code and then conditionally move rs to rd.
    // We do not currently use any FPU cc bit other than bit 0.
    DCHECK(cc == 0);
    DCHECK(!(rs.is(t8) || rd.is(t8)));
    Label done;
    Register scratch = t8;
    // For testing purposes we need to fetch content of the FCSR register and
    // then test its cc (floating point condition code) bit (for cc = 0, it is
    // 24. bit of the FCSR).
    cfc1(scratch, FCSR);
    // For the MIPS I, II and III architectures, the contents of scratch is
    // UNPREDICTABLE for the instruction immediately following CFC1.
    nop();
    // Isolate FCSR bit 24 (cc bit 0): shift right 16, mask bit 8.
    srl(scratch, scratch, 16);
    andi(scratch, scratch, 0x0080);
    // Skip the move when the condition bit is clear.
    Branch(&done, eq, scratch, Operand(zero_reg));
    mov(rd, rs);
    bind(&done);
  } else {
    movt(rd, rs, cc);
  }
}
1816
1817
// Conditional move: rd <- rs iff FPU condition code |cc| is clear (movf).
// Loongson lacks movf, so the FCSR is read and the cc bit tested manually.
void MacroAssembler::Movf(Register rd, Register rs, uint16_t cc) {
  if (IsMipsArchVariant(kLoongson)) {
    // Tests an FP condition code and then conditionally move rs to rd.
    // We do not currently use any FPU cc bit other than bit 0.
    DCHECK(cc == 0);
    DCHECK(!(rs.is(t8) || rd.is(t8)));
    Label done;
    Register scratch = t8;
    // For testing purposes we need to fetch content of the FCSR register and
    // then test its cc (floating point condition code) bit (for cc = 0, it is
    // 24. bit of the FCSR).
    cfc1(scratch, FCSR);
    // For the MIPS I, II and III architectures, the contents of scratch is
    // UNPREDICTABLE for the instruction immediately following CFC1.
    nop();
    // Isolate FCSR bit 24 (cc bit 0): shift right 16, mask bit 8.
    srl(scratch, scratch, 16);
    andi(scratch, scratch, 0x0080);
    // Skip the move when the condition bit is set (inverse of Movt).
    Branch(&done, ne, scratch, Operand(zero_reg));
    mov(rd, rs);
    bind(&done);
  } else {
    movf(rd, rs, cc);
  }
}
1842
1843
// Counts leading zeros of rs into rd. Loongson lacks the clz instruction, so
// the count is computed by scanning a single-bit mask down from bit 31.
void MacroAssembler::Clz(Register rd, Register rs) {
  if (IsMipsArchVariant(kLoongson)) {
    DCHECK(!(rd.is(t8) || rd.is(t9)) && !(rs.is(t8) || rs.is(t9)));
    Register mask = t8;
    Register scratch = t9;
    Label loop, end;
    mov(at, rs);
    mov(rd, zero_reg);
    // Start with the mask at bit 31 (0x80000000).
    lui(mask, 0x8000);
    bind(&loop);
    and_(scratch, at, mask);
    // Stop as soon as the masked bit of the input is set.
    Branch(&end, ne, scratch, Operand(zero_reg));
    addiu(rd, rd, 1);
    // The srl below executes in the delay slot, shifting the mask before the
    // loop-termination test's target is reached; loop ends when mask == 0.
    Branch(&loop, ne, mask, Operand(zero_reg), USE_DELAY_SLOT);
    srl(mask, mask, 1);
    bind(&end);
  } else {
    clz(rd, rs);
  }
}
1864
1865
// Converts |double_input| to a signed 32-bit integer in |result| using
// |rounding_mode|. |except_flag| receives the FCSR exception bits raised by
// the conversion (0 when the value round-trips exactly); callers test it to
// detect out-of-range/NaN input. |scratch| holds the saved FCSR and
// |double_scratch| the rounded value. When |check_inexact| is
// kDontCheckForInexactConversion, the inexact bit is ignored.
void MacroAssembler::EmitFPUTruncate(FPURoundingMode rounding_mode,
                                     Register result,
                                     DoubleRegister double_input,
                                     Register scratch,
                                     DoubleRegister double_scratch,
                                     Register except_flag,
                                     CheckForInexactConversion check_inexact) {
  DCHECK(!result.is(scratch));
  DCHECK(!double_input.is(double_scratch));
  DCHECK(!except_flag.is(scratch));

  Label done;

  // Clear the except flag (0 = no exception)
  mov(except_flag, zero_reg);

  // Test for values that can be exactly represented as a signed 32-bit integer.
  // Round-trip double -> int32 -> double; if equal, no exception handling or
  // FCSR bookkeeping is needed.
  cvt_w_d(double_scratch, double_input);
  mfc1(result, double_scratch);
  cvt_d_w(double_scratch, double_scratch);
  BranchF(&done, NULL, eq, double_input, double_scratch);

  int32_t except_mask = kFCSRFlagMask;  // Assume interested in all exceptions.

  if (check_inexact == kDontCheckForInexactConversion) {
    // Ignore inexact exceptions.
    except_mask &= ~kFCSRInexactFlagMask;
  }

  // Save FCSR.
  cfc1(scratch, FCSR);
  // Disable FPU exceptions (also clears cumulative flag bits) so the
  // conversion below records fresh flags instead of trapping.
  ctc1(zero_reg, FCSR);

  // Do operation based on rounding mode.
  switch (rounding_mode) {
    case kRoundToNearest:
      Round_w_d(double_scratch, double_input);
      break;
    case kRoundToZero:
      Trunc_w_d(double_scratch, double_input);
      break;
    case kRoundToPlusInf:
      Ceil_w_d(double_scratch, double_input);
      break;
    case kRoundToMinusInf:
      Floor_w_d(double_scratch, double_input);
      break;
  }  // End of switch-statement.

  // Retrieve FCSR (with the flags raised by the conversion).
  cfc1(except_flag, FCSR);
  // Restore FCSR.
  ctc1(scratch, FCSR);
  // Move the converted value into the result register.
  mfc1(result, double_scratch);

  // Check for fpu exceptions.
  And(except_flag, except_flag, Operand(except_mask));

  bind(&done);
}
1928
1929
// Truncates |double_input| to int32 in |result| and jumps to |done| when the
// conversion succeeded without overflow/underflow/invalid-op; otherwise falls
// through so the caller can emit a slow-path (stub) conversion.
void MacroAssembler::TryInlineTruncateDoubleToI(Register result,
                                                DoubleRegister double_input,
                                                Label* done) {
  DoubleRegister single_scratch = kLithiumScratchDouble.low();
  Register scratch = at;
  Register scratch2 = t9;

  // Clear cumulative exception flags and save the FCSR.
  cfc1(scratch2, FCSR);
  ctc1(zero_reg, FCSR);
  // Try a conversion to a signed integer.
  trunc_w_d(single_scratch, double_input);
  mfc1(result, single_scratch);
  // Retrieve and restore the FCSR.
  cfc1(scratch, FCSR);
  ctc1(scratch2, FCSR);
  // Check for overflow and NaNs.
  And(scratch,
      scratch,
      kFCSROverflowFlagMask | kFCSRUnderflowFlagMask | kFCSRInvalidOpFlagMask);
  // If we had no exceptions we are done.
  Branch(done, eq, scratch, Operand(zero_reg));
}
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001953
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001954
// Truncates |double_input| to int32 in |result|, matching ECMA-262 ToInt32.
// Tries the fast inline conversion first; on FPU exception falls back to
// DoubleToIStub with the input spilled to the stack.
void MacroAssembler::TruncateDoubleToI(Register result,
                                       DoubleRegister double_input) {
  Label done;

  TryInlineTruncateDoubleToI(result, double_input, &done);

  // If we fell through then inline version didn't succeed - call stub instead.
  push(ra);
  Subu(sp, sp, Operand(kDoubleSize));  // Put input on stack.
  sdc1(double_input, MemOperand(sp, 0));

  // Stub reads the double at [sp + 0] and writes the truncated value to
  // |result|.
  DoubleToIStub stub(isolate(), sp, result, 0, true, true);
  CallStub(&stub);

  Addu(sp, sp, Operand(kDoubleSize));
  pop(ra);

  bind(&done);
}
1974
1975
// Truncates the value of the HeapNumber in |object| to int32 in |result|.
// Loads the boxed double, tries the inline conversion, and falls back to
// DoubleToIStub reading the double straight from the heap object.
void MacroAssembler::TruncateHeapNumberToI(Register result, Register object) {
  Label done;
  DoubleRegister double_scratch = f12;
  DCHECK(!result.is(object));

  ldc1(double_scratch,
       MemOperand(object, HeapNumber::kValueOffset - kHeapObjectTag));
  TryInlineTruncateDoubleToI(result, double_scratch, &done);

  // If we fell through then inline version didn't succeed - call stub instead.
  push(ra);
  // Stub reads the double directly from the heap number at the value offset.
  DoubleToIStub stub(isolate(),
                     object,
                     result,
                     HeapNumber::kValueOffset - kHeapObjectTag,
                     true,
                     true);
  CallStub(&stub);
  pop(ra);

  bind(&done);
}
1998
1999
// Truncates the JS number in |object| (smi or HeapNumber) to int32 in
// |result|. Smis are untagged directly; non-numbers branch to |not_number|.
// |heap_number_map| and |scratch| are used for the heap-number map check.
void MacroAssembler::TruncateNumberToI(Register object,
                                       Register result,
                                       Register heap_number_map,
                                       Register scratch,
                                       Label* not_number) {
  Label done;
  DCHECK(!result.is(object));

  // Fast path: a smi only needs untagging.
  UntagAndJumpIfSmi(result, object, &done);
  JumpIfNotHeapNumber(object, heap_number_map, scratch, not_number);
  TruncateHeapNumberToI(result, object);

  bind(&done);
}
2014
2015
// Extracts the |num_least_bits| least-significant bits of the smi in |src|
// into |dst|, skipping over the smi tag bits.
void MacroAssembler::GetLeastBitsFromSmi(Register dst,
                                         Register src,
                                         int num_least_bits) {
  Ext(dst, src, kSmiTagSize, num_least_bits);
}
2021
2022
// Extracts the |num_least_bits| least-significant bits of the untagged int32
// in |src| into |dst| by masking.
void MacroAssembler::GetLeastBitsFromInt32(Register dst,
                                           Register src,
                                           int num_least_bits) {
  And(dst, src, Operand((1 << num_least_bits) - 1));
}
2028
2029
// Emulated conditional branches do not emit a nop in the branch delay slot.
//
// BRANCH_ARGS_CHECK checks that conditional jump arguments are correct:
// cc_always must come with the zero_reg/zero_reg dummy operands, and a real
// condition must not (at least one operand has to be meaningful).
#define BRANCH_ARGS_CHECK(cond, rs, rt) DCHECK( \
    (cond == cc_always && rs.is(zero_reg) && rt.rm().is(zero_reg)) || \
    (cond != cc_always && (!rs.is(zero_reg) || !rt.rm().is(zero_reg))))
2036
2037
// Unconditional branch to a pc-relative |offset| (in instructions). The
// offset must fit the short-branch range of the target ISA: 26 bits on r6,
// 16 bits otherwise.
void MacroAssembler::Branch(int32_t offset, BranchDelaySlot bdslot) {
  DCHECK(IsMipsArchVariant(kMips32r6) ? is_int26(offset) : is_int16(offset));
  BranchShort(offset, bdslot);
}
2042
2043
// Conditional branch to a pc-relative |offset|. The caller guarantees the
// offset is short-branch-reachable; this is asserted, not handled.
void MacroAssembler::Branch(int32_t offset, Condition cond, Register rs,
                            const Operand& rt, BranchDelaySlot bdslot) {
  bool is_near = BranchShortCheck(offset, nullptr, cond, rs, rt, bdslot);
  DCHECK(is_near);
  USE(is_near);  // Avoid unused-variable warning in release builds.
}
2050
2051
2052void MacroAssembler::Branch(Label* L, BranchDelaySlot bdslot) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002053 if (L->is_bound()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002054 if (is_near_branch(L)) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002055 BranchShort(L, bdslot);
2056 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002057 BranchLong(L, bdslot);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002058 }
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002059 } else {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002060 if (is_trampoline_emitted()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002061 BranchLong(L, bdslot);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002062 } else {
2063 BranchShort(L, bdslot);
2064 }
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002065 }
2066}
2067
2068
// Conditional branch to label |L|. Prefers a short branch; when the target
// is (or may become) out of short range, emits the inverted condition as a
// short branch around an unconditional long branch.
void MacroAssembler::Branch(Label* L, Condition cond, Register rs,
                            const Operand& rt,
                            BranchDelaySlot bdslot) {
  if (L->is_bound()) {
    // BranchShortCheck emits the branch when it is in range; otherwise fall
    // back to the skip-around-long-branch pattern below.
    if (!BranchShortCheck(0, L, cond, rs, rt, bdslot)) {
      if (cond != cc_always) {
        Label skip;
        Condition neg_cond = NegateCondition(cond);
        BranchShort(&skip, neg_cond, rs, rt);
        BranchLong(L, bdslot);
        bind(&skip);
      } else {
        BranchLong(L, bdslot);
      }
    }
  } else {
    if (is_trampoline_emitted()) {
      // Unbound label past a trampoline pool: assume it may be far away.
      if (cond != cc_always) {
        Label skip;
        Condition neg_cond = NegateCondition(cond);
        BranchShort(&skip, neg_cond, rs, rt);
        BranchLong(L, bdslot);
        bind(&skip);
      } else {
        BranchLong(L, bdslot);
      }
    } else {
      BranchShort(L, cond, rs, rt, bdslot);
    }
  }
}
2100
2101
// Conditional branch comparing |rs| against a root-list constant: loads the
// root value into at and delegates to the Operand overload.
void MacroAssembler::Branch(Label* L,
                            Condition cond,
                            Register rs,
                            Heap::RootListIndex index,
                            BranchDelaySlot bdslot) {
  LoadRoot(at, index);
  Branch(L, cond, rs, Operand(at), bdslot);
}
2110
2111
// Emits a pre-r6 unconditional short branch. Exactly one of |L| (label form)
// or |offset| (raw-offset form) is meaningful; with a label, offset must be 0.
void MacroAssembler::BranchShortHelper(int16_t offset, Label* L,
                                       BranchDelaySlot bdslot) {
  DCHECK(L == nullptr || offset == 0);
  offset = GetOffset(offset, L, OffsetSize::kOffset16);
  b(offset);

  // Emit a nop in the branch delay slot if required.
  if (bdslot == PROTECT)
    nop();
}
2122
2123
// Emits an r6 compact unconditional branch (bc, 26-bit offset, no delay
// slot). Exactly one of |L| or |offset| is meaningful.
void MacroAssembler::BranchShortHelperR6(int32_t offset, Label* L) {
  DCHECK(L == nullptr || offset == 0);
  offset = GetOffset(offset, L, OffsetSize::kOffset26);
  bc(offset);
}
Steve Block44f0eee2011-05-26 01:26:41 +01002129
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002130
// Unconditional short branch to |offset|. Uses the r6 compact form only when
// no delay slot is requested (USE_DELAY_SLOT forces the legacy encoding,
// since compact branches have no delay slot).
void MacroAssembler::BranchShort(int32_t offset, BranchDelaySlot bdslot) {
  if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) {
    DCHECK(is_int26(offset));
    BranchShortHelperR6(offset, nullptr);
  } else {
    DCHECK(is_int16(offset));
    BranchShortHelper(offset, nullptr, bdslot);
  }
}
2140
2141
// Unconditional short branch to label |L|; r6 compact form when no delay
// slot is requested, legacy 16-bit branch otherwise.
void MacroAssembler::BranchShort(Label* L, BranchDelaySlot bdslot) {
  if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) {
    BranchShortHelperR6(0, L);
  } else {
    BranchShortHelper(0, L, bdslot);
  }
}
2149
2150
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002151static inline bool IsZero(const Operand& rt) {
Steve Block44f0eee2011-05-26 01:26:41 +01002152 if (rt.is_reg()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002153 return rt.rm().is(zero_reg);
2154 } else {
2155 return rt.immediate() == 0;
2156 }
2157}
2158
2159
2160int32_t MacroAssembler::GetOffset(int32_t offset, Label* L, OffsetSize bits) {
2161 if (L) {
2162 offset = branch_offset_helper(L, bits) >> 2;
2163 } else {
2164 DCHECK(is_intn(offset, bits));
2165 }
2166 return offset;
2167}
2168
2169
2170Register MacroAssembler::GetRtAsRegisterHelper(const Operand& rt,
2171 Register scratch) {
2172 Register r2 = no_reg;
2173 if (rt.is_reg()) {
Steve Block44f0eee2011-05-26 01:26:41 +01002174 r2 = rt.rm_;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002175 } else {
2176 r2 = scratch;
2177 li(r2, rt);
2178 }
2179
2180 return r2;
2181}
2182
2183
// Emits an r6 conditional short branch for |cond| comparing |rs| with |rt|.
// Returns false (emitting nothing) when the bound label |L| is out of range
// for the offset width the chosen instruction needs, so the caller can fall
// back to a long branch. Per condition it picks the narrowest compact form:
// bc (26-bit) for always-taken cases, beqzc/bnezc (21-bit) when one operand
// is zero, and the 16-bit compare-branch forms otherwise. Some statically
// decidable cases emit no code at all (e.g. rs > rs).
bool MacroAssembler::BranchShortHelperR6(int32_t offset, Label* L,
                                         Condition cond, Register rs,
                                         const Operand& rt) {
  DCHECK(L == nullptr || offset == 0);
  // at is the default scratch; fall back to t8 when rs itself is at.
  Register scratch = rs.is(at) ? t8 : at;
  OffsetSize bits = OffsetSize::kOffset16;

  // Be careful to always use shifted_branch_offset only just before the
  // branch instruction, as the location will be remember for patching the
  // target.
  {
    BlockTrampolinePoolScope block_trampoline_pool(this);
    switch (cond) {
      case cc_always:
        bits = OffsetSize::kOffset26;
        if (!is_near(L, bits)) return false;
        offset = GetOffset(offset, L, bits);
        bc(offset);
        break;
      case eq:
        if (rs.code() == rt.rm_.reg_code) {
          // Pre R6 beq is used here to make the code patchable. Otherwise bc
          // should be used which has no condition field so is not patchable.
          bits = OffsetSize::kOffset16;
          if (!is_near(L, bits)) return false;
          scratch = GetRtAsRegisterHelper(rt, scratch);
          offset = GetOffset(offset, L, bits);
          beq(rs, scratch, offset);
          nop();
        } else if (IsZero(rt)) {
          bits = OffsetSize::kOffset21;
          if (!is_near(L, bits)) return false;
          offset = GetOffset(offset, L, bits);
          beqzc(rs, offset);
        } else {
          // We don't want any other register but scratch clobbered.
          bits = OffsetSize::kOffset16;
          if (!is_near(L, bits)) return false;
          scratch = GetRtAsRegisterHelper(rt, scratch);
          offset = GetOffset(offset, L, bits);
          beqc(rs, scratch, offset);
        }
        break;
      case ne:
        if (rs.code() == rt.rm_.reg_code) {
          // Pre R6 bne is used here to make the code patchable. Otherwise we
          // should not generate any instruction.
          bits = OffsetSize::kOffset16;
          if (!is_near(L, bits)) return false;
          scratch = GetRtAsRegisterHelper(rt, scratch);
          offset = GetOffset(offset, L, bits);
          bne(rs, scratch, offset);
          nop();
        } else if (IsZero(rt)) {
          bits = OffsetSize::kOffset21;
          if (!is_near(L, bits)) return false;
          offset = GetOffset(offset, L, bits);
          bnezc(rs, offset);
        } else {
          // We don't want any other register but scratch clobbered.
          bits = OffsetSize::kOffset16;
          if (!is_near(L, bits)) return false;
          scratch = GetRtAsRegisterHelper(rt, scratch);
          offset = GetOffset(offset, L, bits);
          bnec(rs, scratch, offset);
        }
        break;

      // Signed comparison.
      case greater:
        // rs > rt
        if (rs.code() == rt.rm_.reg_code) {
          break;  // No code needs to be emitted.
        } else if (rs.is(zero_reg)) {
          // 0 > rt  <=>  rt < 0.
          bits = OffsetSize::kOffset16;
          if (!is_near(L, bits)) return false;
          scratch = GetRtAsRegisterHelper(rt, scratch);
          offset = GetOffset(offset, L, bits);
          bltzc(scratch, offset);
        } else if (IsZero(rt)) {
          bits = OffsetSize::kOffset16;
          if (!is_near(L, bits)) return false;
          offset = GetOffset(offset, L, bits);
          bgtzc(rs, offset);
        } else {
          bits = OffsetSize::kOffset16;
          if (!is_near(L, bits)) return false;
          scratch = GetRtAsRegisterHelper(rt, scratch);
          DCHECK(!rs.is(scratch));
          offset = GetOffset(offset, L, bits);
          bltc(scratch, rs, offset);
        }
        break;
      case greater_equal:
        // rs >= rt
        if (rs.code() == rt.rm_.reg_code) {
          // rs >= rs is always true: unconditional compact branch.
          bits = OffsetSize::kOffset26;
          if (!is_near(L, bits)) return false;
          offset = GetOffset(offset, L, bits);
          bc(offset);
        } else if (rs.is(zero_reg)) {
          // 0 >= rt  <=>  rt <= 0.
          bits = OffsetSize::kOffset16;
          if (!is_near(L, bits)) return false;
          scratch = GetRtAsRegisterHelper(rt, scratch);
          offset = GetOffset(offset, L, bits);
          blezc(scratch, offset);
        } else if (IsZero(rt)) {
          bits = OffsetSize::kOffset16;
          if (!is_near(L, bits)) return false;
          offset = GetOffset(offset, L, bits);
          bgezc(rs, offset);
        } else {
          bits = OffsetSize::kOffset16;
          if (!is_near(L, bits)) return false;
          scratch = GetRtAsRegisterHelper(rt, scratch);
          DCHECK(!rs.is(scratch));
          offset = GetOffset(offset, L, bits);
          bgec(rs, scratch, offset);
        }
        break;
      case less:
        // rs < rt
        if (rs.code() == rt.rm_.reg_code) {
          break;  // No code needs to be emitted.
        } else if (rs.is(zero_reg)) {
          // 0 < rt  <=>  rt > 0.
          bits = OffsetSize::kOffset16;
          if (!is_near(L, bits)) return false;
          scratch = GetRtAsRegisterHelper(rt, scratch);
          offset = GetOffset(offset, L, bits);
          bgtzc(scratch, offset);
        } else if (IsZero(rt)) {
          bits = OffsetSize::kOffset16;
          if (!is_near(L, bits)) return false;
          offset = GetOffset(offset, L, bits);
          bltzc(rs, offset);
        } else {
          bits = OffsetSize::kOffset16;
          if (!is_near(L, bits)) return false;
          scratch = GetRtAsRegisterHelper(rt, scratch);
          DCHECK(!rs.is(scratch));
          offset = GetOffset(offset, L, bits);
          bltc(rs, scratch, offset);
        }
        break;
      case less_equal:
        // rs <= rt
        if (rs.code() == rt.rm_.reg_code) {
          // rs <= rs is always true: unconditional compact branch.
          bits = OffsetSize::kOffset26;
          if (!is_near(L, bits)) return false;
          offset = GetOffset(offset, L, bits);
          bc(offset);
        } else if (rs.is(zero_reg)) {
          // 0 <= rt  <=>  rt >= 0.
          bits = OffsetSize::kOffset16;
          if (!is_near(L, bits)) return false;
          scratch = GetRtAsRegisterHelper(rt, scratch);
          offset = GetOffset(offset, L, bits);
          bgezc(scratch, offset);
        } else if (IsZero(rt)) {
          bits = OffsetSize::kOffset16;
          if (!is_near(L, bits)) return false;
          offset = GetOffset(offset, L, bits);
          blezc(rs, offset);
        } else {
          bits = OffsetSize::kOffset16;
          if (!is_near(L, bits)) return false;
          scratch = GetRtAsRegisterHelper(rt, scratch);
          DCHECK(!rs.is(scratch));
          offset = GetOffset(offset, L, bits);
          bgec(scratch, rs, offset);
        }
        break;

      // Unsigned comparison.
      case Ugreater:
        // rs > rt
        if (rs.code() == rt.rm_.reg_code) {
          break;  // No code needs to be emitted.
        } else if (rs.is(zero_reg)) {
          // 0 > rt (unsigned)  <=>  rt != 0 is false... only rt != 0 can hold;
          // actually 0 > rt never holds unless rt != 0 is tested below:
          // unsigned 0 > rt is false for all rt, but the original semantics
          // here branch when rt != 0 is impossible to satisfy -- emitted as
          // bnezc per the upstream instruction selection.
          bits = OffsetSize::kOffset21;
          if (!is_near(L, bits)) return false;
          scratch = GetRtAsRegisterHelper(rt, scratch);
          offset = GetOffset(offset, L, bits);
          bnezc(scratch, offset);
        } else if (IsZero(rt)) {
          // rs > 0 (unsigned)  <=>  rs != 0.
          bits = OffsetSize::kOffset21;
          if (!is_near(L, bits)) return false;
          offset = GetOffset(offset, L, bits);
          bnezc(rs, offset);
        } else {
          bits = OffsetSize::kOffset16;
          if (!is_near(L, bits)) return false;
          scratch = GetRtAsRegisterHelper(rt, scratch);
          DCHECK(!rs.is(scratch));
          offset = GetOffset(offset, L, bits);
          bltuc(scratch, rs, offset);
        }
        break;
      case Ugreater_equal:
        // rs >= rt
        if (rs.code() == rt.rm_.reg_code) {
          bits = OffsetSize::kOffset26;
          if (!is_near(L, bits)) return false;
          offset = GetOffset(offset, L, bits);
          bc(offset);
        } else if (rs.is(zero_reg)) {
          // 0 >= rt (unsigned)  <=>  rt == 0.
          bits = OffsetSize::kOffset21;
          if (!is_near(L, bits)) return false;
          scratch = GetRtAsRegisterHelper(rt, scratch);
          offset = GetOffset(offset, L, bits);
          beqzc(scratch, offset);
        } else if (IsZero(rt)) {
          // rs >= 0 (unsigned) is always true.
          bits = OffsetSize::kOffset26;
          if (!is_near(L, bits)) return false;
          offset = GetOffset(offset, L, bits);
          bc(offset);
        } else {
          bits = OffsetSize::kOffset16;
          if (!is_near(L, bits)) return false;
          scratch = GetRtAsRegisterHelper(rt, scratch);
          DCHECK(!rs.is(scratch));
          offset = GetOffset(offset, L, bits);
          bgeuc(rs, scratch, offset);
        }
        break;
      case Uless:
        // rs < rt
        if (rs.code() == rt.rm_.reg_code) {
          break;  // No code needs to be emitted.
        } else if (rs.is(zero_reg)) {
          // 0 < rt (unsigned)  <=>  rt != 0.
          bits = OffsetSize::kOffset21;
          if (!is_near(L, bits)) return false;
          scratch = GetRtAsRegisterHelper(rt, scratch);
          offset = GetOffset(offset, L, bits);
          bnezc(scratch, offset);
        } else if (IsZero(rt)) {
          // rs < 0 (unsigned) is always false.
          break;  // No code needs to be emitted.
        } else {
          bits = OffsetSize::kOffset16;
          if (!is_near(L, bits)) return false;
          scratch = GetRtAsRegisterHelper(rt, scratch);
          DCHECK(!rs.is(scratch));
          offset = GetOffset(offset, L, bits);
          bltuc(rs, scratch, offset);
        }
        break;
      case Uless_equal:
        // rs <= rt
        if (rs.code() == rt.rm_.reg_code) {
          bits = OffsetSize::kOffset26;
          if (!is_near(L, bits)) return false;
          offset = GetOffset(offset, L, bits);
          bc(offset);
        } else if (rs.is(zero_reg)) {
          // 0 <= rt (unsigned) is always true.
          bits = OffsetSize::kOffset26;
          if (!is_near(L, bits)) return false;
          scratch = GetRtAsRegisterHelper(rt, scratch);
          offset = GetOffset(offset, L, bits);
          bc(offset);
        } else if (IsZero(rt)) {
          // rs <= 0 (unsigned)  <=>  rs == 0.
          bits = OffsetSize::kOffset21;
          if (!is_near(L, bits)) return false;
          offset = GetOffset(offset, L, bits);
          beqzc(rs, offset);
        } else {
          bits = OffsetSize::kOffset16;
          if (!is_near(L, bits)) return false;
          scratch = GetRtAsRegisterHelper(rt, scratch);
          DCHECK(!rs.is(scratch));
          offset = GetOffset(offset, L, bits);
          bgeuc(scratch, rs, offset);
        }
        break;
      default:
        UNREACHABLE();
    }
  }
  CheckTrampolinePoolQuick(1);
  return true;
}
2463
2464
// Emits a pre-r6 (classic MIPS, delay-slot) short conditional branch with a
// signed 16-bit instruction offset.
//
// Exactly one of |offset| / |L| is used: when branching to a label, |offset|
// must be 0; when branching by raw offset, |L| is null (checked below).
// |cond| compares |rs| against |rt|; an immediate |rt| is materialized into
// the scratch register 'at', which is therefore clobbered on those paths.
// Returns false without emitting anything if the target is out of 16-bit
// range, so the caller can fall back to a long-branch sequence.
// When |bdslot| is PROTECT, a nop fills the branch delay slot.
bool MacroAssembler::BranchShortHelper(int16_t offset, Label* L, Condition cond,
                                       Register rs, const Operand& rt,
                                       BranchDelaySlot bdslot) {
  DCHECK(L == nullptr || offset == 0);
  if (!is_near(L, OffsetSize::kOffset16)) return false;

  Register scratch = at;
  int32_t offset32;

  // Be careful to always use shifted_branch_offset only just before the
  // branch instruction, as the location will be remembered for patching the
  // target.
  {
    BlockTrampolinePoolScope block_trampoline_pool(this);
    switch (cond) {
      case cc_always:
        offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
        b(offset32);
        break;
      case eq:
        if (IsZero(rt)) {
          offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
          beq(rs, zero_reg, offset32);
        } else {
          // We don't want any other register but scratch clobbered.
          scratch = GetRtAsRegisterHelper(rt, scratch);
          offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
          beq(rs, scratch, offset32);
        }
        break;
      case ne:
        if (IsZero(rt)) {
          offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
          bne(rs, zero_reg, offset32);
        } else {
          // We don't want any other register but scratch clobbered.
          scratch = GetRtAsRegisterHelper(rt, scratch);
          offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
          bne(rs, scratch, offset32);
        }
        break;

      // Signed comparison.
      // The general cases synthesize the comparison with Slt into scratch
      // and branch on the 0/1 result; zero operands use the dedicated
      // compare-against-zero branch instructions.
      case greater:
        if (IsZero(rt)) {
          offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
          bgtz(rs, offset32);
        } else {
          Slt(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
          offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
          bne(scratch, zero_reg, offset32);
        }
        break;
      case greater_equal:
        if (IsZero(rt)) {
          offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
          bgez(rs, offset32);
        } else {
          Slt(scratch, rs, rt);
          offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
          beq(scratch, zero_reg, offset32);
        }
        break;
      case less:
        if (IsZero(rt)) {
          offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
          bltz(rs, offset32);
        } else {
          Slt(scratch, rs, rt);
          offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
          bne(scratch, zero_reg, offset32);
        }
        break;
      case less_equal:
        if (IsZero(rt)) {
          offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
          blez(rs, offset32);
        } else {
          Slt(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
          offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
          beq(scratch, zero_reg, offset32);
        }
        break;

      // Unsigned comparison.
      case Ugreater:
        if (IsZero(rt)) {
          // Unsigned "rs > 0" is equivalent to "rs != 0".
          offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
          bne(rs, zero_reg, offset32);
        } else {
          Sltu(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
          offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
          bne(scratch, zero_reg, offset32);
        }
        break;
      case Ugreater_equal:
        if (IsZero(rt)) {
          // Unsigned "rs >= 0" is always true: branch unconditionally.
          offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
          b(offset32);
        } else {
          Sltu(scratch, rs, rt);
          offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
          beq(scratch, zero_reg, offset32);
        }
        break;
      case Uless:
        if (IsZero(rt)) {
          // Unsigned "rs < 0" is always false.
          return true;  // No code needs to be emitted.
        } else {
          Sltu(scratch, rs, rt);
          offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
          bne(scratch, zero_reg, offset32);
        }
        break;
      case Uless_equal:
        if (IsZero(rt)) {
          // Unsigned "rs <= 0" is equivalent to "rs == 0".
          offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
          beq(rs, zero_reg, offset32);
        } else {
          Sltu(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
          offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
          beq(scratch, zero_reg, offset32);
        }
        break;
      default:
        UNREACHABLE();
    }
  }
  // Emit a nop in the branch delay slot if required.
  if (bdslot == PROTECT)
    nop();

  return true;
}
2599
2600
2601bool MacroAssembler::BranchShortCheck(int32_t offset, Label* L, Condition cond,
2602 Register rs, const Operand& rt,
2603 BranchDelaySlot bdslot) {
2604 BRANCH_ARGS_CHECK(cond, rs, rt);
2605
2606 if (!L) {
2607 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) {
2608 DCHECK(is_int26(offset));
2609 return BranchShortHelperR6(offset, nullptr, cond, rs, rt);
2610 } else {
2611 DCHECK(is_int16(offset));
2612 return BranchShortHelper(offset, nullptr, cond, rs, rt, bdslot);
2613 }
2614 } else {
2615 DCHECK(offset == 0);
2616 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) {
2617 return BranchShortHelperR6(0, L, cond, rs, rt);
2618 } else {
2619 return BranchShortHelper(0, L, cond, rs, rt, bdslot);
2620 }
2621 }
2622 return false;
2623}
2624
2625
// Emits a short conditional branch by raw instruction offset (no label).
// The range DCHECKs and emitter selection live in BranchShortCheck.
void MacroAssembler::BranchShort(int32_t offset, Condition cond, Register rs,
                                 const Operand& rt, BranchDelaySlot bdslot) {
  BranchShortCheck(offset, nullptr, cond, rs, rt, bdslot);
}
2630
2631
// Emits a short conditional branch to a label.  The label variant passes
// offset 0; BranchShortCheck computes/patches the real displacement.
void MacroAssembler::BranchShort(Label* L, Condition cond, Register rs,
                                 const Operand& rt, BranchDelaySlot bdslot) {
  BranchShortCheck(0, L, cond, rs, rt, bdslot);
}
2636
2637
// Unconditional branch-and-link (function-call style branch) by raw offset.
void MacroAssembler::BranchAndLink(int32_t offset, BranchDelaySlot bdslot) {
  BranchAndLinkShort(offset, bdslot);
}
2641
2642
// Conditional branch-and-link by raw offset.  The offset-based form has no
// long-branch fallback, so the target must be within short-branch range;
// this is enforced in debug builds only (release builds assume it).
void MacroAssembler::BranchAndLink(int32_t offset, Condition cond, Register rs,
                                   const Operand& rt, BranchDelaySlot bdslot) {
  bool is_near = BranchAndLinkShortCheck(offset, nullptr, cond, rs, rt, bdslot);
  DCHECK(is_near);
  USE(is_near);  // Avoid an unused-variable warning in release builds.
}
2649
2650
// Unconditional branch-and-link to a label, choosing a short or long
// encoding.  A bound label's distance is known, so we pick exactly; an
// unbound label uses the long form once a trampoline pool has been emitted
// (the eventual target may then be far away), and the short form otherwise.
void MacroAssembler::BranchAndLink(Label* L, BranchDelaySlot bdslot) {
  if (L->is_bound()) {
    if (is_near_branch(L)) {
      BranchAndLinkShort(L, bdslot);
    } else {
      BranchAndLinkLong(L, bdslot);
    }
  } else {
    if (is_trampoline_emitted()) {
      BranchAndLinkLong(L, bdslot);
    } else {
      BranchAndLinkShort(L, bdslot);
    }
  }
}
2666
2667
// Conditional branch-and-link to a label.  When the target is (or may be)
// out of short range, the condition is inverted into a short skip branch
// around an unconditional long branch-and-link, preserving the original
// conditional-call semantics.
void MacroAssembler::BranchAndLink(Label* L, Condition cond, Register rs,
                                   const Operand& rt,
                                   BranchDelaySlot bdslot) {
  if (L->is_bound()) {
    // Try the short form first; fall back to skip-over-long if out of range.
    if (!BranchAndLinkShortCheck(0, L, cond, rs, rt, bdslot)) {
      Label skip;
      Condition neg_cond = NegateCondition(cond);
      BranchShort(&skip, neg_cond, rs, rt);
      BranchAndLinkLong(L, bdslot);
      bind(&skip);
    }
  } else {
    if (is_trampoline_emitted()) {
      // Target may end up far away: emit the skip-over-long form directly.
      Label skip;
      Condition neg_cond = NegateCondition(cond);
      BranchShort(&skip, neg_cond, rs, rt);
      BranchAndLinkLong(L, bdslot);
      bind(&skip);
    } else {
      BranchAndLinkShortCheck(0, L, cond, rs, rt, bdslot);
    }
  }
}
2691
2692
// Emits an unconditional pre-r6 bal (branch-and-link) with a 16-bit offset.
// Exactly one of |offset| / |L| is used (offset must be 0 with a label).
void MacroAssembler::BranchAndLinkShortHelper(int16_t offset, Label* L,
                                              BranchDelaySlot bdslot) {
  DCHECK(L == nullptr || offset == 0);
  offset = GetOffset(offset, L, OffsetSize::kOffset16);
  bal(offset);

  // Emit a nop in the branch delay slot if required.
  if (bdslot == PROTECT)
    nop();
}
2703
2704
// Emits an unconditional r6 compact branch-and-link (balc, 26-bit offset,
// no delay slot).  Exactly one of |offset| / |L| is used.
void MacroAssembler::BranchAndLinkShortHelperR6(int32_t offset, Label* L) {
  DCHECK(L == nullptr || offset == 0);
  offset = GetOffset(offset, L, OffsetSize::kOffset26);
  balc(offset);
}
2710
2711
// Unconditional short branch-and-link by raw offset: uses the r6 compact
// form (26-bit, no delay slot) on MIPS32r6 with a PROTECTed slot, otherwise
// the classic 16-bit bal form.
void MacroAssembler::BranchAndLinkShort(int32_t offset,
                                        BranchDelaySlot bdslot) {
  if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) {
    DCHECK(is_int26(offset));
    BranchAndLinkShortHelperR6(offset, nullptr);
  } else {
    DCHECK(is_int16(offset));
    BranchAndLinkShortHelper(offset, nullptr, bdslot);
  }
}
2722
2723
// Unconditional short branch-and-link to a label; same r6/classic emitter
// selection as the raw-offset overload above.
void MacroAssembler::BranchAndLinkShort(Label* L, BranchDelaySlot bdslot) {
  if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) {
    BranchAndLinkShortHelperR6(0, L);
  } else {
    BranchAndLinkShortHelper(0, L, bdslot);
  }
}
2731
2732
// Emits a conditional r6 compact branch-and-link (no delay slot).  Uses the
// dedicated compare-and-link instructions (beqzalc, bltzalc, ...) where an
// operand is zero, and otherwise synthesizes the comparison with Slt/Sltu
// into a scratch register.  Exactly one of |offset| / |L| is used.
// Returns false without emitting anything when the target is out of range
// for the chosen offset size (26-bit unconditional, 16-bit conditional).
bool MacroAssembler::BranchAndLinkShortHelperR6(int32_t offset, Label* L,
                                                Condition cond, Register rs,
                                                const Operand& rt) {
  DCHECK(L == nullptr || offset == 0);
  // Pick a scratch that cannot alias rs ('at' normally, t8 if rs is 'at').
  Register scratch = rs.is(at) ? t8 : at;
  OffsetSize bits = OffsetSize::kOffset16;

  BlockTrampolinePoolScope block_trampoline_pool(this);
  DCHECK((cond == cc_always && is_int26(offset)) || is_int16(offset));
  switch (cond) {
    case cc_always:
      bits = OffsetSize::kOffset26;
      if (!is_near(L, bits)) return false;
      offset = GetOffset(offset, L, bits);
      balc(offset);
      break;
    case eq:
      if (!is_near(L, bits)) return false;
      // rs == rt  <=>  rs - rt == 0.
      Subu(scratch, rs, rt);
      offset = GetOffset(offset, L, bits);
      beqzalc(scratch, offset);
      break;
    case ne:
      if (!is_near(L, bits)) return false;
      Subu(scratch, rs, rt);
      offset = GetOffset(offset, L, bits);
      bnezalc(scratch, offset);
      break;

    // Signed comparison.
    case greater:
      // rs > rt
      if (rs.code() == rt.rm_.reg_code) {
        // Same register: rs > rs is always false.
        break;  // No code needs to be emitted.
      } else if (rs.is(zero_reg)) {
        if (!is_near(L, bits)) return false;
        scratch = GetRtAsRegisterHelper(rt, scratch);
        offset = GetOffset(offset, L, bits);
        bltzalc(scratch, offset);
      } else if (IsZero(rt)) {
        if (!is_near(L, bits)) return false;
        offset = GetOffset(offset, L, bits);
        bgtzalc(rs, offset);
      } else {
        if (!is_near(L, bits)) return false;
        Slt(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
        offset = GetOffset(offset, L, bits);
        bnezalc(scratch, offset);
      }
      break;
    case greater_equal:
      // rs >= rt
      if (rs.code() == rt.rm_.reg_code) {
        // Same register: always true, so link unconditionally.
        bits = OffsetSize::kOffset26;
        if (!is_near(L, bits)) return false;
        offset = GetOffset(offset, L, bits);
        balc(offset);
      } else if (rs.is(zero_reg)) {
        if (!is_near(L, bits)) return false;
        scratch = GetRtAsRegisterHelper(rt, scratch);
        offset = GetOffset(offset, L, bits);
        blezalc(scratch, offset);
      } else if (IsZero(rt)) {
        if (!is_near(L, bits)) return false;
        offset = GetOffset(offset, L, bits);
        bgezalc(rs, offset);
      } else {
        if (!is_near(L, bits)) return false;
        Slt(scratch, rs, rt);
        offset = GetOffset(offset, L, bits);
        beqzalc(scratch, offset);
      }
      break;
    case less:
      // rs < rt
      if (rs.code() == rt.rm_.reg_code) {
        // Same register: rs < rs is always false.
        break;  // No code needs to be emitted.
      } else if (rs.is(zero_reg)) {
        if (!is_near(L, bits)) return false;
        scratch = GetRtAsRegisterHelper(rt, scratch);
        offset = GetOffset(offset, L, bits);
        bgtzalc(scratch, offset);
      } else if (IsZero(rt)) {
        if (!is_near(L, bits)) return false;
        offset = GetOffset(offset, L, bits);
        bltzalc(rs, offset);
      } else {
        if (!is_near(L, bits)) return false;
        Slt(scratch, rs, rt);
        offset = GetOffset(offset, L, bits);
        bnezalc(scratch, offset);
      }
      break;
    case less_equal:
      // rs <= rt
      if (rs.code() == rt.rm_.reg_code) {
        // Same register: always true, so link unconditionally.
        bits = OffsetSize::kOffset26;
        if (!is_near(L, bits)) return false;
        offset = GetOffset(offset, L, bits);
        balc(offset);
      } else if (rs.is(zero_reg)) {
        if (!is_near(L, bits)) return false;
        scratch = GetRtAsRegisterHelper(rt, scratch);
        offset = GetOffset(offset, L, bits);
        bgezalc(scratch, offset);
      } else if (IsZero(rt)) {
        if (!is_near(L, bits)) return false;
        offset = GetOffset(offset, L, bits);
        blezalc(rs, offset);
      } else {
        if (!is_near(L, bits)) return false;
        Slt(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
        offset = GetOffset(offset, L, bits);
        beqzalc(scratch, offset);
      }
      break;


    // Unsigned comparison.
    case Ugreater:
      // rs > rt
      if (!is_near(L, bits)) return false;
      Sltu(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
      offset = GetOffset(offset, L, bits);
      bnezalc(scratch, offset);
      break;
    case Ugreater_equal:
      // rs >= rt
      if (!is_near(L, bits)) return false;
      Sltu(scratch, rs, rt);
      offset = GetOffset(offset, L, bits);
      beqzalc(scratch, offset);
      break;
    case Uless:
      // rs < rt
      if (!is_near(L, bits)) return false;
      Sltu(scratch, rs, rt);
      offset = GetOffset(offset, L, bits);
      bnezalc(scratch, offset);
      break;
    case Uless_equal:
      // rs <= rt
      if (!is_near(L, bits)) return false;
      Sltu(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
      offset = GetOffset(offset, L, bits);
      beqzalc(scratch, offset);
      break;
    default:
      UNREACHABLE();
  }
  return true;
}
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002885
2886
// Pre r6 we need to use a bgezal or bltzal, but they can't be used directly
// with the slt instructions. We could use sub or add instead but we would miss
// overflow cases, so we keep slt and add an intermediate third instruction.
//
// Emits a conditional pre-r6 branch-and-link with a 16-bit offset.  Because
// only bgezal/bltzal link, the Slt/Sltu 0-or-1 result is biased with
// 'addiu scratch, scratch, -1' to 0 or -1 so its sign encodes the condition.
// eq/ne instead skip over the bal with an inverted two-instruction branch.
// Clobbers t8 as scratch.  Exactly one of |offset| / |L| is used; returns
// false without emitting anything if the target is out of 16-bit range.
bool MacroAssembler::BranchAndLinkShortHelper(int16_t offset, Label* L,
                                              Condition cond, Register rs,
                                              const Operand& rt,
                                              BranchDelaySlot bdslot) {
  DCHECK(L == nullptr || offset == 0);
  if (!is_near(L, OffsetSize::kOffset16)) return false;

  Register scratch = t8;
  BlockTrampolinePoolScope block_trampoline_pool(this);

  switch (cond) {
    case cc_always:
      offset = GetOffset(offset, L, OffsetSize::kOffset16);
      bal(offset);
      break;
    case eq:
      // Skip the bal (and its delay-slot nop) when rs != rt.
      bne(rs, GetRtAsRegisterHelper(rt, scratch), 2);
      nop();
      offset = GetOffset(offset, L, OffsetSize::kOffset16);
      bal(offset);
      break;
    case ne:
      // Skip the bal (and its delay-slot nop) when rs == rt.
      beq(rs, GetRtAsRegisterHelper(rt, scratch), 2);
      nop();
      offset = GetOffset(offset, L, OffsetSize::kOffset16);
      bal(offset);
      break;

    // Signed comparison.
    case greater:
      Slt(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
      addiu(scratch, scratch, -1);
      offset = GetOffset(offset, L, OffsetSize::kOffset16);
      bgezal(scratch, offset);
      break;
    case greater_equal:
      Slt(scratch, rs, rt);
      addiu(scratch, scratch, -1);
      offset = GetOffset(offset, L, OffsetSize::kOffset16);
      bltzal(scratch, offset);
      break;
    case less:
      Slt(scratch, rs, rt);
      addiu(scratch, scratch, -1);
      offset = GetOffset(offset, L, OffsetSize::kOffset16);
      bgezal(scratch, offset);
      break;
    case less_equal:
      Slt(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
      addiu(scratch, scratch, -1);
      offset = GetOffset(offset, L, OffsetSize::kOffset16);
      bltzal(scratch, offset);
      break;

    // Unsigned comparison.
    case Ugreater:
      Sltu(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
      addiu(scratch, scratch, -1);
      offset = GetOffset(offset, L, OffsetSize::kOffset16);
      bgezal(scratch, offset);
      break;
    case Ugreater_equal:
      Sltu(scratch, rs, rt);
      addiu(scratch, scratch, -1);
      offset = GetOffset(offset, L, OffsetSize::kOffset16);
      bltzal(scratch, offset);
      break;
    case Uless:
      Sltu(scratch, rs, rt);
      addiu(scratch, scratch, -1);
      offset = GetOffset(offset, L, OffsetSize::kOffset16);
      bgezal(scratch, offset);
      break;
    case Uless_equal:
      Sltu(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
      addiu(scratch, scratch, -1);
      offset = GetOffset(offset, L, OffsetSize::kOffset16);
      bltzal(scratch, offset);
      break;

    default:
      UNREACHABLE();
  }

  // Emit a nop in the branch delay slot if required.
  if (bdslot == PROTECT)
    nop();

  return true;
}
2980
2981
2982bool MacroAssembler::BranchAndLinkShortCheck(int32_t offset, Label* L,
2983 Condition cond, Register rs,
2984 const Operand& rt,
2985 BranchDelaySlot bdslot) {
2986 BRANCH_ARGS_CHECK(cond, rs, rt);
2987
2988 if (!L) {
2989 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) {
2990 DCHECK(is_int26(offset));
2991 return BranchAndLinkShortHelperR6(offset, nullptr, cond, rs, rt);
2992 } else {
2993 DCHECK(is_int16(offset));
2994 return BranchAndLinkShortHelper(offset, nullptr, cond, rs, rt, bdslot);
2995 }
2996 } else {
2997 DCHECK(offset == 0);
2998 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) {
2999 return BranchAndLinkShortHelperR6(0, L, cond, rs, rt);
3000 } else {
3001 return BranchAndLinkShortHelper(0, L, cond, rs, rt, bdslot);
3002 }
3003 }
3004 return false;
Steve Block44f0eee2011-05-26 01:26:41 +01003005}
3006
3007
// Jumps to the address in |target|, optionally guarded by cond(rs, rt).
// The conditional form emits an inverted two-instruction skip branch
// around the jr.  PROTECT fills the jr's delay slot with a nop.
void MacroAssembler::Jump(Register target,
                          Condition cond,
                          Register rs,
                          const Operand& rt,
                          BranchDelaySlot bd) {
  BlockTrampolinePoolScope block_trampoline_pool(this);
  if (cond == cc_always) {
    jr(target);
  } else {
    BRANCH_ARGS_CHECK(cond, rs, rt);
    Branch(2, NegateCondition(cond), rs, rt);
    jr(target);
  }
  // Emit a nop in the branch delay slot if required.
  if (bd == PROTECT)
    nop();
}
3025
3026
// Jumps to an absolute target address with relocation mode |rmode|.
// Loads the address into t9 (clobbered) and jumps via the register form;
// a non-always condition branches around the load+jump sequence.
void MacroAssembler::Jump(intptr_t target,
                          RelocInfo::Mode rmode,
                          Condition cond,
                          Register rs,
                          const Operand& rt,
                          BranchDelaySlot bd) {
  Label skip;
  if (cond != cc_always) {
    Branch(USE_DELAY_SLOT, &skip, NegateCondition(cond), rs, rt);
  }
  // The first instruction of 'li' may be placed in the delay slot.
  // This is not an issue, t9 is expected to be clobbered anyway.
  li(t9, Operand(target, rmode));
  Jump(t9, al, zero_reg, Operand(zero_reg), bd);
  bind(&skip);
}
3043
3044
// Jump to a raw Address.  Code targets must use the Handle<Code> overload
// so the target participates in code-object relocation.
void MacroAssembler::Jump(Address target,
                          RelocInfo::Mode rmode,
                          Condition cond,
                          Register rs,
                          const Operand& rt,
                          BranchDelaySlot bd) {
  DCHECK(!RelocInfo::IsCodeTarget(rmode));
  Jump(reinterpret_cast<intptr_t>(target), rmode, cond, rs, rt, bd);
}
3054
3055
// Jump to a Code object.  The raw location is embedded and later fixed up
// by relocation, which is why dereferencing the handle here is safe.
void MacroAssembler::Jump(Handle<Code> code,
                          RelocInfo::Mode rmode,
                          Condition cond,
                          Register rs,
                          const Operand& rt,
                          BranchDelaySlot bd) {
  DCHECK(RelocInfo::IsCodeTarget(rmode));
  AllowDeferredHandleDereference embedding_raw_address;
  Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond, rs, rt, bd);
}
3066
3067
3068int MacroAssembler::CallSize(Register target,
3069 Condition cond,
3070 Register rs,
3071 const Operand& rt,
3072 BranchDelaySlot bd) {
3073 int size = 0;
3074
3075 if (cond == cc_always) {
3076 size += 1;
3077 } else {
3078 size += 3;
Steve Block44f0eee2011-05-26 01:26:41 +01003079 }
3080
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003081 if (bd == PROTECT)
3082 size += 1;
Steve Block44f0eee2011-05-26 01:26:41 +01003083
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003084 return size * kInstrSize;
3085}
Steve Block44f0eee2011-05-26 01:26:41 +01003086
Steve Block44f0eee2011-05-26 01:26:41 +01003087
// Note: To call gcc-compiled C code on mips, you must call thru t9.
//
// Calls the address in |target| (jalr), optionally guarded by cond(rs, rt)
// via an inverted skip branch.  In debug builds, verifies the emitted size
// against CallSize(), allowing for one extra instruction when the previous
// instruction was a compact branch (forbidden slot adjustment).
void MacroAssembler::Call(Register target,
                          Condition cond,
                          Register rs,
                          const Operand& rt,
                          BranchDelaySlot bd) {
#ifdef DEBUG
  int size = IsPrevInstrCompactBranch() ? kInstrSize : 0;
#endif

  BlockTrampolinePoolScope block_trampoline_pool(this);
  Label start;
  bind(&start);
  if (cond == cc_always) {
    jalr(target);
  } else {
    BRANCH_ARGS_CHECK(cond, rs, rt);
    Branch(2, NegateCondition(cond), rs, rt);
    jalr(target);
  }
  // Emit a nop in the branch delay slot if required.
  if (bd == PROTECT)
    nop();

#ifdef DEBUG
  CHECK_EQ(size + CallSize(target, cond, rs, rt, bd),
           SizeOfCodeGeneratedSince(&start));
#endif
}
3117
3118
3119int MacroAssembler::CallSize(Address target,
3120 RelocInfo::Mode rmode,
3121 Condition cond,
3122 Register rs,
3123 const Operand& rt,
3124 BranchDelaySlot bd) {
3125 int size = CallSize(t9, cond, rs, rt, bd);
3126 return size + 2 * kInstrSize;
3127}
3128
3129
// Calls an absolute address: loads it into t9 with a fixed-size li (so the
// sequence is patchable and matches CallSize) and calls through t9.
void MacroAssembler::Call(Address target,
                          RelocInfo::Mode rmode,
                          Condition cond,
                          Register rs,
                          const Operand& rt,
                          BranchDelaySlot bd) {
  BlockTrampolinePoolScope block_trampoline_pool(this);
  Label start;
  bind(&start);
  int32_t target_int = reinterpret_cast<int32_t>(target);
  // Must record previous source positions before the
  // li() generates a new code target.
  positions_recorder()->WriteRecordedPositions();
  li(t9, Operand(target_int, rmode), CONSTANT_SIZE);
  Call(t9, cond, rs, rt, bd);
  DCHECK_EQ(CallSize(target, rmode, cond, rs, rt, bd),
            SizeOfCodeGeneratedSince(&start));
}
3148
3149
// Returns the number of bytes Call(Handle<Code>, ...) will emit.  |ast_id|
// does not affect the size; it only changes the reloc mode when recorded.
int MacroAssembler::CallSize(Handle<Code> code,
                             RelocInfo::Mode rmode,
                             TypeFeedbackId ast_id,
                             Condition cond,
                             Register rs,
                             const Operand& rt,
                             BranchDelaySlot bd) {
  AllowDeferredHandleDereference using_raw_address;
  return CallSize(reinterpret_cast<Address>(code.location()),
                  rmode, cond, rs, rt, bd);
}
3161
3162
// Calls a Code object.  When a type-feedback id is supplied for a plain
// CODE_TARGET call, records it and upgrades the reloc mode so the id is
// associated with this call site.  Verifies the emitted size in debug mode.
void MacroAssembler::Call(Handle<Code> code,
                          RelocInfo::Mode rmode,
                          TypeFeedbackId ast_id,
                          Condition cond,
                          Register rs,
                          const Operand& rt,
                          BranchDelaySlot bd) {
  BlockTrampolinePoolScope block_trampoline_pool(this);
  Label start;
  bind(&start);
  DCHECK(RelocInfo::IsCodeTarget(rmode));
  if (rmode == RelocInfo::CODE_TARGET && !ast_id.IsNone()) {
    SetRecordedAstId(ast_id);
    rmode = RelocInfo::CODE_TARGET_WITH_ID;
  }
  AllowDeferredHandleDereference embedding_raw_address;
  Call(reinterpret_cast<Address>(code.location()), rmode, cond, rs, rt, bd);
  DCHECK_EQ(CallSize(code, rmode, ast_id, cond, rs, rt, bd),
            SizeOfCodeGeneratedSince(&start));
}
3183
3184
// Function return: an (optionally conditional) jump through the return
// address register ra.
void MacroAssembler::Ret(Condition cond,
                         Register rs,
                         const Operand& rt,
                         BranchDelaySlot bd) {
  Jump(ra, cond, rs, rt, bd);
}
3191
3192
// Emits an unconditional branch with full 32-bit reach.  On r6 a compact bc
// (26-bit) suffices when the label is unbound or near; otherwise the target
// address is built in 'at' with lui/ori (recorded as an encoded internal
// reference so it can be patched) and jumped to with jr.  Clobbers 'at'.
void MacroAssembler::BranchLong(Label* L, BranchDelaySlot bdslot) {
  if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT &&
      (!L->is_bound() || is_near_r6(L))) {
    BranchShortHelperR6(0, L);
  } else {
    BlockTrampolinePoolScope block_trampoline_pool(this);
    uint32_t imm32;
    imm32 = jump_address(L);
    {
      BlockGrowBufferScope block_buf_growth(this);
      // Buffer growth (and relocation) must be blocked for internal references
      // until associated instructions are emitted and available to be patched.
      RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED);
      lui(at, (imm32 & kHiMask) >> kLuiShift);
      ori(at, at, (imm32 & kImm16Mask));
    }
    jr(at);

    // Emit a nop in the branch delay slot if required.
    if (bdslot == PROTECT) nop();
  }
}
3215
3216
// Emits an unconditional branch-and-link with full 32-bit reach.  Mirrors
// BranchLong: compact balc on r6 when the label is unbound or near,
// otherwise a patchable lui/ori address build in 'at' plus jalr.
// Clobbers 'at'.
void MacroAssembler::BranchAndLinkLong(Label* L, BranchDelaySlot bdslot) {
  if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT &&
      (!L->is_bound() || is_near_r6(L))) {
    BranchAndLinkShortHelperR6(0, L);
  } else {
    BlockTrampolinePoolScope block_trampoline_pool(this);
    uint32_t imm32;
    imm32 = jump_address(L);
    {
      BlockGrowBufferScope block_buf_growth(this);
      // Buffer growth (and relocation) must be blocked for internal references
      // until associated instructions are emitted and available to be patched.
      RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED);
      lui(at, (imm32 & kHiMask) >> kLuiShift);
      ori(at, at, (imm32 & kImm16Mask));
    }
    jalr(at);

    // Emit a nop in the branch delay slot if required.
    if (bdslot == PROTECT) nop();
  }
}
3239
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003240
// Pops |drop| stack slots and returns, folding the stack adjustment into
// the jr's delay slot so the pair costs two instructions.
void MacroAssembler::DropAndRet(int drop) {
  DCHECK(is_int16(drop * kPointerSize));  // addiu takes a 16-bit immediate.
  Ret(USE_DELAY_SLOT);
  addiu(sp, sp, drop * kPointerSize);
}
Steve Block44f0eee2011-05-26 01:26:41 +01003246
// Conditionally pops |drop| stack slots and returns: when cond(r1, r2)
// does not hold, execution falls through past the drop+ret.
void MacroAssembler::DropAndRet(int drop,
                                Condition cond,
                                Register r1,
                                const Operand& r2) {
  // Both Drop and Ret need to be conditional.
  Label skip;
  if (cond != cc_always) {
    Branch(&skip, NegateCondition(cond), r1, r2);
  }

  Drop(drop);
  Ret();

  if (cond != cc_always) {
    bind(&skip);
  }
}
3264
3265
// Removes |count| pointer-sized slots from the stack, optionally only when
// cond(reg, op) holds (an inverted branch skips the adjustment otherwise).
// A non-positive count emits nothing.
void MacroAssembler::Drop(int count,
                          Condition cond,
                          Register reg,
                          const Operand& op) {
  if (count <= 0) {
    return;
  }

  Label skip;

  if (cond != al) {
    Branch(&skip, NegateCondition(cond), reg, op);
  }

  Addu(sp, sp, Operand(count * kPointerSize));

  if (cond != al) {
    bind(&skip);
  }
}
3286
3287
3288
// Exchanges the contents of reg1 and reg2.  With no scratch register the
// three-XOR trick is used (no temporary needed); with a scratch, three
// plain moves.
void MacroAssembler::Swap(Register reg1,
                          Register reg2,
                          Register scratch) {
  if (scratch.is(no_reg)) {
    Xor(reg1, reg1, Operand(reg2));
    Xor(reg2, reg2, Operand(reg1));
    Xor(reg1, reg1, Operand(reg2));
  } else {
    mov(scratch, reg1);
    mov(reg1, reg2);
    mov(reg2, scratch);
  }
}
3302
3303
// Calls a local label: a plain branch-and-link (ra receives the return
// address).
void MacroAssembler::Call(Label* target) {
  BranchAndLink(target);
}
3307
3308
// Pushes a heap object handle onto the stack.  Clobbers 'at', which is
// used to materialize the handle value.
void MacroAssembler::Push(Handle<Object> handle) {
  li(at, Operand(handle));
  push(at);
}
3313
3314
// Emits a debugger breakpoint: a zero-argument call into the runtime's
// HandleDebuggerStatement via the CEntry stub, marked with
// DEBUGGER_STATEMENT relocation so the debugger can find the site.
void MacroAssembler::DebugBreak() {
  PrepareCEntryArgs(0);
  PrepareCEntryFunction(
      ExternalReference(Runtime::kHandleDebuggerStatement, isolate()));
  CEntryStub ces(isolate(), 1);
  DCHECK(AllowThisStubCall(&ces));
  Call(ces.GetCode(), RelocInfo::DEBUGGER_STATEMENT);
}
3323
Steve Block6ded16b2010-05-10 14:33:55 +01003324
Andrei Popescu31002712010-02-23 13:46:05 +00003325// ---------------------------------------------------------------------------
Ben Murdoch257744e2011-11-30 15:57:28 +00003326// Exception handling.
Andrei Popescu31002712010-02-23 13:46:05 +00003327
// Pushes a new stack handler frame (a single 'next' slot) and makes it the
// current handler by storing sp into the isolate's handler address.
// Clobbers t1 and t2.
void MacroAssembler::PushStackHandler() {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize);

  // Link the current handler as the next handler.
  li(t2, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
  lw(t1, MemOperand(t2));
  push(t1);

  // Set this new handler as the current one.
  sw(sp, MemOperand(t2));
}
3341
3342
// Unlinks the topmost stack handler: pops its 'next' pointer into a1,
// drops the remainder of the handler frame, and stores the pointer back as
// the isolate's current handler.  Clobbers a1 and 'at'.
void MacroAssembler::PopStackHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  pop(a1);
  Addu(sp, sp, Operand(StackHandlerConstants::kSize - kPointerSize));
  li(at, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
  sw(a1, MemOperand(at));
}
3350
3351
// Allocates a fixed-size object in the space selected by |flags|.
// |object_size| is in bytes, or in words when SIZE_IN_WORDS is set.  On
// success |result| holds the new object's address (tagged when TAG_OBJECT
// is set); on exhaustion control jumps to |gc_required|.  Clobbers
// |scratch1|, |scratch2| and t9.
void MacroAssembler::Allocate(int object_size,
                              Register result,
                              Register scratch1,
                              Register scratch2,
                              Label* gc_required,
                              AllocationFlags flags) {
  // NOTE(review): this size check runs before the SIZE_IN_WORDS scaling
  // below — confirm callers passing word counts stay within range.
  DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      li(result, 0x7091);
      li(scratch1, 0x7191);
      li(scratch2, 0x7291);
    }
    jmp(gc_required);
    return;
  }

  DCHECK(!AreAliased(result, scratch1, scratch2, t9, at));

  // Make object size into bytes.
  if ((flags & SIZE_IN_WORDS) != 0) {
    object_size *= kPointerSize;
  }
  DCHECK_EQ(0, object_size & kObjectAlignmentMask);

  // Check relative positions of allocation top and limit addresses.
  // ARM adds additional checks to make sure the ldm instruction can be
  // used. On MIPS we don't have ldm so we don't need additional checks either.
  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);
  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  intptr_t top = reinterpret_cast<intptr_t>(allocation_top.address());
  intptr_t limit = reinterpret_cast<intptr_t>(allocation_limit.address());
  // Top and limit live in adjacent words, so one base register reaches both.
  DCHECK((limit - top) == kPointerSize);

  // Set up allocation top address and allocation limit registers.
  Register top_address = scratch1;
  // This code stores a temporary value in t9.
  Register alloc_limit = t9;
  Register result_end = scratch2;
  li(top_address, Operand(allocation_top));

  if ((flags & RESULT_CONTAINS_TOP) == 0) {
    // Load allocation top into result and allocation limit into alloc_limit.
    lw(result, MemOperand(top_address));
    lw(alloc_limit, MemOperand(top_address, kPointerSize));
  } else {
    if (emit_debug_code()) {
      // Assert that result actually contains top on entry.
      lw(alloc_limit, MemOperand(top_address));
      Check(eq, kUnexpectedAllocationTop, result, Operand(alloc_limit));
    }
    // Load allocation limit. Result already contains allocation top.
    lw(alloc_limit, MemOperand(top_address, limit - top));
  }

  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    // Align the next allocation. Storing the filler map without checking top is
    // safe in new-space because the limit of the heap is aligned there.
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    And(result_end, result, Operand(kDoubleAlignmentMask));
    Label aligned;
    Branch(&aligned, eq, result_end, Operand(zero_reg));
    if ((flags & PRETENURE) != 0) {
      // Outside new-space the filler store must not pass the limit.
      Branch(gc_required, Ugreater_equal, result, Operand(alloc_limit));
    }
    // Fill the alignment gap with a one-pointer filler so the heap stays
    // iterable.
    li(result_end, Operand(isolate()->factory()->one_pointer_filler_map()));
    sw(result_end, MemOperand(result));
    Addu(result, result, Operand(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if new space is exhausted. Use result
  // to calculate the new top.
  Addu(result_end, result, Operand(object_size));
  Branch(gc_required, Ugreater, result_end, Operand(alloc_limit));
  sw(result_end, MemOperand(top_address));

  // Tag object if requested.
  if ((flags & TAG_OBJECT) != 0) {
    Addu(result, result, Operand(kHeapObjectTag));
  }
}
3438
3439
// Allocates an object whose size is held in |object_size| (bytes, or
// words when SIZE_IN_WORDS is set) in the space selected by |flags|.
// On success |result| holds the new object's address (tagged when
// TAG_OBJECT is set) and |result_end| the address one past it; on
// exhaustion control jumps to |gc_required|.  Clobbers |scratch| and t9.
void MacroAssembler::Allocate(Register object_size, Register result,
                              Register result_end, Register scratch,
                              Label* gc_required, AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      li(result, 0x7091);
      li(scratch, 0x7191);
      li(result_end, 0x7291);
    }
    jmp(gc_required);
    return;
  }

  // |object_size| and |result_end| may overlap if the DOUBLE_ALIGNMENT flag
  // is not specified. Other registers must not overlap.
  DCHECK(!AreAliased(object_size, result, scratch, t9, at));
  DCHECK(!AreAliased(result_end, result, scratch, t9, at));
  DCHECK((flags & DOUBLE_ALIGNMENT) == 0 || !object_size.is(result_end));

  // Check relative positions of allocation top and limit addresses.
  // ARM adds additional checks to make sure the ldm instruction can be
  // used. On MIPS we don't have ldm so we don't need additional checks either.
  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);
  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);
  intptr_t top = reinterpret_cast<intptr_t>(allocation_top.address());
  intptr_t limit = reinterpret_cast<intptr_t>(allocation_limit.address());
  // Top and limit live in adjacent words, so one base register reaches both.
  DCHECK((limit - top) == kPointerSize);

  // Set up allocation top address and allocation limit registers.
  Register top_address = scratch;
  // This code stores a temporary value in t9.
  Register alloc_limit = t9;
  li(top_address, Operand(allocation_top));

  if ((flags & RESULT_CONTAINS_TOP) == 0) {
    // Load allocation top into result and allocation limit into alloc_limit.
    lw(result, MemOperand(top_address));
    lw(alloc_limit, MemOperand(top_address, kPointerSize));
  } else {
    if (emit_debug_code()) {
      // Assert that result actually contains top on entry.
      lw(alloc_limit, MemOperand(top_address));
      Check(eq, kUnexpectedAllocationTop, result, Operand(alloc_limit));
    }
    // Load allocation limit. Result already contains allocation top.
    lw(alloc_limit, MemOperand(top_address, limit - top));
  }

  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    // Align the next allocation. Storing the filler map without checking top is
    // safe in new-space because the limit of the heap is aligned there.
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    And(result_end, result, Operand(kDoubleAlignmentMask));
    Label aligned;
    Branch(&aligned, eq, result_end, Operand(zero_reg));
    if ((flags & PRETENURE) != 0) {
      // Outside new-space the filler store must not pass the limit.
      Branch(gc_required, Ugreater_equal, result, Operand(alloc_limit));
    }
    // Fill the alignment gap with a one-pointer filler so the heap stays
    // iterable.
    li(result_end, Operand(isolate()->factory()->one_pointer_filler_map()));
    sw(result_end, MemOperand(result));
    Addu(result, result, Operand(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if new space is exhausted. Use result
  // to calculate the new top. Object size may be in words so a shift is
  // required to get the number of bytes.
  if ((flags & SIZE_IN_WORDS) != 0) {
    Lsa(result_end, result, object_size, kPointerSizeLog2);
  } else {
    Addu(result_end, result, Operand(object_size));
  }
  Branch(gc_required, Ugreater, result_end, Operand(alloc_limit));

  // Update allocation top. result temporarily holds the new top.
  if (emit_debug_code()) {
    And(alloc_limit, result_end, Operand(kObjectAlignmentMask));
    Check(eq, kUnalignedAllocationInNewSpace, alloc_limit, Operand(zero_reg));
  }
  sw(result_end, MemOperand(top_address));

  // Tag object if requested.
  if ((flags & TAG_OBJECT) != 0) {
    Addu(result, result, Operand(kHeapObjectTag));
  }
}
3529
3530
Steve Block44f0eee2011-05-26 01:26:41 +01003531void MacroAssembler::AllocateTwoByteString(Register result,
3532 Register length,
3533 Register scratch1,
3534 Register scratch2,
3535 Register scratch3,
3536 Label* gc_required) {
3537 // Calculate the number of bytes needed for the characters in the string while
3538 // observing object alignment.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003539 DCHECK((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
Steve Block44f0eee2011-05-26 01:26:41 +01003540 sll(scratch1, length, 1); // Length in bytes, not chars.
3541 addiu(scratch1, scratch1,
3542 kObjectAlignmentMask + SeqTwoByteString::kHeaderSize);
3543 And(scratch1, scratch1, Operand(~kObjectAlignmentMask));
3544
3545 // Allocate two-byte string in new space.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003546 Allocate(scratch1,
3547 result,
3548 scratch2,
3549 scratch3,
3550 gc_required,
3551 TAG_OBJECT);
Steve Block44f0eee2011-05-26 01:26:41 +01003552
3553 // Set the map, length and hash field.
3554 InitializeNewString(result,
3555 length,
3556 Heap::kStringMapRootIndex,
3557 scratch1,
3558 scratch2);
3559}
3560
3561
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003562void MacroAssembler::AllocateOneByteString(Register result, Register length,
3563 Register scratch1, Register scratch2,
3564 Register scratch3,
3565 Label* gc_required) {
Steve Block44f0eee2011-05-26 01:26:41 +01003566 // Calculate the number of bytes needed for the characters in the string
3567 // while observing object alignment.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003568 DCHECK((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
3569 DCHECK(kCharSize == 1);
3570 addiu(scratch1, length, kObjectAlignmentMask + SeqOneByteString::kHeaderSize);
Steve Block44f0eee2011-05-26 01:26:41 +01003571 And(scratch1, scratch1, Operand(~kObjectAlignmentMask));
3572
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003573 // Allocate one-byte string in new space.
3574 Allocate(scratch1,
3575 result,
3576 scratch2,
3577 scratch3,
3578 gc_required,
3579 TAG_OBJECT);
Steve Block44f0eee2011-05-26 01:26:41 +01003580
3581 // Set the map, length and hash field.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003582 InitializeNewString(result, length, Heap::kOneByteStringMapRootIndex,
3583 scratch1, scratch2);
Steve Block44f0eee2011-05-26 01:26:41 +01003584}
3585
3586
3587void MacroAssembler::AllocateTwoByteConsString(Register result,
3588 Register length,
3589 Register scratch1,
3590 Register scratch2,
3591 Label* gc_required) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003592 Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
3593 TAG_OBJECT);
Steve Block44f0eee2011-05-26 01:26:41 +01003594 InitializeNewString(result,
3595 length,
3596 Heap::kConsStringMapRootIndex,
3597 scratch1,
3598 scratch2);
3599}
3600
3601
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003602void MacroAssembler::AllocateOneByteConsString(Register result, Register length,
3603 Register scratch1,
3604 Register scratch2,
3605 Label* gc_required) {
3606 Allocate(ConsString::kSize,
3607 result,
3608 scratch1,
3609 scratch2,
3610 gc_required,
3611 TAG_OBJECT);
3612
3613 InitializeNewString(result, length, Heap::kConsOneByteStringMapRootIndex,
3614 scratch1, scratch2);
Steve Block44f0eee2011-05-26 01:26:41 +01003615}
3616
3617
Ben Murdoch589d6972011-11-30 16:04:58 +00003618void MacroAssembler::AllocateTwoByteSlicedString(Register result,
3619 Register length,
3620 Register scratch1,
3621 Register scratch2,
3622 Label* gc_required) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003623 Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
3624 TAG_OBJECT);
Ben Murdoch589d6972011-11-30 16:04:58 +00003625
3626 InitializeNewString(result,
3627 length,
3628 Heap::kSlicedStringMapRootIndex,
3629 scratch1,
3630 scratch2);
3631}
3632
3633
// Allocates a one-byte SlicedString in new space and initializes its map,
// length and hash field.  Branches to |gc_required| when allocation fails.
void MacroAssembler::AllocateOneByteSlicedString(Register result,
                                                 Register length,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  InitializeNewString(result, length, Heap::kSlicedOneByteStringMapRootIndex,
                      scratch1, scratch2);
}
3645
3646
// Branches to |not_unique_name| unless |reg| holds an instance type that is
// a unique name: an internalized string or SYMBOL_TYPE.
void MacroAssembler::JumpIfNotUniqueNameInstanceType(Register reg,
                                                     Label* not_unique_name) {
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  Label succeed;
  // Internalized strings have both the not-string and not-internalized
  // bits clear, so the masked value is zero exactly for them.
  And(at, reg, Operand(kIsNotStringMask | kIsNotInternalizedMask));
  Branch(&succeed, eq, at, Operand(zero_reg));
  Branch(not_unique_name, ne, reg, Operand(SYMBOL_TYPE));

  bind(&succeed);
}
3657
3658
// Allocates a heap number or jumps to the label if the young space is full and
// a scavenge is needed.
// |heap_number_map| must already hold the heap number (or mutable heap
// number) map; this is asserted, not loaded here.  When tagging_mode is
// not TAG_RESULT, |result| is left untagged.
void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Register heap_number_map,
                                        Label* need_gc,
                                        TaggingMode tagging_mode,
                                        MutableMode mode) {
  // Allocate an object in the heap for the heap number and tag it as a heap
  // object.
  Allocate(HeapNumber::kSize, result, scratch1, scratch2, need_gc,
           tagging_mode == TAG_RESULT ? TAG_OBJECT : NO_ALLOCATION_FLAGS);

  Heap::RootListIndex map_index = mode == MUTABLE
      ? Heap::kMutableHeapNumberMapRootIndex
      : Heap::kHeapNumberMapRootIndex;
  AssertIsRoot(heap_number_map, map_index);

  // Store heap number map in the allocated object.
  if (tagging_mode == TAG_RESULT) {
    sw(heap_number_map, FieldMemOperand(result, HeapObject::kMapOffset));
  } else {
    // Result is untagged; use a plain (non-field) store.
    sw(heap_number_map, MemOperand(result, HeapObject::kMapOffset));
  }
}
3685
3686
// Allocates a heap number and stores the double in |value| into it.
// Uses t8 to hold the heap number map; jumps to |gc_required| on failure.
void MacroAssembler::AllocateHeapNumberWithValue(Register result,
                                                 FPURegister value,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  LoadRoot(t8, Heap::kHeapNumberMapRootIndex);
  AllocateHeapNumber(result, scratch1, scratch2, t8, gc_required);
  sdc1(value, FieldMemOperand(result, HeapNumber::kValueOffset));
}
3696
3697
// Allocates a JSValue wrapper in new space and initializes it with
// |constructor|'s initial map, empty properties and elements, and |value|.
// Clobbers |scratch1| and |scratch2|; jumps to |gc_required| on failure.
void MacroAssembler::AllocateJSValue(Register result, Register constructor,
                                     Register value, Register scratch1,
                                     Register scratch2, Label* gc_required) {
  DCHECK(!result.is(constructor));
  DCHECK(!result.is(scratch1));
  DCHECK(!result.is(scratch2));
  DCHECK(!result.is(value));

  // Allocate JSValue in new space.
  Allocate(JSValue::kSize, result, scratch1, scratch2, gc_required, TAG_OBJECT);

  // Initialize the JSValue.
  LoadGlobalFunctionInitialMap(constructor, scratch1, scratch2);
  sw(scratch1, FieldMemOperand(result, HeapObject::kMapOffset));
  LoadRoot(scratch1, Heap::kEmptyFixedArrayRootIndex);
  sw(scratch1, FieldMemOperand(result, JSObject::kPropertiesOffset));
  sw(scratch1, FieldMemOperand(result, JSObject::kElementsOffset));
  sw(value, FieldMemOperand(result, JSValue::kValueOffset));
  // All four fields above must exactly fill the object.
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
}
3718
3719
// Copies |length| bytes from |src| to |dst|.  After aligning |src|, data is
// read word-at-a-time; |dst| is always written byte-by-byte because its
// alignment is unknown.  |src|, |dst|, |length| and |scratch| are all
// clobbered.
void MacroAssembler::CopyBytes(Register src,
                               Register dst,
                               Register length,
                               Register scratch) {
  Label align_loop_1, word_loop, byte_loop, byte_loop_1, done;

  // Align src before copying in word size chunks.
  // Lengths <= kPointerSize go straight to the byte loop.
  Branch(&byte_loop, le, length, Operand(kPointerSize));
  bind(&align_loop_1);
  And(scratch, src, kPointerSize - 1);
  Branch(&word_loop, eq, scratch, Operand(zero_reg));
  lbu(scratch, MemOperand(src));
  Addu(src, src, 1);
  sb(scratch, MemOperand(dst));
  Addu(dst, dst, 1);
  Subu(length, length, Operand(1));
  Branch(&align_loop_1, ne, length, Operand(zero_reg));

  // Copy bytes in word size chunks.
  bind(&word_loop);
  if (emit_debug_code()) {
    And(scratch, src, kPointerSize - 1);
    Assert(eq, kExpectingAlignmentForCopyBytes,
        scratch, Operand(zero_reg));
  }
  // Fewer than kPointerSize bytes left: finish in the byte loop.
  Branch(&byte_loop, lt, length, Operand(kPointerSize));
  lw(scratch, MemOperand(src));
  Addu(src, src, kPointerSize);

  // TODO(kalmard) check if this can be optimized to use sw in most cases.
  // Can't use unaligned access - copy byte by byte.
  if (kArchEndian == kLittle) {
    sb(scratch, MemOperand(dst, 0));
    srl(scratch, scratch, 8);
    sb(scratch, MemOperand(dst, 1));
    srl(scratch, scratch, 8);
    sb(scratch, MemOperand(dst, 2));
    srl(scratch, scratch, 8);
    sb(scratch, MemOperand(dst, 3));
  } else {
    sb(scratch, MemOperand(dst, 3));
    srl(scratch, scratch, 8);
    sb(scratch, MemOperand(dst, 2));
    srl(scratch, scratch, 8);
    sb(scratch, MemOperand(dst, 1));
    srl(scratch, scratch, 8);
    sb(scratch, MemOperand(dst, 0));
  }

  Addu(dst, dst, 4);

  Subu(length, length, Operand(kPointerSize));
  Branch(&word_loop);

  // Copy the last bytes if any left.
  bind(&byte_loop);
  Branch(&done, eq, length, Operand(zero_reg));
  bind(&byte_loop_1);
  lbu(scratch, MemOperand(src));
  Addu(src, src, 1);
  sb(scratch, MemOperand(dst));
  Addu(dst, dst, 1);
  Subu(length, length, Operand(1));
  Branch(&byte_loop_1, ne, length, Operand(zero_reg));
  bind(&done);
}
3786
3787
// Stores |filler| into every pointer-sized slot in the half-open range
// [current_address, end_address).  |current_address| is advanced up to
// |end_address|.
void MacroAssembler::InitializeFieldsWithFiller(Register current_address,
                                                Register end_address,
                                                Register filler) {
  Label loop, entry;
  // Test-first loop: jump to the condition before the first store so an
  // empty range writes nothing.
  Branch(&entry);
  bind(&loop);
  sw(filler, MemOperand(current_address));
  Addu(current_address, current_address, kPointerSize);
  bind(&entry);
  // Continue while current_address < end_address (unsigned compare).
  Branch(&loop, ult, current_address, Operand(end_address));
}
3799
3800
// Branches to |fail| unless |map|'s elements kind is one of the fast kinds
// (FAST_SMI_ELEMENTS .. FAST_HOLEY_ELEMENTS).  Clobbers |scratch|.
void MacroAssembler::CheckFastElements(Register map,
                                       Register scratch,
                                       Label* fail) {
  // The four fast kinds occupy the lowest bit-field values, so a single
  // unsigned upper-bound compare suffices.
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  lbu(scratch, FieldMemOperand(map, Map::kBitField2Offset));
  Branch(fail, hi, scratch,
         Operand(Map::kMaximumBitField2FastHoleyElementValue));
}
3812
3813
// Branches to |fail| unless |map| has fast object elements (FAST_ELEMENTS
// or FAST_HOLEY_ELEMENTS) — the smi-only fast kinds also fail.  Clobbers
// |scratch|.
void MacroAssembler::CheckFastObjectElements(Register map,
                                             Register scratch,
                                             Label* fail) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  lbu(scratch, FieldMemOperand(map, Map::kBitField2Offset));
  // At or below the smi-only range -> fail; above the fast range -> fail.
  Branch(fail, ls, scratch,
         Operand(Map::kMaximumBitField2FastHoleySmiElementValue));
  Branch(fail, hi, scratch,
         Operand(Map::kMaximumBitField2FastHoleyElementValue));
}
3827
3828
// Branches to |fail| unless |map| has fast smi-only elements
// (FAST_SMI_ELEMENTS or FAST_HOLEY_SMI_ELEMENTS).  Clobbers |scratch|.
void MacroAssembler::CheckFastSmiElements(Register map,
                                          Register scratch,
                                          Label* fail) {
  // The smi-only kinds are the two lowest bit-field values.
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  lbu(scratch, FieldMemOperand(map, Map::kBitField2Offset));
  Branch(fail, hi, scratch,
         Operand(Map::kMaximumBitField2FastHoleySmiElementValue));
}
3838
3839
// Stores the number in |value_reg| into the FixedDoubleArray at
// |elements_reg| at smi index |key_reg|.  Smis are converted to double;
// heap-number NaNs are replaced with the canonical NaN.  Jumps to |fail|
// when |value_reg| is neither a smi nor a heap number.  Clobbers
// scratch1-3, at, f0 and f2.
void MacroAssembler::StoreNumberToDoubleElements(Register value_reg,
                                                 Register key_reg,
                                                 Register elements_reg,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Register scratch3,
                                                 Label* fail,
                                                 int elements_offset) {
  DCHECK(!AreAliased(value_reg, key_reg, elements_reg, scratch1, scratch2,
                     scratch3));
  Label smi_value, maybe_nan, have_double_value, is_nan, done;
  Register mantissa_reg = scratch2;
  Register exponent_reg = scratch3;

  // Handle smi values specially.
  JumpIfSmi(value_reg, &smi_value);

  // Ensure that the object is a heap number
  CheckMap(value_reg,
           scratch1,
           Heap::kHeapNumberMapRootIndex,
           fail,
           DONT_DO_SMI_CHECK);

  // Check for nan: all NaN values have a value greater (signed) than 0x7ff00000
  // in the exponent.
  li(scratch1, Operand(kHoleNanUpper32 & HeapNumber::kExponentMask));
  lw(exponent_reg, FieldMemOperand(value_reg, HeapNumber::kExponentOffset));
  Branch(&maybe_nan, ge, exponent_reg, Operand(scratch1));

  lw(mantissa_reg, FieldMemOperand(value_reg, HeapNumber::kMantissaOffset));

  bind(&have_double_value);
  // scratch1 = &elements[key]; key is a smi, hence the kSmiTagSize shift
  // folded into the scaled-address computation.
  Lsa(scratch1, elements_reg, key_reg, kDoubleSizeLog2 - kSmiTagSize);
  sw(mantissa_reg,
     FieldMemOperand(scratch1, FixedDoubleArray::kHeaderSize - elements_offset
         + kHoleNanLower32Offset));
  sw(exponent_reg,
     FieldMemOperand(scratch1, FixedDoubleArray::kHeaderSize - elements_offset
         + kHoleNanUpper32Offset));
  jmp(&done);

  bind(&maybe_nan);
  // Could be NaN or Infinity. If fraction is not zero, it's NaN, otherwise
  // it's an Infinity, and the non-NaN code path applies.
  Branch(&is_nan, gt, exponent_reg, Operand(scratch1));
  lw(mantissa_reg, FieldMemOperand(value_reg, HeapNumber::kMantissaOffset));
  Branch(&have_double_value, eq, mantissa_reg, Operand(zero_reg));
  bind(&is_nan);
  // Load canonical NaN for storing into the double array.
  LoadRoot(at, Heap::kNanValueRootIndex);
  lw(mantissa_reg, FieldMemOperand(at, HeapNumber::kMantissaOffset));
  lw(exponent_reg, FieldMemOperand(at, HeapNumber::kExponentOffset));
  jmp(&have_double_value);

  bind(&smi_value);
  Addu(scratch1, elements_reg,
       Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag -
               elements_offset));
  Lsa(scratch1, scratch1, key_reg, kDoubleSizeLog2 - kSmiTagSize);
  // scratch1 is now effective address of the double element

  Register untagged_value = scratch2;
  SmiUntag(untagged_value, value_reg);
  // Convert the untagged int32 to double and store it.
  mtc1(untagged_value, f2);
  cvt_d_w(f0, f2);
  sdc1(f0, MemOperand(scratch1, 0));
  bind(&done);
}
3909
3910
// Loads |obj|'s map into |scratch|, compares it against the handle |map|
// and branches to |branch_to| when |cond| holds.  |early_success| is
// forwarded to the register overload.
void MacroAssembler::CompareMapAndBranch(Register obj,
                                         Register scratch,
                                         Handle<Map> map,
                                         Label* early_success,
                                         Condition cond,
                                         Label* branch_to) {
  lw(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
  CompareMapAndBranch(scratch, map, early_success, cond, branch_to);
}
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003920
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003921
// Compares |obj_map| directly against the handle |map| and branches to
// |branch_to| when |cond| holds.
// NOTE(review): |early_success| is unused on this path — presumably kept
// for signature parity with other ports; confirm before removing.
void MacroAssembler::CompareMapAndBranch(Register obj_map,
                                         Handle<Map> map,
                                         Label* early_success,
                                         Condition cond,
                                         Label* branch_to) {
  Branch(branch_to, cond, obj_map, Operand(map));
}
3929
3930
// Branches to |fail| unless |obj|'s map equals the handle |map|.
// Optionally smi-checks |obj| first (a smi has no map).  Clobbers
// |scratch|.
void MacroAssembler::CheckMap(Register obj,
                              Register scratch,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }
  Label success;
  CompareMapAndBranch(obj, scratch, map, &success, ne, fail);
  bind(&success);
}
3943
3944
// Jumps to the code object |success| when |obj|'s map equals the map held
// weakly by |cell|; otherwise falls through.  Optionally smi-checks |obj|
// first.  Clobbers |scratch1| and |scratch2|.
void MacroAssembler::DispatchWeakMap(Register obj, Register scratch1,
                                     Register scratch2, Handle<WeakCell> cell,
                                     Handle<Code> success,
                                     SmiCheckType smi_check_type) {
  Label fail;
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, &fail);
  }
  lw(scratch1, FieldMemOperand(obj, HeapObject::kMapOffset));
  GetWeakValue(scratch2, cell);
  // NOTE(review): presumably a cleared cell's value can never equal a map,
  // so the dispatch just falls through in that case — confirm.
  Jump(success, RelocInfo::CODE_TARGET, eq, scratch1, Operand(scratch2));
  bind(&fail);
}
3958
3959
// Branches to |fail| unless |obj|'s map equals the root-list map at
// |index|.  Optionally smi-checks |obj| first.  Clobbers |scratch| and at.
void MacroAssembler::CheckMap(Register obj,
                              Register scratch,
                              Heap::RootListIndex index,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }
  lw(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
  LoadRoot(at, index);
  Branch(fail, ne, scratch, Operand(at));
}
3972
3973
// Loads the value field of WeakCell |cell| into |value|.
void MacroAssembler::GetWeakValue(Register value, Handle<WeakCell> cell) {
  li(value, Operand(cell));
  lw(value, FieldMemOperand(value, WeakCell::kValueOffset));
}
3978
3979
// Loads the value of WeakCell |cell| into |value| and jumps to |miss| when
// that value is a smi (presumably the cleared-cell sentinel — confirm
// against WeakCell's clearing convention).
void MacroAssembler::LoadWeakValue(Register value, Handle<WeakCell> cell,
                                   Label* miss) {
  GetWeakValue(value, cell);
  JumpIfSmi(value, miss);
}
3985
3986
// Moves the C ABI double return value into |dst|.  Under the soft-float
// ABI the value arrives in the v0/v1 pair (word order depends on
// endianness); otherwise it is in f0.
void MacroAssembler::MovFromFloatResult(DoubleRegister dst) {
  if (IsMipsSoftFloatABI) {
    if (kArchEndian == kLittle) {
      Move(dst, v0, v1);
    } else {
      Move(dst, v1, v0);
    }
  } else {
    Move(dst, f0);  // Reg f0 is o32 ABI FP return value.
  }
}
3998
3999
// Moves the first C ABI double parameter into |dst|.  Under the
// soft-float ABI it arrives in the a0/a1 pair (word order depends on
// endianness); otherwise it is in f12.
void MacroAssembler::MovFromFloatParameter(DoubleRegister dst) {
  if (IsMipsSoftFloatABI) {
    if (kArchEndian == kLittle) {
      Move(dst, a0, a1);
    } else {
      Move(dst, a1, a0);
    }
  } else {
    Move(dst, f12);  // Reg f12 is o32 ABI FP first argument value.
  }
}
4011
4012
// Moves |src| into the first C ABI double parameter slot: f12 with a
// hard-float ABI, or the a0/a1 pair (word order depends on endianness)
// under soft-float.
void MacroAssembler::MovToFloatParameter(DoubleRegister src) {
  if (!IsMipsSoftFloatABI) {
    Move(f12, src);
  } else {
    if (kArchEndian == kLittle) {
      Move(a0, a1, src);
    } else {
      Move(a1, a0, src);
    }
  }
}
4024
4025
// Moves |src| into the C ABI double return slot: f0 with a hard-float
// ABI, or the v0/v1 pair (word order depends on endianness) under
// soft-float.
void MacroAssembler::MovToFloatResult(DoubleRegister src) {
  if (!IsMipsSoftFloatABI) {
    Move(f0, src);
  } else {
    if (kArchEndian == kLittle) {
      Move(v0, v1, src);
    } else {
      Move(v1, v0, src);
    }
  }
}
4037
4038
// Places |src1| and |src2| in the first two C ABI double argument slots
// (f12/f14 for hard-float, a0..a3 pairs for soft-float).  When src2 already
// lives in f12 the moves are done in reverse order so f12 is read before it
// is overwritten.
void MacroAssembler::MovToFloatParameters(DoubleRegister src1,
                                          DoubleRegister src2) {
  if (!IsMipsSoftFloatABI) {
    if (src2.is(f12)) {
      // Move src2 out of f12 first so writing src1 into f12 cannot clobber it.
      DCHECK(!src1.is(f14));
      Move(f14, src2);
      Move(f12, src1);
    } else {
      Move(f12, src1);
      Move(f14, src2);
    }
  } else {
    if (kArchEndian == kLittle) {
      Move(a0, a1, src1);
      Move(a2, a3, src2);
    } else {
      Move(a1, a0, src1);
      Move(a3, a2, src2);
    }
  }
}
4060
4061
Steve Block6ded16b2010-05-10 14:33:55 +01004062// -----------------------------------------------------------------------------
Ben Murdoch257744e2011-11-30 15:57:28 +00004063// JavaScript invokes.
Steve Block6ded16b2010-05-10 14:33:55 +01004064
// Emits the argument-count check that precedes a JS function invocation.
// If the expected and actual counts provably match (or the callee uses the
// don't-adapt sentinel) no adaptation code is emitted; otherwise control is
// routed through the ArgumentsAdaptorTrampoline.  On a definite mismatch,
// *definitely_mismatches is set and the caller's invoke sequence is skipped
// entirely (the adaptor performs the call/jump itself).
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Label* done,
                                    bool* definitely_mismatches,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  bool definitely_matches = false;
  *definitely_mismatches = false;
  Label regular_invoke;

  // Check whether the expected and actual arguments count match. If not,
  // setup registers according to contract with ArgumentsAdaptorTrampoline:
  //  a0: actual arguments count
  //  a1: function (passed through to callee)
  //  a2: expected arguments count

  // The code below is made a lot easier because the calling code already sets
  // up actual and expected registers according to the contract if values are
  // passed in registers.
  DCHECK(actual.is_immediate() || actual.reg().is(a0));
  DCHECK(expected.is_immediate() || expected.reg().is(a2));

  if (expected.is_immediate()) {
    DCHECK(actual.is_immediate());
    li(a0, Operand(actual.immediate()));
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        *definitely_mismatches = true;
        li(a2, Operand(expected.immediate()));
      }
    }
  } else if (actual.is_immediate()) {
    li(a0, Operand(actual.immediate()));
    Branch(&regular_invoke, eq, expected.reg(), Operand(a0));
  } else {
    Branch(&regular_invoke, eq, expected.reg(), Operand(actual.reg()));
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor));
      Call(adaptor);
      call_wrapper.AfterCall();
      if (!*definitely_mismatches) {
        // The adaptor returned here; skip the caller's direct invoke path.
        Branch(done);
      }
    } else {
      Jump(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&regular_invoke);
  }
}
4128
Steve Block44f0eee2011-05-26 01:26:41 +01004129
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004130void MacroAssembler::FloodFunctionIfStepping(Register fun, Register new_target,
4131 const ParameterCount& expected,
4132 const ParameterCount& actual) {
4133 Label skip_flooding;
4134 ExternalReference step_in_enabled =
4135 ExternalReference::debug_step_in_enabled_address(isolate());
4136 li(t0, Operand(step_in_enabled));
4137 lb(t0, MemOperand(t0));
4138 Branch(&skip_flooding, eq, t0, Operand(zero_reg));
4139 {
4140 FrameScope frame(this,
4141 has_frame() ? StackFrame::NONE : StackFrame::INTERNAL);
4142 if (expected.is_reg()) {
4143 SmiTag(expected.reg());
4144 Push(expected.reg());
4145 }
4146 if (actual.is_reg()) {
4147 SmiTag(actual.reg());
4148 Push(actual.reg());
4149 }
4150 if (new_target.is_valid()) {
4151 Push(new_target);
4152 }
4153 Push(fun);
4154 Push(fun);
Ben Murdoch097c5b22016-05-18 11:27:45 +01004155 CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004156 Pop(fun);
4157 if (new_target.is_valid()) {
4158 Pop(new_target);
4159 }
4160 if (actual.is_reg()) {
4161 Pop(actual.reg());
4162 SmiUntag(actual.reg());
4163 }
4164 if (expected.is_reg()) {
4165 Pop(expected.reg());
4166 SmiUntag(expected.reg());
4167 }
4168 }
4169 bind(&skip_flooding);
4170}
4171
4172
// Invokes (calls or tail-jumps, per |flag|) the code of |function|, after
// running the argument-count prologue and optional debug-step flooding.
// Register contract: function in a1, new.target (if valid) in a3.
void MacroAssembler::InvokeFunctionCode(Register function, Register new_target,
                                        const ParameterCount& expected,
                                        const ParameterCount& actual,
                                        InvokeFlag flag,
                                        const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());
  DCHECK(function.is(a1));
  DCHECK_IMPLIES(new_target.is_valid(), new_target.is(a3));

  if (call_wrapper.NeedsDebugStepCheck()) {
    FloodFunctionIfStepping(function, new_target, expected, actual);
  }

  // Clear the new.target register if not given.
  if (!new_target.is_valid()) {
    LoadRoot(a3, Heap::kUndefinedValueRootIndex);
  }

  Label done;
  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, &done, &definitely_mismatches, flag,
                 call_wrapper);
  if (!definitely_mismatches) {
    // We call indirectly through the code field in the function to
    // allow recompilation to take effect without changing any of the
    // call sites.
    Register code = t0;
    lw(code, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(code));
      Call(code);
      call_wrapper.AfterCall();
    } else {
      DCHECK(flag == JUMP_FUNCTION);
      Jump(code);
    }
    // Continue here if InvokePrologue does handle the invocation due to
    // mismatched parameter counts.
    bind(&done);
  }
}
4215
4216
Steve Block6ded16b2010-05-10 14:33:55 +01004217void MacroAssembler::InvokeFunction(Register function,
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004218 Register new_target,
Steve Block6ded16b2010-05-10 14:33:55 +01004219 const ParameterCount& actual,
Steve Block44f0eee2011-05-26 01:26:41 +01004220 InvokeFlag flag,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004221 const CallWrapper& call_wrapper) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004222 // You can't call a function without a valid frame.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004223 DCHECK(flag == JUMP_FUNCTION || has_frame());
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004224
Steve Block6ded16b2010-05-10 14:33:55 +01004225 // Contract with called JS functions requires that function is passed in a1.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004226 DCHECK(function.is(a1));
Steve Block6ded16b2010-05-10 14:33:55 +01004227 Register expected_reg = a2;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004228 Register temp_reg = t0;
Steve Block6ded16b2010-05-10 14:33:55 +01004229
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004230 lw(temp_reg, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
Steve Block6ded16b2010-05-10 14:33:55 +01004231 lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
4232 lw(expected_reg,
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004233 FieldMemOperand(temp_reg,
4234 SharedFunctionInfo::kFormalParameterCountOffset));
Steve Block44f0eee2011-05-26 01:26:41 +01004235 sra(expected_reg, expected_reg, kSmiTagSize);
Steve Block6ded16b2010-05-10 14:33:55 +01004236
4237 ParameterCount expected(expected_reg);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004238 InvokeFunctionCode(function, new_target, expected, actual, flag,
4239 call_wrapper);
Steve Block44f0eee2011-05-26 01:26:41 +01004240}
4241
4242
// Invokes |function| (in a1) with a caller-supplied expected argument count.
// Sets up cp from the function's context, then delegates to
// InvokeFunctionCode with no new.target.
void MacroAssembler::InvokeFunction(Register function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  // Contract with called JS functions requires that function is passed in a1.
  DCHECK(function.is(a1));

  // Get the function and setup the context.
  lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

  InvokeFunctionCode(a1, no_reg, expected, actual, flag, call_wrapper);
}
4259
4260
// Convenience overload: materializes the JSFunction handle into a1 and
// delegates to the register-based InvokeFunction above.
void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  li(a1, function);
  InvokeFunction(a1, expected, actual, flag, call_wrapper);
}
4269
4270
// Branches to |fail| if |object| is not a string.  Loads the instance type
// from the object's map and tests the not-string bit; clobbers |scratch|.
void MacroAssembler::IsObjectJSStringType(Register object,
                                          Register scratch,
                                          Label* fail) {
  DCHECK(kNotStringTag != 0);

  lw(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
  lbu(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  And(scratch, scratch, Operand(kIsNotStringMask));
  Branch(fail, ne, scratch, Operand(zero_reg));
}
4281
4282
// Branches to |fail| if |object|'s instance type is above LAST_NAME_TYPE,
// i.e. the object is not a Name (string or symbol).  Clobbers |scratch|.
void MacroAssembler::IsObjectNameType(Register object,
                                      Register scratch,
                                      Label* fail) {
  lw(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
  lbu(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  Branch(fail, hi, scratch, Operand(LAST_NAME_TYPE));
}
4290
4291
Steve Block6ded16b2010-05-10 14:33:55 +01004292// ---------------------------------------------------------------------------
4293// Support functions.
4294
Steve Block44f0eee2011-05-26 01:26:41 +01004295
// Loads the constructor function of |map| into |result|.  The
// constructor-or-back-pointer field may point at intermediate maps, so the
// loop follows back pointers until a non-map (the constructor, or a Smi) is
// found.  |temp| and |temp2| are clobbered by the type check.
void MacroAssembler::GetMapConstructor(Register result, Register map,
                                       Register temp, Register temp2) {
  Label done, loop;
  lw(result, FieldMemOperand(map, Map::kConstructorOrBackPointerOffset));
  bind(&loop);
  JumpIfSmi(result, &done);
  GetObjectType(result, temp, temp2);
  Branch(&done, ne, temp2, Operand(MAP_TYPE));
  lw(result, FieldMemOperand(result, Map::kConstructorOrBackPointerOffset));
  Branch(&loop);
  bind(&done);
}
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004308
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004309
// Loads |function|'s prototype into |result|, jumping to |miss| if the
// prototype-or-initial-map slot holds the hole.  If the slot holds an
// initial map, the prototype is read from that map.  Clobbers |scratch|
// and t8.
void MacroAssembler::TryGetFunctionPrototype(Register function, Register result,
                                             Register scratch, Label* miss) {
  // Get the prototype or initial map from the function.
  lw(result,
     FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  LoadRoot(t8, Heap::kTheHoleValueRootIndex);
  Branch(miss, eq, result, Operand(t8));

  // If the function does not have an initial map, we're done.
  Label done;
  GetObjectType(result, scratch, scratch);
  Branch(&done, ne, scratch, Operand(MAP_TYPE));

  // Get the prototype from the initial map.
  lw(result, FieldMemOperand(result, Map::kPrototypeOffset));

  // All done.
  bind(&done);
}
Steve Block6ded16b2010-05-10 14:33:55 +01004333
4334
Steve Block44f0eee2011-05-26 01:26:41 +01004335void MacroAssembler::GetObjectType(Register object,
4336 Register map,
4337 Register type_reg) {
4338 lw(map, FieldMemOperand(object, HeapObject::kMapOffset));
4339 lbu(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
4340}
Steve Block6ded16b2010-05-10 14:33:55 +01004341
4342
4343// -----------------------------------------------------------------------------
Ben Murdoch257744e2011-11-30 15:57:28 +00004344// Runtime calls.
Steve Block6ded16b2010-05-10 14:33:55 +01004345
// Calls |stub|'s generated code, optionally conditional on (cond, r1, r2)
// and with the given branch-delay-slot policy.
void MacroAssembler::CallStub(CodeStub* stub,
                              TypeFeedbackId ast_id,
                              Condition cond,
                              Register r1,
                              const Operand& r2,
                              BranchDelaySlot bd) {
  DCHECK(AllowThisStubCall(stub));  // Stub calls are not allowed in some stubs.
  Call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id,
       cond, r1, r2, bd);
}
4356
4357
// Tail-calls (jumps to) |stub|'s generated code, optionally conditional on
// (cond, r1, r2).
void MacroAssembler::TailCallStub(CodeStub* stub,
                                  Condition cond,
                                  Register r1,
                                  const Operand& r2,
                                  BranchDelaySlot bd) {
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond, r1, r2, bd);
}
4365
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004366
// A stub call is allowed if we have a frame, or if the stub never sets one
// up itself (so no frame is needed around the call).
bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
  return has_frame_ || !stub->SometimesSetsUpAFrame();
}
4370
Andrei Popescu31002712010-02-23 13:46:05 +00004371
// Extracts the cached array index from a string hash field in |hash| and
// stores it, Smi-tagged, in |index|.
void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // If the hash field contains an array index pick it out. The assert checks
  // that the constants for the maximum number of digits for an array index
  // cached in the hash field and the number of bits reserved for it does not
  // conflict.
  DCHECK(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  DecodeFieldToSmi<String::ArrayIndexValueBits>(index, hash);
}
4381
4382
4383void MacroAssembler::ObjectToDoubleFPURegister(Register object,
4384 FPURegister result,
4385 Register scratch1,
4386 Register scratch2,
4387 Register heap_number_map,
4388 Label* not_number,
4389 ObjectToDoubleFlags flags) {
4390 Label done;
4391 if ((flags & OBJECT_NOT_SMI) == 0) {
4392 Label not_smi;
4393 JumpIfNotSmi(object, &not_smi);
4394 // Remove smi tag and convert to double.
4395 sra(scratch1, object, kSmiTagSize);
4396 mtc1(scratch1, result);
4397 cvt_d_w(result, result);
4398 Branch(&done);
4399 bind(&not_smi);
4400 }
4401 // Check for heap number and load double value from it.
4402 lw(scratch1, FieldMemOperand(object, HeapObject::kMapOffset));
4403 Branch(not_number, ne, scratch1, Operand(heap_number_map));
4404
4405 if ((flags & AVOID_NANS_AND_INFINITIES) != 0) {
4406 // If exponent is all ones the number is either a NaN or +/-Infinity.
4407 Register exponent = scratch1;
4408 Register mask_reg = scratch2;
4409 lw(exponent, FieldMemOperand(object, HeapNumber::kExponentOffset));
4410 li(mask_reg, HeapNumber::kExponentMask);
4411
4412 And(exponent, exponent, mask_reg);
4413 Branch(not_number, eq, exponent, Operand(mask_reg));
4414 }
4415 ldc1(result, FieldMemOperand(object, HeapNumber::kValueOffset));
4416 bind(&done);
4417}
4418
4419
// Converts the Smi in |smi| to a double in |value|: untag, move to the FPU,
// then convert from 32-bit integer to double.  Clobbers |scratch1|.
void MacroAssembler::SmiToDoubleFPURegister(Register smi,
                                            FPURegister value,
                                            Register scratch1) {
  sra(scratch1, smi, kSmiTagSize);
  mtc1(scratch1, value);
  cvt_d_w(value, value);
}
4427
4428
// Shared tail for the *BranchOvf helpers: |overflow_dst| is negative iff
// overflow occurred.  Branches to whichever of the two labels is provided
// (at least one must be non-null); if both are given, falls through to an
// unconditional branch to |no_overflow_label|.
static inline void BranchOvfHelper(MacroAssembler* masm, Register overflow_dst,
                                   Label* overflow_label,
                                   Label* no_overflow_label) {
  DCHECK(overflow_label || no_overflow_label);
  if (!overflow_label) {
    DCHECK(no_overflow_label);
    masm->Branch(no_overflow_label, ge, overflow_dst, Operand(zero_reg));
  } else {
    masm->Branch(overflow_label, lt, overflow_dst, Operand(zero_reg));
    if (no_overflow_label) masm->Branch(no_overflow_label);
  }
}
4441
4442
// dst = left + right with a branch on signed 32-bit overflow.  Register
// operands delegate to the register overload; immediates either get
// materialized into t9 (r6, which has bovc/bnvc) or use the classic
// xor-based sign test: overflow happened iff the result's sign differs from
// both operands' signs.  Clobbers t9 and (in the immediate path) |scratch|.
void MacroAssembler::AddBranchOvf(Register dst, Register left,
                                  const Operand& right, Label* overflow_label,
                                  Label* no_overflow_label, Register scratch) {
  if (right.is_reg()) {
    AddBranchOvf(dst, left, right.rm(), overflow_label, no_overflow_label,
                 scratch);
  } else {
    if (IsMipsArchVariant(kMips32r6)) {
      Register right_reg = t9;
      DCHECK(!left.is(right_reg));
      li(right_reg, Operand(right));
      AddBranchOvf(dst, left, right_reg, overflow_label, no_overflow_label);
    } else {
      Register overflow_dst = t9;
      DCHECK(!dst.is(scratch));
      DCHECK(!dst.is(overflow_dst));
      DCHECK(!scratch.is(overflow_dst));
      DCHECK(!left.is(overflow_dst));
      if (dst.is(left)) {
        mov(scratch, left);                  // Preserve left.
        Addu(dst, left, right.immediate());  // Left is overwritten.
        xor_(scratch, dst, scratch);         // Original left.
        // Load right since xori takes uint16 as immediate.
        Addu(overflow_dst, zero_reg, right);
        xor_(overflow_dst, dst, overflow_dst);
        and_(overflow_dst, overflow_dst, scratch);
      } else {
        Addu(dst, left, right.immediate());
        xor_(overflow_dst, dst, left);
        // Load right since xori takes uint16 as immediate.
        Addu(scratch, zero_reg, right);
        xor_(scratch, dst, scratch);
        and_(overflow_dst, scratch, overflow_dst);
      }
      BranchOvfHelper(this, overflow_dst, overflow_label, no_overflow_label);
    }
  }
}
4481
4482
// dst = left + right with a branch on signed 32-bit overflow.  On r6 the
// bovc/bnvc compact branches do the overflow test in hardware; on older
// variants the sign-xor technique is used (overflow iff the result's sign
// differs from both operands').  Clobbers t9 and possibly |scratch|.
void MacroAssembler::AddBranchOvf(Register dst, Register left, Register right,
                                  Label* overflow_label,
                                  Label* no_overflow_label, Register scratch) {
  if (IsMipsArchVariant(kMips32r6)) {
    if (!overflow_label) {
      DCHECK(no_overflow_label);
      DCHECK(!dst.is(scratch));
      // bnvc needs the original operands, so copy any operand that dst
      // aliases into a scratch register before the add overwrites it.
      Register left_reg = left.is(dst) ? scratch : left;
      Register right_reg = right.is(dst) ? t9 : right;
      DCHECK(!dst.is(left_reg));
      DCHECK(!dst.is(right_reg));
      Move(left_reg, left);
      Move(right_reg, right);
      addu(dst, left, right);
      bnvc(left_reg, right_reg, no_overflow_label);
    } else {
      // bovc must come first: it tests the operands, not the result.
      bovc(left, right, overflow_label);
      addu(dst, left, right);
      if (no_overflow_label) bc(no_overflow_label);
    }
  } else {
    Register overflow_dst = t9;
    DCHECK(!dst.is(scratch));
    DCHECK(!dst.is(overflow_dst));
    DCHECK(!scratch.is(overflow_dst));
    DCHECK(!left.is(overflow_dst));
    DCHECK(!right.is(overflow_dst));
    DCHECK(!left.is(scratch));
    DCHECK(!right.is(scratch));

    if (left.is(right) && dst.is(left)) {
      // left == right == dst would lose the operand; keep a copy in t9.
      mov(overflow_dst, right);
      right = overflow_dst;
    }

    if (dst.is(left)) {
      mov(scratch, left);           // Preserve left.
      addu(dst, left, right);       // Left is overwritten.
      xor_(scratch, dst, scratch);  // Original left.
      xor_(overflow_dst, dst, right);
      and_(overflow_dst, overflow_dst, scratch);
    } else if (dst.is(right)) {
      mov(scratch, right);          // Preserve right.
      addu(dst, left, right);       // Right is overwritten.
      xor_(scratch, dst, scratch);  // Original right.
      xor_(overflow_dst, dst, left);
      and_(overflow_dst, overflow_dst, scratch);
    } else {
      addu(dst, left, right);
      xor_(overflow_dst, dst, left);
      xor_(scratch, dst, right);
      and_(overflow_dst, scratch, overflow_dst);
    }
    BranchOvfHelper(this, overflow_dst, overflow_label, no_overflow_label);
  }
}
4539
4540
// dst = left - right with a branch on signed 32-bit overflow.  Register
// operands delegate to the register overload; immediates use the sign-xor
// test: overflow iff the operands' signs differ and the result's sign
// differs from left's.  Clobbers t9 and |scratch|.
void MacroAssembler::SubBranchOvf(Register dst, Register left,
                                  const Operand& right, Label* overflow_label,
                                  Label* no_overflow_label, Register scratch) {
  DCHECK(overflow_label || no_overflow_label);
  if (right.is_reg()) {
    SubBranchOvf(dst, left, right.rm(), overflow_label, no_overflow_label,
                 scratch);
  } else {
    Register overflow_dst = t9;
    DCHECK(!dst.is(scratch));
    DCHECK(!dst.is(overflow_dst));
    DCHECK(!scratch.is(overflow_dst));
    DCHECK(!left.is(overflow_dst));
    DCHECK(!left.is(scratch));
    if (dst.is(left)) {
      mov(scratch, left);                  // Preserve left.
      Subu(dst, left, right.immediate());  // Left is overwritten.
      // Load right since xori takes uint16 as immediate.
      Addu(overflow_dst, zero_reg, right);
      xor_(overflow_dst, scratch, overflow_dst);  // scratch is original left.
      xor_(scratch, dst, scratch);                // scratch is original left.
      and_(overflow_dst, scratch, overflow_dst);
    } else {
      Subu(dst, left, right);
      xor_(overflow_dst, dst, left);
      // Load right since xori takes uint16 as immediate.
      Addu(scratch, zero_reg, right);
      xor_(scratch, left, scratch);
      and_(overflow_dst, scratch, overflow_dst);
    }
    BranchOvfHelper(this, overflow_dst, overflow_label, no_overflow_label);
  }
}
4574
4575
// dst = left - right with a branch on signed 32-bit overflow, using the
// sign-xor test (overflow iff the operands' signs differ and the result's
// sign differs from left's).  left == right is handled specially since the
// difference is always zero.  Clobbers t9 and |scratch|.
void MacroAssembler::SubBranchOvf(Register dst, Register left, Register right,
                                  Label* overflow_label,
                                  Label* no_overflow_label, Register scratch) {
  DCHECK(overflow_label || no_overflow_label);
  Register overflow_dst = t9;
  DCHECK(!dst.is(scratch));
  DCHECK(!dst.is(overflow_dst));
  DCHECK(!scratch.is(overflow_dst));
  DCHECK(!overflow_dst.is(left));
  DCHECK(!overflow_dst.is(right));
  DCHECK(!scratch.is(left));
  DCHECK(!scratch.is(right));

  // This happens with some crankshaft code. Since Subu works fine if
  // left == right, let's not make that restriction here.
  if (left.is(right)) {
    // left - left == 0 can never overflow.
    mov(dst, zero_reg);
    if (no_overflow_label) {
      Branch(no_overflow_label);
    }
    // NOTE(review): when both labels are given, the Branch above is
    // unconditional, so the subtraction sequence emitted below is dead code
    // in this case (runtime behavior is still correct) — looks like a
    // code-size-only inefficiency; confirm before restructuring.
  }

  if (dst.is(left)) {
    mov(scratch, left);                // Preserve left.
    subu(dst, left, right);            // Left is overwritten.
    xor_(overflow_dst, dst, scratch);  // scratch is original left.
    xor_(scratch, scratch, right);     // scratch is original left.
    and_(overflow_dst, scratch, overflow_dst);
  } else if (dst.is(right)) {
    mov(scratch, right);           // Preserve right.
    subu(dst, left, right);        // Right is overwritten.
    xor_(overflow_dst, dst, left);
    xor_(scratch, left, scratch);  // Original right.
    and_(overflow_dst, scratch, overflow_dst);
  } else {
    subu(dst, left, right);
    xor_(overflow_dst, dst, left);
    xor_(scratch, left, right);
    and_(overflow_dst, scratch, overflow_dst);
  }
  BranchOvfHelper(this, overflow_dst, overflow_label, no_overflow_label);
}
4618
4619
// Calls the C++ runtime function |f| with |num_arguments| stack arguments
// through the CEntry stub.  The argument count goes in a0 and the function's
// external reference in a1 (via PrepareCEntryArgs/Function).
void MacroAssembler::CallRuntime(const Runtime::Function* f, int num_arguments,
                                 SaveFPRegsMode save_doubles,
                                 BranchDelaySlot bd) {
  // All parameters are on the stack. v0 has the return value after call.

  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  CHECK(f->nargs < 0 || f->nargs == num_arguments);

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  PrepareCEntryArgs(num_arguments);
  PrepareCEntryFunction(ExternalReference(f, isolate()));
  CEntryStub stub(isolate(), 1, save_doubles);
  CallStub(&stub, TypeFeedbackId::None(), al, zero_reg, Operand(zero_reg), bd);
}
4639
4640
// Calls the external (C) function |ext| with |num_arguments| arguments
// through the CEntry stub, without saving FP registers.
void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments,
                                           BranchDelaySlot bd) {
  PrepareCEntryArgs(num_arguments);
  PrepareCEntryFunction(ext);

  CEntryStub stub(isolate(), 1);
  CallStub(&stub, TypeFeedbackId::None(), al, zero_reg, Operand(zero_reg), bd);
}
4650
4651
// Tail-calls the runtime function |fid| through the CEntry stub.  Only
// single-result functions are supported; the argument count is set up in a0
// when the function declares a fixed arity.
void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid) {
  const Runtime::Function* function = Runtime::FunctionForId(fid);
  DCHECK_EQ(1, function->result_size);
  if (function->nargs >= 0) {
    PrepareCEntryArgs(function->nargs);
  }
  JumpToExternalReference(ExternalReference(fid, isolate()));
}
4660
4661
// Unconditionally jumps to |builtin| through the CEntry stub (the builtin's
// reference is placed in a1 by PrepareCEntryFunction).
void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin,
                                             BranchDelaySlot bd) {
  PrepareCEntryFunction(builtin);
  CEntryStub stub(isolate(), 1);
  Jump(stub.GetCode(),
       RelocInfo::CODE_TARGET,
       al,
       zero_reg,
       Operand(zero_reg),
       bd);
}
4673
4674
Andrei Popescu31002712010-02-23 13:46:05 +00004675void MacroAssembler::SetCounter(StatsCounter* counter, int value,
4676 Register scratch1, Register scratch2) {
Steve Block44f0eee2011-05-26 01:26:41 +01004677 if (FLAG_native_code_counters && counter->Enabled()) {
4678 li(scratch1, Operand(value));
4679 li(scratch2, Operand(ExternalReference(counter)));
4680 sw(scratch1, MemOperand(scratch2));
4681 }
Andrei Popescu31002712010-02-23 13:46:05 +00004682}
4683
4684
// Adds |value| (> 0) to the stats counter's memory cell via a
// load-modify-store, if native-code counters are enabled.  Clobbers both
// scratch registers.
void MacroAssembler::IncrementCounter(StatsCounter* counter, int value,
                                      Register scratch1, Register scratch2) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    li(scratch2, Operand(ExternalReference(counter)));
    lw(scratch1, MemOperand(scratch2));
    Addu(scratch1, scratch1, Operand(value));
    sw(scratch1, MemOperand(scratch2));
  }
}
4695
4696
// Subtracts |value| (> 0) from the stats counter's memory cell via a
// load-modify-store, if native-code counters are enabled.  Clobbers both
// scratch registers.
void MacroAssembler::DecrementCounter(StatsCounter* counter, int value,
                                      Register scratch1, Register scratch2) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    li(scratch2, Operand(ExternalReference(counter)));
    lw(scratch1, MemOperand(scratch2));
    Subu(scratch1, scratch1, Operand(value));
    sw(scratch1, MemOperand(scratch2));
  }
}
4707
4708
Steve Block6ded16b2010-05-10 14:33:55 +01004709// -----------------------------------------------------------------------------
Ben Murdoch257744e2011-11-30 15:57:28 +00004710// Debugging.
Andrei Popescu31002712010-02-23 13:46:05 +00004711
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004712void MacroAssembler::Assert(Condition cc, BailoutReason reason,
Andrei Popescu31002712010-02-23 13:46:05 +00004713 Register rs, Operand rt) {
Ben Murdoch257744e2011-11-30 15:57:28 +00004714 if (emit_debug_code())
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004715 Check(cc, reason, rs, rt);
Steve Block44f0eee2011-05-26 01:26:41 +01004716}
4717
4718
4719void MacroAssembler::AssertFastElements(Register elements) {
Ben Murdoch257744e2011-11-30 15:57:28 +00004720 if (emit_debug_code()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004721 DCHECK(!elements.is(at));
Steve Block44f0eee2011-05-26 01:26:41 +01004722 Label ok;
Ben Murdoch257744e2011-11-30 15:57:28 +00004723 push(elements);
Steve Block44f0eee2011-05-26 01:26:41 +01004724 lw(elements, FieldMemOperand(elements, HeapObject::kMapOffset));
4725 LoadRoot(at, Heap::kFixedArrayMapRootIndex);
4726 Branch(&ok, eq, elements, Operand(at));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004727 LoadRoot(at, Heap::kFixedDoubleArrayMapRootIndex);
4728 Branch(&ok, eq, elements, Operand(at));
Steve Block44f0eee2011-05-26 01:26:41 +01004729 LoadRoot(at, Heap::kFixedCOWArrayMapRootIndex);
4730 Branch(&ok, eq, elements, Operand(at));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004731 Abort(kJSObjectWithFastElementsMapHasSlowElements);
Steve Block44f0eee2011-05-26 01:26:41 +01004732 bind(&ok);
Ben Murdoch257744e2011-11-30 15:57:28 +00004733 pop(elements);
Steve Block44f0eee2011-05-26 01:26:41 +01004734 }
Andrei Popescu31002712010-02-23 13:46:05 +00004735}
4736
4737
// Unconditional run-time check: execution continues when condition cc holds
// on (rs, rt); otherwise it falls through into Abort(reason).
void MacroAssembler::Check(Condition cc, BailoutReason reason,
                           Register rs, Operand rt) {
  Label L;
  Branch(&L, cc, rs, rt);  // Condition holds: jump over the abort.
  Abort(reason);
  // Will not return here.
  bind(&L);
}
4746
4747
// Emits code that terminates execution with the given bailout reason by
// calling Runtime::kAbort (with the reason pushed as a Smi argument). In
// DEBUG builds the reason text is recorded as an assembler comment, and with
// --trap-on-abort a plain stop() is emitted instead of the runtime call.
// When the trampoline pool is blocked, the sequence is padded with nops to a
// fixed instruction count so callers can rely on its size.
void MacroAssembler::Abort(BailoutReason reason) {
  Label abort_start;
  bind(&abort_start);
#ifdef DEBUG
  const char* msg = GetBailoutReason(reason);
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }

  if (FLAG_trap_on_abort) {
    stop(msg);
    return;
  }
#endif

  // Pass the bailout reason to the runtime as a Smi on the stack.
  li(a0, Operand(Smi::FromInt(reason)));
  push(a0);
  // Disable stub call restrictions to always allow calls to abort.
  if (!has_frame_) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    CallRuntime(Runtime::kAbort);
  } else {
    CallRuntime(Runtime::kAbort);
  }
  // Will not return here.
  if (is_trampoline_pool_blocked()) {
    // If the calling code cares about the exact number of
    // instructions generated, we insert padding here to keep the size
    // of the Abort macro constant.
    // Currently in debug mode with debug_code enabled the number of
    // generated instructions is 10, so we use this as a maximum value.
    static const int kExpectedAbortInstructions = 10;
    int abort_instructions = InstructionsGeneratedSince(&abort_start);
    DCHECK(abort_instructions <= kExpectedAbortInstructions);
    while (abort_instructions++ < kExpectedAbortInstructions) {
      nop();
    }
  }
}
4790
4791
// Loads into |dst| the context found |context_chain_length| levels up the
// context chain starting at cp; a length of zero copies the current context.
void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    lw(dst, MemOperand(cp, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    for (int i = 1; i < context_chain_length; i++) {
      lw(dst, MemOperand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    }
  } else {
    // Slot is in the current function context. Move it into the
    // destination register in case we store into it (the write barrier
    // cannot be allowed to destroy the context in esi).
    Move(dst, cp);
  }
}
4806
4807
// If |map_in_out| equals the native context's cached array map for
// |expected_kind|, replaces it with the cached map for |transitioned_kind|;
// otherwise jumps to |no_map_match|. Clobbers |scratch| and 'at'.
void MacroAssembler::LoadTransitionedArrayMapConditional(
    ElementsKind expected_kind,
    ElementsKind transitioned_kind,
    Register map_in_out,
    Register scratch,
    Label* no_map_match) {
  DCHECK(IsFastElementsKind(expected_kind));
  DCHECK(IsFastElementsKind(transitioned_kind));

  // Check that the function's map is the same as the expected cached map.
  lw(scratch, NativeContextMemOperand());
  lw(at, ContextMemOperand(scratch, Context::ArrayMapIndex(expected_kind)));
  Branch(no_map_match, ne, map_in_out, Operand(at));

  // Use the transitioned cached map.
  lw(map_in_out,
     ContextMemOperand(scratch, Context::ArrayMapIndex(transitioned_kind)));
}
4826
4827
// Loads slot |index| of the native context into |dst| (two dependent loads:
// first the native context itself, then the slot).
void MacroAssembler::LoadNativeContextSlot(int index, Register dst) {
  lw(dst, NativeContextMemOperand());
  lw(dst, ContextMemOperand(dst, index));
}
4832
4833
// Loads the initial map of a global function into |map|. In debug code,
// verifies the loaded value really is a map (its own map is the meta map)
// and aborts otherwise. |scratch| is used only by the debug check.
void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map,
                                                  Register scratch) {
  // Load the initial map. The global functions all have initial maps.
  lw(map, FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    CheckMap(map, scratch, Heap::kMetaMapRootIndex, &fail, DO_SMI_CHECK);
    Branch(&ok);
    bind(&fail);
    Abort(kGlobalFunctionsMustHaveInitialMap);
    bind(&ok);
  }
}
4848
Steve Block6ded16b2010-05-10 14:33:55 +01004849
// Emits the standard stub frame prologue: pushes ra, fp, cp and a
// StackFrame::STUB marker, then points fp at the saved fp slot.
void MacroAssembler::StubPrologue() {
  Push(ra, fp, cp);
  Push(Smi::FromInt(StackFrame::STUB));
  // Adjust FP to point to saved FP.
  Addu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
}
4856
4857
// Emits the JS function prologue. The sequence has a fixed, predictable size
// (kNoCodeAgeSequenceLength) because the code-aging machinery patches and
// inspects it in place: either the young sequence (push registers + marker
// nop) or, when |code_pre_aging| is set, a call to the pre-aged code stub.
void MacroAssembler::Prologue(bool code_pre_aging) {
  PredictableCodeSizeScope predictible_code_size_scope(
      this, kNoCodeAgeSequenceLength);
  // The following three instructions must remain together and unmodified
  // for code aging to work properly.
  if (code_pre_aging) {
    // Pre-age the code.
    Code* stub = Code::GetPreAgedCodeAgeStub(isolate());
    nop(Assembler::CODE_AGE_MARKER_NOP);
    // Load the stub address to t9 and call it,
    // GetCodeAgeAndParity() extracts the stub address from this instruction.
    li(t9,
       Operand(reinterpret_cast<uint32_t>(stub->instruction_start())),
       CONSTANT_SIZE);
    nop();  // Prevent jalr to jal optimization.
    jalr(t9, a0);
    nop();  // Branch delay slot nop.
    nop();  // Pad the empty space.
  } else {
    Push(ra, fp, cp, a1);
    nop(Assembler::CODE_AGE_SEQUENCE_NOP);
    // Adjust fp to point to caller's fp.
    Addu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
  }
}
4883
4884
// Loads the current function's type feedback vector into |vector| via the
// function object on the frame and its SharedFunctionInfo.
void MacroAssembler::EmitLoadTypeFeedbackVector(Register vector) {
  lw(vector, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  lw(vector, FieldMemOperand(vector, JSFunction::kSharedFunctionInfoOffset));
  lw(vector,
     FieldMemOperand(vector, SharedFunctionInfo::kFeedbackVectorOffset));
}
4891
4892
// Overload taking a constant-pool flag; MIPS has no out-of-line constant
// pool, so this variant must never be called.
void MacroAssembler::EnterFrame(StackFrame::Type type,
                                bool load_constant_pool_pointer_reg) {
  // Out-of-line constant pool not implemented on mips.
  UNREACHABLE();
}
4898
4899
// Builds a standard internal frame of the given type: pushes ra, fp, cp, a
// Smi type marker and the code object, then points fp at the saved fp slot.
// Clobbers t8 and t9.
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  addiu(sp, sp, -5 * kPointerSize);
  li(t8, Operand(Smi::FromInt(type)));
  li(t9, Operand(CodeObject()), CONSTANT_SIZE);
  sw(ra, MemOperand(sp, 4 * kPointerSize));
  sw(fp, MemOperand(sp, 3 * kPointerSize));
  sw(cp, MemOperand(sp, 2 * kPointerSize));
  sw(t8, MemOperand(sp, 1 * kPointerSize));  // Frame type marker.
  sw(t9, MemOperand(sp, 0 * kPointerSize));  // Code object.
  // Adjust FP to point to saved FP.
  Addu(fp, sp,
       Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
}
4913
4914
// Tears down a frame built by EnterFrame: restores sp from fp, reloads the
// saved fp and ra, and pops them.
void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  mov(sp, fp);
  lw(fp, MemOperand(sp, 0 * kPointerSize));
  lw(ra, MemOperand(sp, 1 * kPointerSize));
  addiu(sp, sp, 2 * kPointerSize);
}
4921
4922
// Builds an exit frame for calling out to C/runtime code: saves ra/fp, sets
// up the new fp, records the code object, publishes fp and cp in the
// isolate's top-frame slots, optionally spills the even-numbered double
// registers, and reserves |stack_space| words plus slots for the return
// address and an optional struct-return value. Clobbers t8 and 'at'.
void MacroAssembler::EnterExitFrame(bool save_doubles,
                                    int stack_space) {
  // Set up the frame structure on the stack.
  STATIC_ASSERT(2 * kPointerSize == ExitFrameConstants::kCallerSPDisplacement);
  STATIC_ASSERT(1 * kPointerSize == ExitFrameConstants::kCallerPCOffset);
  STATIC_ASSERT(0 * kPointerSize == ExitFrameConstants::kCallerFPOffset);

  // This is how the stack will look:
  // fp + 2 (==kCallerSPDisplacement) - old stack's end
  // [fp + 1 (==kCallerPCOffset)] - saved old ra
  // [fp + 0 (==kCallerFPOffset)] - saved old fp
  // [fp - 1 (==kSPOffset)] - sp of the called function
  // [fp - 2 (==kCodeOffset)] - CodeObject
  // fp - (2 + stack_space + alignment) == sp == [fp - kSPOffset] - top of the
  //   new stack (will contain saved ra)

  // Save registers.
  addiu(sp, sp, -4 * kPointerSize);
  sw(ra, MemOperand(sp, 3 * kPointerSize));
  sw(fp, MemOperand(sp, 2 * kPointerSize));
  addiu(fp, sp, 2 * kPointerSize);  // Set up new frame pointer.

  if (emit_debug_code()) {
    sw(zero_reg, MemOperand(fp, ExitFrameConstants::kSPOffset));
  }

  // Accessed from ExitFrame::code_slot.
  li(t8, Operand(CodeObject()), CONSTANT_SIZE);
  sw(t8, MemOperand(fp, ExitFrameConstants::kCodeOffset));

  // Save the frame pointer and the context in top.
  li(t8, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
  sw(fp, MemOperand(t8));
  li(t8, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
  sw(cp, MemOperand(t8));

  const int frame_alignment = MacroAssembler::ActivationFrameAlignment();
  if (save_doubles) {
    // The stack must be aligned to 0 modulo 8 for stores with sdc1.
    DCHECK(kDoubleSize == frame_alignment);
    if (frame_alignment > 0) {
      DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
      And(sp, sp, Operand(-frame_alignment));  // Align stack.
    }
    int space = FPURegister::kMaxNumRegisters * kDoubleSize;
    Subu(sp, sp, Operand(space));
    // Remember: we only need to save every 2nd double FPU value.
    for (int i = 0; i < FPURegister::kMaxNumRegisters; i+=2) {
      FPURegister reg = FPURegister::from_code(i);
      sdc1(reg, MemOperand(sp, i * kDoubleSize));
    }
  }

  // Reserve place for the return address, stack space and an optional slot
  // (used by the DirectCEntryStub to hold the return value if a struct is
  // returned) and align the frame preparing for calling the runtime function.
  DCHECK(stack_space >= 0);
  Subu(sp, sp, Operand((stack_space + 2) * kPointerSize));
  if (frame_alignment > 0) {
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    And(sp, sp, Operand(-frame_alignment));  // Align stack.
  }

  // Set the exit frame sp value to point just before the return address
  // location.
  addiu(at, sp, kPointerSize);
  sw(at, MemOperand(fp, ExitFrameConstants::kSPOffset));
}
4991
4992
// Tears down an exit frame built by EnterExitFrame: optionally reloads the
// saved double registers, clears the isolate's C-entry fp slot, optionally
// restores cp from the isolate, restores fp/ra, optionally pops the
// arguments (count given either as a byte length or an element count), and
// optionally returns. Clobbers t8.
void MacroAssembler::LeaveExitFrame(bool save_doubles, Register argument_count,
                                    bool restore_context, bool do_return,
                                    bool argument_count_is_length) {
  // Optionally restore all double registers.
  if (save_doubles) {
    // Remember: we only need to restore every 2nd double FPU value.
    lw(t8, MemOperand(fp, ExitFrameConstants::kSPOffset));
    for (int i = 0; i < FPURegister::kMaxNumRegisters; i+=2) {
      FPURegister reg = FPURegister::from_code(i);
      ldc1(reg, MemOperand(t8, i * kDoubleSize + kPointerSize));
    }
  }

  // Clear top frame.
  li(t8, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
  sw(zero_reg, MemOperand(t8));

  // Restore current context from top and clear it in debug mode.
  if (restore_context) {
    li(t8, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
    lw(cp, MemOperand(t8));
  }
#ifdef DEBUG
  li(t8, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
  sw(a3, MemOperand(t8));
#endif

  // Pop the arguments, restore registers, and return.
  mov(sp, fp);  // Respect ABI stack constraint.
  lw(fp, MemOperand(sp, ExitFrameConstants::kCallerFPOffset));
  lw(ra, MemOperand(sp, ExitFrameConstants::kCallerPCOffset));

  if (argument_count.is_valid()) {
    if (argument_count_is_length) {
      addu(sp, sp, argument_count);  // Count is already in bytes.
    } else {
      // Count is in elements: scale by kPointerSize while adding.
      Lsa(sp, sp, argument_count, kPointerSizeLog2, t8);
    }
  }

  if (do_return) {
    Ret(USE_DELAY_SLOT);
    // If returning, the instruction in the delay slot will be the addiu below.
  }
  addiu(sp, sp, 8);
}
5039
5040
// Initializes the header of a freshly allocated string object: stores the
// Smi-tagged |length|, the map identified by |map_index|, and the empty hash
// field. Clobbers scratch1 and scratch2.
void MacroAssembler::InitializeNewString(Register string,
                                         Register length,
                                         Heap::RootListIndex map_index,
                                         Register scratch1,
                                         Register scratch2) {
  sll(scratch1, length, kSmiTagSize);  // Smi-tag the length.
  LoadRoot(scratch2, map_index);
  sw(scratch1, FieldMemOperand(string, String::kLengthOffset));
  li(scratch1, Operand(String::kEmptyHashField));
  sw(scratch2, FieldMemOperand(string, HeapObject::kMapOffset));
  sw(scratch1, FieldMemOperand(string, String::kHashFieldOffset));
}
5053
5054
// Returns the stack alignment (in bytes) required for activation frames:
// the host OS value when generating code on real MIPS hardware, otherwise
// the value of the --sim-stack-alignment flag for the simulator.
int MacroAssembler::ActivationFrameAlignment() {
#if V8_HOST_ARCH_MIPS
  // Running on the real platform. Use the alignment as mandated by the local
  // environment.
  // Note: This will break if we ever start generating snapshots on one Mips
  // platform for another Mips platform with a different alignment.
  return base::OS::ActivationFrameAlignment();
#else  // V8_HOST_ARCH_MIPS
  // If we are using the simulator then we should always align to the expected
  // alignment. As the simulator is used to generate snapshots we do not know
  // if the target platform will need alignment, so this is controlled from a
  // flag.
  return FLAG_sim_stack_alignment;
#endif  // V8_HOST_ARCH_MIPS
}
5070
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00005071
// Debug check that sp satisfies the activation frame alignment; emits a
// stop() (not Abort, to avoid re-entering the runtime) when it does not.
// Only meaningful when the required alignment exceeds one pointer.
void MacroAssembler::AssertStackIsAligned() {
  if (emit_debug_code()) {
    const int frame_alignment = ActivationFrameAlignment();
    const int frame_alignment_mask = frame_alignment - 1;

    if (frame_alignment > kPointerSize) {
      Label alignment_as_expected;
      DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
      andi(at, sp, frame_alignment_mask);
      Branch(&alignment_as_expected, eq, at, Operand(zero_reg));
      // Don't use Check here, as it will call Runtime_Abort re-entering here.
      stop("Unexpected stack alignment");
      bind(&alignment_as_expected);
    }
  }
}
5088
Steve Block44f0eee2011-05-26 01:26:41 +01005089
// Jumps to |not_power_of_two_or_zero| when |reg| is zero, negative, or not a
// power of two, using the (reg & (reg - 1)) == 0 test. Clobbers |scratch|
// and 'at'; the and_ executes in the first branch's delay slot.
void MacroAssembler::JumpIfNotPowerOfTwoOrZero(
    Register reg,
    Register scratch,
    Label* not_power_of_two_or_zero) {
  Subu(scratch, reg, Operand(1));  // reg - 1; < 0 catches zero/negative reg.
  Branch(USE_DELAY_SLOT, not_power_of_two_or_zero, lt,
         scratch, Operand(zero_reg));
  and_(at, scratch, reg);  // In the delay slot.
  Branch(not_power_of_two_or_zero, ne, at, Operand(zero_reg));
}
5100
5101
// Smi-tags |reg| in place and leaves an overflow indicator in |overflow|:
// negative iff the tagging overflowed (value ^ 2*value changes the sign bit).
void MacroAssembler::SmiTagCheckOverflow(Register reg, Register overflow) {
  DCHECK(!reg.is(overflow));
  mov(overflow, reg);  // Save original value.
  SmiTag(reg);
  xor_(overflow, overflow, reg);  // Overflow if (value ^ 2 * value) < 0.
}
5108
5109
// Smi-tags |src| into |dst| with an overflow indicator in |overflow| (see
// the two-register overload). Falls back to the in-place version when dst
// aliases src; all three registers must otherwise be distinct.
void MacroAssembler::SmiTagCheckOverflow(Register dst,
                                         Register src,
                                         Register overflow) {
  if (dst.is(src)) {
    // Fall back to slower case.
    SmiTagCheckOverflow(dst, overflow);
  } else {
    DCHECK(!dst.is(src));
    DCHECK(!dst.is(overflow));
    DCHECK(!src.is(overflow));
    SmiTag(dst, src);
    xor_(overflow, dst, src);  // Overflow if (value ^ 2 * value) < 0.
  }
}
5124
5125
// If |src| is a Smi, untags it into |dst| and jumps to |smi_case|; the
// untag executes in the branch's delay slot, so dst is written either way.
void MacroAssembler::UntagAndJumpIfSmi(Register dst,
                                       Register src,
                                       Label* smi_case) {
  JumpIfSmi(src, smi_case, at, USE_DELAY_SLOT);
  SmiUntag(dst, src);  // In the delay slot.
}
5132
5133
// If |src| is not a Smi, jumps to |non_smi_case|; otherwise falls through
// with the untagged value in |dst|. The untag executes in the branch's
// delay slot, so dst is written either way.
void MacroAssembler::UntagAndJumpIfNotSmi(Register dst,
                                          Register src,
                                          Label* non_smi_case) {
  JumpIfNotSmi(src, non_smi_case, at, USE_DELAY_SLOT);
  SmiUntag(dst, src);  // In the delay slot.
}
5140
// Jumps to |smi_label| when |value| is a Smi (low tag bit clear).
// Clobbers |scratch|; |bd| selects the branch-delay-slot policy.
void MacroAssembler::JumpIfSmi(Register value,
                               Label* smi_label,
                               Register scratch,
                               BranchDelaySlot bd) {
  DCHECK_EQ(0, kSmiTag);
  andi(scratch, value, kSmiTagMask);
  Branch(bd, smi_label, eq, scratch, Operand(zero_reg));
}
5149
// Jumps to |not_smi_label| when |value| is not a Smi (low tag bit set).
// Clobbers |scratch|; |bd| selects the branch-delay-slot policy.
void MacroAssembler::JumpIfNotSmi(Register value,
                                  Label* not_smi_label,
                                  Register scratch,
                                  BranchDelaySlot bd) {
  DCHECK_EQ(0, kSmiTag);
  andi(scratch, value, kSmiTagMask);
  Branch(bd, not_smi_label, ne, scratch, Operand(zero_reg));
}
5158
5159
// Jumps to |on_not_both_smi| unless both registers hold Smis: ORing the
// values leaves the tag bit clear only when both tag bits are clear.
// Clobbers 'at'.
void MacroAssembler::JumpIfNotBothSmi(Register reg1,
                                      Register reg2,
                                      Label* on_not_both_smi) {
  STATIC_ASSERT(kSmiTag == 0);
  DCHECK_EQ(1, kSmiTagMask);
  or_(at, reg1, reg2);
  JumpIfNotSmi(at, on_not_both_smi);
}
5168
5169
// Jumps to |on_either_smi| when at least one register holds a Smi: ANDing
// the values leaves the tag bit set only when both tag bits are set (i.e.
// neither is a Smi). Clobbers 'at'.
void MacroAssembler::JumpIfEitherSmi(Register reg1,
                                     Register reg2,
                                     Label* on_either_smi) {
  STATIC_ASSERT(kSmiTag == 0);
  DCHECK_EQ(1, kSmiTagMask);
  // Both Smi tags must be 1 (not Smi).
  and_(at, reg1, reg2);
  JumpIfSmi(at, on_either_smi);
}
5179
5180
// Debug check that |object| is not a Smi; aborts with kOperandIsASmi
// otherwise. Clobbers 'at'.
void MacroAssembler::AssertNotSmi(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    andi(at, object, kSmiTagMask);
    Check(ne, kOperandIsASmi, at, Operand(zero_reg));
  }
}
5188
5189
// Debug check that |object| is a Smi; aborts with kOperandIsASmi's failure
// path otherwise. Clobbers 'at'.
void MacroAssembler::AssertSmi(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    andi(at, object, kSmiTagMask);
    Check(eq, kOperandIsASmi, at, Operand(zero_reg));
  }
}
5197
5198
// Debug check that |object| is a String: first that it is not a Smi, then
// that its instance type is below FIRST_NONSTRING_TYPE. Clobbers t8.
void MacroAssembler::AssertString(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    SmiTst(object, t8);
    Check(ne, kOperandIsASmiAndNotAString, t8, Operand(zero_reg));
    GetObjectType(object, t8, t8);
    Check(lo, kOperandIsNotAString, t8, Operand(FIRST_NONSTRING_TYPE));
  }
}
5208
5209
// Debug check that |object| is a Name (string or symbol): not a Smi, and
// its instance type is at most LAST_NAME_TYPE. Clobbers t8.
void MacroAssembler::AssertName(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    SmiTst(object, t8);
    Check(ne, kOperandIsASmiAndNotAName, t8, Operand(zero_reg));
    GetObjectType(object, t8, t8);
    Check(le, kOperandIsNotAName, t8, Operand(LAST_NAME_TYPE));
  }
}
5219
5220
// Debug check that |object| is a JSFunction: not a Smi, and its instance
// type equals JS_FUNCTION_TYPE. Clobbers t8.
void MacroAssembler::AssertFunction(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    SmiTst(object, t8);
    Check(ne, kOperandIsASmiAndNotAFunction, t8, Operand(zero_reg));
    GetObjectType(object, t8, t8);
    Check(eq, kOperandIsNotAFunction, t8, Operand(JS_FUNCTION_TYPE));
  }
}
5230
5231
// Debug check that |object| is a JSBoundFunction: not a Smi, and its
// instance type equals JS_BOUND_FUNCTION_TYPE. Clobbers t8.
void MacroAssembler::AssertBoundFunction(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    SmiTst(object, t8);
    Check(ne, kOperandIsASmiAndNotABoundFunction, t8, Operand(zero_reg));
    GetObjectType(object, t8, t8);
    Check(eq, kOperandIsNotABoundFunction, t8, Operand(JS_BOUND_FUNCTION_TYPE));
  }
}
5241
5242
// Debug check that |object| is a JSReceiver: not a Smi, and its instance
// type is at least FIRST_JS_RECEIVER_TYPE. Clobbers t8.
void MacroAssembler::AssertReceiver(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    SmiTst(object, t8);
    Check(ne, kOperandIsASmiAndNotAReceiver, t8, Operand(zero_reg));
    GetObjectType(object, t8, t8);
    Check(ge, kOperandIsNotAReceiver, t8, Operand(FIRST_JS_RECEIVER_TYPE));
  }
}
5252
5253
// Debug check that |object| is either the undefined value or an
// AllocationSite (identified by its map). Clobbers |scratch| and t8.
void MacroAssembler::AssertUndefinedOrAllocationSite(Register object,
                                                     Register scratch) {
  if (emit_debug_code()) {
    Label done_checking;
    AssertNotSmi(object);
    LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
    Branch(&done_checking, eq, object, Operand(scratch));
    lw(t8, FieldMemOperand(object, HeapObject::kMapOffset));
    LoadRoot(scratch, Heap::kAllocationSiteMapRootIndex);
    Assert(eq, kExpectedUndefinedOrCell, t8, Operand(scratch));
    bind(&done_checking);
  }
}
5267
5268
// Debug check that |reg| holds the root value at |index|; aborts with
// kHeapNumberMapRegisterClobbered otherwise. Clobbers 'at'.
void MacroAssembler::AssertIsRoot(Register reg, Heap::RootListIndex index) {
  if (emit_debug_code()) {
    DCHECK(!reg.is(at));  // 'at' is needed for the root load.
    LoadRoot(at, index);
    Check(eq, kHeapNumberMapRegisterClobbered, reg, Operand(at));
  }
}
5276
5277
// Jumps to |on_not_heap_number| unless |object|'s map equals
// |heap_number_map| (debug-verified to be the heap number map root).
// |object| is assumed not to be a Smi. Clobbers |scratch|.
void MacroAssembler::JumpIfNotHeapNumber(Register object,
                                         Register heap_number_map,
                                         Register scratch,
                                         Label* on_not_heap_number) {
  lw(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
  AssertIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
  Branch(on_not_heap_number, ne, scratch, Operand(heap_number_map));
}
5286
5287
// Jumps to |failure| unless both (already non-Smi) objects are sequential
// one-byte strings, by loading each instance type and delegating to the
// instance-type check. Clobbers scratch1 and scratch2.
void MacroAssembler::JumpIfNonSmisNotBothSequentialOneByteStrings(
    Register first, Register second, Register scratch1, Register scratch2,
    Label* failure) {
  // Test that both first and second are sequential one-byte strings.
  // Assume that they are non-smis.
  lw(scratch1, FieldMemOperand(first, HeapObject::kMapOffset));
  lw(scratch2, FieldMemOperand(second, HeapObject::kMapOffset));
  lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  lbu(scratch2, FieldMemOperand(scratch2, Map::kInstanceTypeOffset));

  JumpIfBothInstanceTypesAreNotSequentialOneByte(scratch1, scratch2, scratch1,
                                                 scratch2, failure);
}
5301
5302
// Jumps to |failure| unless both objects are sequential one-byte strings,
// including a combined Smi check first (ANDing: if either is a Smi the
// result has the tag bit clear). Clobbers scratch1 and scratch2.
void MacroAssembler::JumpIfNotBothSequentialOneByteStrings(Register first,
                                                           Register second,
                                                           Register scratch1,
                                                           Register scratch2,
                                                           Label* failure) {
  // Check that neither is a smi.
  STATIC_ASSERT(kSmiTag == 0);
  And(scratch1, first, Operand(second));
  JumpIfSmi(scratch1, failure);
  JumpIfNonSmisNotBothSequentialOneByteStrings(first, second, scratch1,
                                               scratch2, failure);
}
5315
5316
// Given two instance-type values, jumps to |failure| unless both denote
// sequential one-byte strings (mask out representation/encoding/string bits
// and compare against the flat one-byte tag). Clobbers scratch1/scratch2.
void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialOneByte(
    Register first, Register second, Register scratch1, Register scratch2,
    Label* failure) {
  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask;
  const int kFlatOneByteStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;
  DCHECK(kFlatOneByteStringTag <= 0xffff);  // Ensure this fits 16-bit immed.
  andi(scratch1, first, kFlatOneByteStringMask);
  Branch(failure, ne, scratch1, Operand(kFlatOneByteStringTag));
  andi(scratch2, second, kFlatOneByteStringMask);
  Branch(failure, ne, scratch2, Operand(kFlatOneByteStringTag));
}
5330
5331
// Given one instance-type value, jumps to |failure| unless it denotes a
// sequential one-byte string (same mask/tag comparison as the two-register
// variant). Clobbers |scratch|.
void MacroAssembler::JumpIfInstanceTypeIsNotSequentialOneByte(Register type,
                                                              Register scratch,
                                                              Label* failure) {
  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask;
  const int kFlatOneByteStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;
  And(scratch, type, Operand(kFlatOneByteStringMask));
  Branch(failure, ne, scratch, Operand(kFlatOneByteStringTag));
}
5342
5343
// Number of simple (integer-class) C arguments passed in registers a0..a3
// under the MIPS o32 calling convention used below.
static const int kRegisterPassedArguments = 4;
5345
Ben Murdoch3ef787d2012-04-12 10:51:47 +01005346int MacroAssembler::CalculateStackPassedWords(int num_reg_arguments,
5347 int num_double_arguments) {
5348 int stack_passed_words = 0;
5349 num_reg_arguments += 2 * num_double_arguments;
5350
5351 // Up to four simple arguments are passed in registers a0..a3.
5352 if (num_reg_arguments > kRegisterPassedArguments) {
5353 stack_passed_words += num_reg_arguments - kRegisterPassedArguments;
5354 }
5355 stack_passed_words += kCArgSlotCount;
5356 return stack_passed_words;
5357}
5358
5359
// Emits run-time checks used before writing a character into a sequential
// string: |string| must be a non-Smi object whose representation/encoding
// bits match |encoding_mask|, and the untagged |index| must be a
// non-negative Smi-representable value less than the string length.
// |index| is temporarily Smi-tagged for the comparison and untagged again
// before returning. Clobbers |scratch| and 'at'. |value| is unused here.
void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
                                               Register index,
                                               Register value,
                                               Register scratch,
                                               uint32_t encoding_mask) {
  Label is_object;
  SmiTst(string, at);
  Check(ne, kNonObject, at, Operand(zero_reg));

  lw(at, FieldMemOperand(string, HeapObject::kMapOffset));
  lbu(at, FieldMemOperand(at, Map::kInstanceTypeOffset));

  andi(at, at, kStringRepresentationMask | kStringEncodingMask);
  li(scratch, Operand(encoding_mask));
  Check(eq, kUnexpectedStringType, at, Operand(scratch));

  // The index is assumed to be untagged coming in, tag it to compare with the
  // string length without using a temp register, it is restored at the end of
  // this function.
  Label index_tag_ok, index_tag_bad;
  TrySmiTag(index, scratch, &index_tag_bad);
  Branch(&index_tag_ok);
  bind(&index_tag_bad);
  Abort(kIndexIsTooLarge);  // The index does not fit in a Smi.
  bind(&index_tag_ok);

  lw(at, FieldMemOperand(string, String::kLengthOffset));
  Check(lt, kIndexIsTooLarge, index, Operand(at));

  DCHECK(Smi::FromInt(0) == 0);
  Check(ge, kIndexIsNegative, index, Operand(zero_reg));

  SmiUntag(index, index);  // Restore the caller's untagged index.
}
5394
5395
// Reserves stack space for a C call with |num_reg_arguments| integer-class
// and |num_double_arguments| double arguments. When the activation frame
// alignment exceeds one word, sp is additionally aligned and the original
// sp is saved on the stack so CallCFunctionHelper can restore it after the
// call. Clobbers |scratch|.
void MacroAssembler::PrepareCallCFunction(int num_reg_arguments,
                                          int num_double_arguments,
                                          Register scratch) {
  int frame_alignment = ActivationFrameAlignment();

  // Up to four simple arguments are passed in registers a0..a3.
  // Those four arguments must have reserved argument slots on the stack for
  // mips, even though those argument slots are not normally used.
  // Remaining arguments are pushed on the stack, above (higher address than)
  // the argument slots.
  int stack_passed_arguments = CalculateStackPassedWords(
      num_reg_arguments, num_double_arguments);
  if (frame_alignment > kPointerSize) {
    // Make stack end at alignment and make room for num_arguments - 4 words
    // and the original value of sp.
    mov(scratch, sp);
    Subu(sp, sp, Operand((stack_passed_arguments + 1) * kPointerSize));
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    And(sp, sp, Operand(-frame_alignment));
    // Saved sp sits just above the argument area; CallCFunctionHelper
    // reloads it from this slot.
    sw(scratch, MemOperand(sp, stack_passed_arguments * kPointerSize));
  } else {
    Subu(sp, sp, Operand(stack_passed_arguments * kPointerSize));
  }
}
5420
5421
// Convenience overload for C calls with no double arguments.
void MacroAssembler::PrepareCallCFunction(int num_reg_arguments,
                                          Register scratch) {
  PrepareCallCFunction(num_reg_arguments, 0, scratch);
}
5426
5427
// Calls the C function at |function| (an external reference). The target
// address is materialized in t8; CallCFunctionHelper moves it into t9 as
// required by the MIPS ABI.
void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_reg_arguments,
                                   int num_double_arguments) {
  li(t8, Operand(function));
  CallCFunctionHelper(t8, num_reg_arguments, num_double_arguments);
}
5434
5435
// Calls the C function whose address is already in |function|.
void MacroAssembler::CallCFunction(Register function,
                                   int num_reg_arguments,
                                   int num_double_arguments) {
  CallCFunctionHelper(function, num_reg_arguments, num_double_arguments);
}
5441
5442
// Convenience overload: external-reference target, no double arguments.
void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  CallCFunction(function, num_arguments, 0);
}
5447
5448
// Convenience overload: register target, no double arguments.
void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  CallCFunction(function, num_arguments, 0);
}
5453
5454
// Performs the actual C call: optionally verifies sp alignment (debug code,
// real hardware only), moves the target into t9, calls it, and pops the
// argument area set up by PrepareCallCFunction (restoring the saved sp when
// extra alignment was applied there).
void MacroAssembler::CallCFunctionHelper(Register function,
                                         int num_reg_arguments,
                                         int num_double_arguments) {
  DCHECK(has_frame());
  // Make sure that the stack is aligned before calling a C function unless
  // running in the simulator. The simulator has its own alignment check which
  // provides more information.
  // The argument slots are presumed to have been set up by
  // PrepareCallCFunction. The C function must be called via t9, for mips ABI.

#if V8_HOST_ARCH_MIPS
  if (emit_debug_code()) {
    int frame_alignment = base::OS::ActivationFrameAlignment();
    int frame_alignment_mask = frame_alignment - 1;
    if (frame_alignment > kPointerSize) {
      DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
      Label alignment_as_expected;
      And(at, sp, Operand(frame_alignment_mask));
      Branch(&alignment_as_expected, eq, at, Operand(zero_reg));
      // Don't use Check here, as it will call Runtime_Abort possibly
      // re-entering here.
      stop("Unexpected alignment in CallCFunction");
      bind(&alignment_as_expected);
    }
  }
#endif  // V8_HOST_ARCH_MIPS

  // Just call directly. The function called cannot cause a GC, or
  // allow preemption, so the return address in the link register
  // stays correct.

  if (!function.is(t9)) {
    mov(t9, function);
    function = t9;
  }

  Call(function);

  int stack_passed_arguments = CalculateStackPassedWords(
      num_reg_arguments, num_double_arguments);

  if (base::OS::ActivationFrameAlignment() > kPointerSize) {
    // Restore the pre-alignment sp saved by PrepareCallCFunction.
    lw(sp, MemOperand(sp, stack_passed_arguments * kPointerSize));
  } else {
    Addu(sp, sp, Operand(stack_passed_arguments * kPointerSize));
  }
}
5502
5503
5504#undef BRANCH_ARGS_CHECK
5505
5506
// Tests the MemoryChunk flags of the page containing |object| against
// |mask| and branches to |condition_met| on the given condition: with
// cc == ne the branch is taken if any masked flag is set, with cc == eq if
// none are. Clobbers |scratch|.
void MacroAssembler::CheckPageFlag(
    Register object,
    Register scratch,
    int mask,
    Condition cc,
    Label* condition_met) {
  // Pages are aligned, so masking the low bits yields the chunk header.
  And(scratch, object, Operand(~Page::kPageAlignmentMask));
  lw(scratch, MemOperand(scratch, MemoryChunk::kFlagsOffset));
  And(scratch, scratch, Operand(mask));
  Branch(condition_met, cc, scratch, Operand(zero_reg));
}
5518
5519
// Branches to |on_black| if |object|'s two mark bits are "11" (black).
// Clobbers both scratch registers (plus t8/t9 via HasColor).
void MacroAssembler::JumpIfBlack(Register object,
                                 Register scratch0,
                                 Register scratch1,
                                 Label* on_black) {
  HasColor(object, scratch0, scratch1, on_black, 1, 1);  // kBlackBitPattern.
  DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
}
5527
5528
// Branches to |has_color| if |object|'s mark-bit pair equals
// (first_bit, second_bit). The second bit may live in the next bitmap cell
// when the pair straddles a cell boundary; that case is handled via the
// word_boundary path. Clobbers bitmap_scratch, mask_scratch, t8 and t9.
void MacroAssembler::HasColor(Register object,
                              Register bitmap_scratch,
                              Register mask_scratch,
                              Label* has_color,
                              int first_bit,
                              int second_bit) {
  DCHECK(!AreAliased(object, bitmap_scratch, mask_scratch, t8));
  DCHECK(!AreAliased(object, bitmap_scratch, mask_scratch, t9));

  GetMarkBits(object, bitmap_scratch, mask_scratch);

  Label other_color, word_boundary;
  lw(t9, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize));
  // First bit must match, otherwise the color differs.
  And(t8, t9, Operand(mask_scratch));
  Branch(&other_color, first_bit == 1 ? eq : ne, t8, Operand(zero_reg));
  // Shift left 1 by adding.
  Addu(mask_scratch, mask_scratch, Operand(mask_scratch));
  // If the shift overflowed to zero, the second bit is in the next cell.
  Branch(&word_boundary, eq, mask_scratch, Operand(zero_reg));
  And(t8, t9, Operand(mask_scratch));
  Branch(has_color, second_bit == 1 ? ne : eq, t8, Operand(zero_reg));
  jmp(&other_color);

  bind(&word_boundary);
  // Second bit is bit 0 of the following bitmap cell.
  lw(t9, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize));
  And(t9, t9, Operand(1));
  Branch(has_color, second_bit == 1 ? ne : eq, t9, Operand(zero_reg));
  bind(&other_color);
}
5557
5558
// For heap address |addr_reg| computes:
//   bitmap_reg - address of the marking-bitmap cell holding its mark bits,
//   mask_reg   - one-bit mask selecting the first mark bit in that cell.
// Clobbers t8. All registers must be distinct.
void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  DCHECK(!AreAliased(addr_reg, bitmap_reg, mask_reg, no_reg));
  // Page start (chunks are page-aligned).
  And(bitmap_reg, addr_reg, Operand(~Page::kPageAlignmentMask));
  // Bit index within the cell.
  Ext(mask_reg, addr_reg, kPointerSizeLog2, Bitmap::kBitsPerCellLog2);
  const int kLowBits = kPointerSizeLog2 + Bitmap::kBitsPerCellLog2;
  // Cell index within the page's bitmap.
  Ext(t8, addr_reg, kLowBits, kPageSizeBits - kLowBits);
  Lsa(bitmap_reg, bitmap_reg, t8, kPointerSizeLog2, t8);
  li(t8, Operand(1));
  sllv(mask_reg, t8, mask_reg);
}
5571
5572
// Branches to |value_is_white| if |value|'s mark bits are "00" (white).
// Leaves the computed bitmap cell address / bit mask in the scratch
// registers for the caller. Clobbers all three scratch registers and t8.
void MacroAssembler::JumpIfWhite(Register value, Register bitmap_scratch,
                                 Register mask_scratch, Register load_scratch,
                                 Label* value_is_white) {
  DCHECK(!AreAliased(value, bitmap_scratch, mask_scratch, t8));
  GetMarkBits(value, bitmap_scratch, mask_scratch);

  // If the value is black or grey we don't need to do anything.
  DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0);
  DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
  DCHECK(strcmp(Marking::kGreyBitPattern, "10") == 0);
  DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

  // Since both black and grey have a 1 in the first position and white does
  // not have a 1 there we only need to check one bit.
  lw(load_scratch, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize));
  And(t8, mask_scratch, load_scratch);
  Branch(value_is_white, eq, t8, Operand(zero_reg));
}
5591
5592
// Loads |map|'s descriptor array into |descriptors|.
void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  lw(descriptors, FieldMemOperand(map, Map::kDescriptorsOffset));
}
5597
5598
// Extracts the number-of-own-descriptors field from |map|'s bit field 3
// into |dst| (untagged integer).
void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
  lw(dst, FieldMemOperand(map, Map::kBitField3Offset));
  DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
}
5603
5604
// Loads |map|'s enum-cache length into |dst| as a smi.
void MacroAssembler::EnumLength(Register dst, Register map) {
  // Shift of 0 lets us mask in place without extracting.
  STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
  lw(dst, FieldMemOperand(map, Map::kBitField3Offset));
  And(dst, dst, Operand(Map::EnumLengthBits::kMask));
  SmiTag(dst);
}
5611
5612
// Loads into |dst| the getter or setter (per |accessor|) stored in the
// AccessorPair at |accessor_index| of |holder|'s map descriptors.
// |dst| is used as scratch along the way.
void MacroAssembler::LoadAccessor(Register dst, Register holder,
                                  int accessor_index,
                                  AccessorComponent accessor) {
  lw(dst, FieldMemOperand(holder, HeapObject::kMapOffset));
  LoadInstanceDescriptors(dst, dst);
  lw(dst,
     FieldMemOperand(dst, DescriptorArray::GetValueOffset(accessor_index)));
  int offset = accessor == ACCESSOR_GETTER ? AccessorPair::kGetterOffset
                                           : AccessorPair::kSetterOffset;
  lw(dst, FieldMemOperand(dst, offset));
}
5624
5625
// Walks the prototype chain of the object in a0 and branches to
// |call_runtime| unless every map has a usable (and, past the receiver,
// empty) enum cache and every object has no elements — the conditions under
// which a fast for-in can be used. Uses a fixed register protocol:
// receiver in a0; clobbers a1-a3, t1, t2 and at.
void MacroAssembler::CheckEnumCache(Label* call_runtime) {
  Register null_value = t1;
  Register empty_fixed_array_value = t2;
  LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
  Label next, start;
  mov(a2, a0);

  // Check if the enum length field is properly initialized, indicating that
  // there is an enum cache.
  lw(a1, FieldMemOperand(a2, HeapObject::kMapOffset));

  EnumLength(a3, a1);
  Branch(
      call_runtime, eq, a3, Operand(Smi::FromInt(kInvalidEnumCacheSentinel)));

  LoadRoot(null_value, Heap::kNullValueRootIndex);
  jmp(&start);

  bind(&next);
  lw(a1, FieldMemOperand(a2, HeapObject::kMapOffset));

  // For all objects but the receiver, check that the cache is empty.
  EnumLength(a3, a1);
  Branch(call_runtime, ne, a3, Operand(Smi::FromInt(0)));

  bind(&start);

  // Check that there are no elements. Register a2 contains the current JS
  // object we've reached through the prototype chain.
  Label no_elements;
  lw(a2, FieldMemOperand(a2, JSObject::kElementsOffset));
  Branch(&no_elements, eq, a2, Operand(empty_fixed_array_value));

  // Second chance, the object may be using the empty slow element dictionary.
  LoadRoot(at, Heap::kEmptySlowElementDictionaryRootIndex);
  Branch(call_runtime, ne, a2, Operand(at));

  bind(&no_elements);
  // Advance to the prototype; stop once we reach null.
  lw(a2, FieldMemOperand(a1, Map::kPrototypeOffset));
  Branch(&next, ne, a2, Operand(null_value));
}
5667
5668
// Clamps the signed integer in |input_reg| to [0, 255] and writes the
// result to |output_reg|. Registers must differ. Relies on branch delay
// slots: the final `mov` executes only on the fall-through (in-range) path.
void MacroAssembler::ClampUint8(Register output_reg, Register input_reg) {
  DCHECK(!output_reg.is(input_reg));
  Label done;
  li(output_reg, Operand(255));
  // Normal branch: nop in delay slot. Input > 255 keeps the preloaded 255.
  Branch(&done, gt, input_reg, Operand(output_reg));
  // Use delay slot in this branch. Input < 0 clamps to zero.
  Branch(USE_DELAY_SLOT, &done, lt, input_reg, Operand(zero_reg));
  mov(output_reg, zero_reg);  // In delay slot.
  mov(output_reg, input_reg);  // Value is in range 0..255.
  bind(&done);
}
5681
5682
// Clamps the double in |input_reg| to an integer in [0, 255] placed in
// |result_reg|. Values <= 0 (including NaN, since the gt compare is false
// for NaN) produce 0; values >= 255 produce 255; in-range values are
// converted with cvt.w.d — NOTE(review): rounding follows the current FCSR
// mode, despite the "round and truncate" wording below; confirm callers
// rely on the default mode. Clobbers |temp_double_reg|.
void MacroAssembler::ClampDoubleToUint8(Register result_reg,
                                        DoubleRegister input_reg,
                                        DoubleRegister temp_double_reg) {
  Label above_zero;
  Label done;
  Label in_bounds;

  Move(temp_double_reg, 0.0);
  BranchF(&above_zero, NULL, gt, input_reg, temp_double_reg);

  // Double value is less than zero, NaN or Inf, return 0.
  mov(result_reg, zero_reg);
  Branch(&done);

  // Double value is >= 255, return 255.
  bind(&above_zero);
  Move(temp_double_reg, 255.0);
  BranchF(&in_bounds, NULL, le, input_reg, temp_double_reg);
  li(result_reg, Operand(255));
  Branch(&done);

  // In 0-255 range, round and truncate.
  bind(&in_bounds);
  cvt_w_d(temp_double_reg, input_reg);
  mfc1(result_reg, temp_double_reg);
  bind(&done);
}
5710
5711
// Tests whether an AllocationMemento trails the JSArray in |receiver_reg|.
// Branches to |no_memento_found| when the candidate memento slot lies
// outside the currently-allocated new space; otherwise loads the candidate
// map word and, if |allocation_memento_present| is non-null, branches there
// when the map comparison satisfies |cond|. Clobbers |scratch_reg| and at.
void MacroAssembler::TestJSArrayForAllocationMemento(
    Register receiver_reg,
    Register scratch_reg,
    Label* no_memento_found,
    Condition cond,
    Label* allocation_memento_present) {
  ExternalReference new_space_start =
      ExternalReference::new_space_start(isolate());
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());
  // Address just past where a trailing memento would end.
  Addu(scratch_reg, receiver_reg,
       Operand(JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag));
  Branch(no_memento_found, lt, scratch_reg, Operand(new_space_start));
  li(at, Operand(new_space_allocation_top));
  lw(at, MemOperand(at));
  Branch(no_memento_found, gt, scratch_reg, Operand(at));
  // Load the would-be memento's map for comparison.
  lw(scratch_reg, MemOperand(scratch_reg, -AllocationMemento::kSize));
  if (allocation_memento_present) {
    Branch(allocation_memento_present, cond, scratch_reg,
           Operand(isolate()->factory()->allocation_memento_map()));
  }
}
5734
5735
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005736Register GetRegisterThatIsNotOneOf(Register reg1,
5737 Register reg2,
5738 Register reg3,
5739 Register reg4,
5740 Register reg5,
5741 Register reg6) {
5742 RegList regs = 0;
5743 if (reg1.is_valid()) regs |= reg1.bit();
5744 if (reg2.is_valid()) regs |= reg2.bit();
5745 if (reg3.is_valid()) regs |= reg3.bit();
5746 if (reg4.is_valid()) regs |= reg4.bit();
5747 if (reg5.is_valid()) regs |= reg5.bit();
5748 if (reg6.is_valid()) regs |= reg6.bit();
5749
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005750 const RegisterConfiguration* config =
5751 RegisterConfiguration::ArchDefault(RegisterConfiguration::CRANKSHAFT);
5752 for (int i = 0; i < config->num_allocatable_general_registers(); ++i) {
5753 int code = config->GetAllocatableGeneralCode(i);
5754 Register candidate = Register::from_code(code);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005755 if (regs & candidate.bit()) continue;
5756 return candidate;
5757 }
5758 UNREACHABLE();
5759 return no_reg;
5760}
5761
5762
// Walks |object|'s prototype chain and branches to |found| if any
// prototype is a special-receiver object (instance type below
// JS_OBJECT_TYPE) or has dictionary-mode elements. Falls through when the
// chain ends at null. Clobbers both scratch registers.
void MacroAssembler::JumpIfDictionaryInPrototypeChain(
    Register object,
    Register scratch0,
    Register scratch1,
    Label* found) {
  DCHECK(!scratch1.is(scratch0));
  Factory* factory = isolate()->factory();
  Register current = scratch0;
  Label loop_again, end;

  // Scratch contained elements pointer.
  // Start from the object's prototype, not the object itself.
  Move(current, object);
  lw(current, FieldMemOperand(current, HeapObject::kMapOffset));
  lw(current, FieldMemOperand(current, Map::kPrototypeOffset));
  Branch(&end, eq, current, Operand(factory->null_value()));

  // Loop based on the map going up the prototype chain.
  bind(&loop_again);
  lw(current, FieldMemOperand(current, HeapObject::kMapOffset));
  lbu(scratch1, FieldMemOperand(current, Map::kInstanceTypeOffset));
  STATIC_ASSERT(JS_VALUE_TYPE < JS_OBJECT_TYPE);
  STATIC_ASSERT(JS_PROXY_TYPE < JS_OBJECT_TYPE);
  Branch(found, lo, scratch1, Operand(JS_OBJECT_TYPE));
  lb(scratch1, FieldMemOperand(current, Map::kBitField2Offset));
  DecodeField<Map::ElementsKindBits>(scratch1);
  Branch(found, eq, scratch1, Operand(DICTIONARY_ELEMENTS));
  lw(current, FieldMemOperand(current, Map::kPrototypeOffset));
  Branch(&loop_again, ne, current, Operand(factory->null_value()));

  bind(&end);
}
5794
5795
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005796bool AreAliased(Register reg1, Register reg2, Register reg3, Register reg4,
5797 Register reg5, Register reg6, Register reg7, Register reg8,
5798 Register reg9, Register reg10) {
5799 int n_of_valid_regs = reg1.is_valid() + reg2.is_valid() + reg3.is_valid() +
5800 reg4.is_valid() + reg5.is_valid() + reg6.is_valid() +
5801 reg7.is_valid() + reg8.is_valid() + reg9.is_valid() +
5802 reg10.is_valid();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005803
5804 RegList regs = 0;
5805 if (reg1.is_valid()) regs |= reg1.bit();
5806 if (reg2.is_valid()) regs |= reg2.bit();
5807 if (reg3.is_valid()) regs |= reg3.bit();
5808 if (reg4.is_valid()) regs |= reg4.bit();
5809 if (reg5.is_valid()) regs |= reg5.bit();
5810 if (reg6.is_valid()) regs |= reg6.bit();
5811 if (reg7.is_valid()) regs |= reg7.bit();
5812 if (reg8.is_valid()) regs |= reg8.bit();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005813 if (reg9.is_valid()) regs |= reg9.bit();
5814 if (reg10.is_valid()) regs |= reg10.bit();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005815 int n_of_non_aliasing_regs = NumRegs(regs);
5816
5817 return n_of_valid_regs != n_of_non_aliasing_regs;
5818}
5819
5820
// Sets up a macro assembler writing directly over |instructions| existing
// instructions at |address|; |flush_cache| controls whether the i-cache is
// flushed when patching completes (see the destructor).
CodePatcher::CodePatcher(Isolate* isolate, byte* address, int instructions,
                         FlushICache flush_cache)
    : address_(address),
      size_(instructions * Assembler::kInstrSize),
      masm_(isolate, address, size_ + Assembler::kGap, CodeObjectRequired::kNo),
      flush_cache_(flush_cache) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
5832
5833
// Finalizes patching: optionally flushes the i-cache over the patched range
// and verifies that exactly |size_| bytes were emitted.
CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  if (flush_cache_ == FLUSH) {
    Assembler::FlushICache(masm_.isolate(), address_, size_);
  }

  // Check that the code was patched as expected.
  DCHECK(masm_.pc_ == address_ + size_);
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
5844
5845
// Emits one raw instruction at the current patch position.
void CodePatcher::Emit(Instr instr) {
  masm()->emit(instr);
}
5849
5850
// Emits a pointer-sized address value, reinterpreted as an instruction
// word, at the current patch position.
void CodePatcher::Emit(Address addr) {
  masm()->emit(reinterpret_cast<Instr>(addr));
}
5854
5855
// Re-emits |current_instr| with its opcode field replaced by |new_opcode|,
// leaving all other instruction fields (registers, offset) intact.
void CodePatcher::ChangeBranchCondition(Instr current_instr,
                                        uint32_t new_opcode) {
  current_instr = (current_instr & ~kOpcodeMask) | new_opcode;
  masm_.emit(current_instr);
}
Steve Block44f0eee2011-05-26 01:26:41 +01005861
5862
// Emits code computing |result| = |dividend| / |divisor| (truncated signed
// division by a compile-time constant) using the magic-number
// multiply-and-shift technique. |dividend| must differ from |result|;
// neither may be at, which is used as scratch.
void MacroAssembler::TruncatingDiv(Register result,
                                   Register dividend,
                                   int32_t divisor) {
  DCHECK(!dividend.is(result));
  DCHECK(!dividend.is(at));
  DCHECK(!result.is(at));
  base::MagicNumbersForDivision<uint32_t> mag =
      base::SignedDivisionByConstant(static_cast<uint32_t>(divisor));
  li(at, Operand(mag.multiplier));
  // High 32 bits of dividend * multiplier.
  Mulh(result, dividend, Operand(at));
  bool neg = (mag.multiplier & (static_cast<uint32_t>(1) << 31)) != 0;
  // Correction terms for multipliers whose sign bit is set.
  if (divisor > 0 && neg) {
    Addu(result, result, Operand(dividend));
  }
  if (divisor < 0 && !neg && mag.multiplier > 0) {
    Subu(result, result, Operand(dividend));
  }
  if (mag.shift > 0) sra(result, result, mag.shift);
  // Add 1 for negative dividends to truncate toward zero.
  srl(at, dividend, 31);
  Addu(result, result, Operand(at));
}
5884
5885
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005886} // namespace internal
5887} // namespace v8
Andrei Popescu31002712010-02-23 13:46:05 +00005888
Leon Clarkef7060e22010-06-03 12:02:55 +01005889#endif // V8_TARGET_ARCH_MIPS