// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <limits.h>  // For LONG_MIN, LONG_MAX.

#include "src/v8.h"

#if V8_TARGET_ARCH_MIPS

#include "src/base/bits.h"
#include "src/base/division-by-constant.h"
#include "src/bootstrapper.h"
#include "src/codegen.h"
#include "src/cpu-profiler.h"
#include "src/debug.h"
#include "src/isolate-inl.h"
#include "src/runtime.h"

namespace v8 {
namespace internal {

MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      has_frame_(false) {
  if (isolate() != NULL) {
    code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
                                  isolate());
  }
}


void MacroAssembler::Load(Register dst,
                          const MemOperand& src,
                          Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8()) {
    lb(dst, src);
  } else if (r.IsUInteger8()) {
    lbu(dst, src);
  } else if (r.IsInteger16()) {
    lh(dst, src);
  } else if (r.IsUInteger16()) {
    lhu(dst, src);
  } else {
    lw(dst, src);
  }
}


void MacroAssembler::Store(Register src,
                           const MemOperand& dst,
                           Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8() || r.IsUInteger8()) {
    sb(src, dst);
  } else if (r.IsInteger16() || r.IsUInteger16()) {
    sh(src, dst);
  } else {
    if (r.IsHeapObject()) {
      AssertNotSmi(src);
    } else if (r.IsSmi()) {
      AssertSmi(src);
    }
    sw(src, dst);
  }
}


void MacroAssembler::LoadRoot(Register destination,
                              Heap::RootListIndex index) {
  lw(destination, MemOperand(s6, index << kPointerSizeLog2));
}


void MacroAssembler::LoadRoot(Register destination,
                              Heap::RootListIndex index,
                              Condition cond,
                              Register src1, const Operand& src2) {
  Branch(2, NegateCondition(cond), src1, src2);
  lw(destination, MemOperand(s6, index << kPointerSizeLog2));
}


void MacroAssembler::StoreRoot(Register source,
                               Heap::RootListIndex index) {
  sw(source, MemOperand(s6, index << kPointerSizeLog2));
}


void MacroAssembler::StoreRoot(Register source,
                               Heap::RootListIndex index,
                               Condition cond,
                               Register src1, const Operand& src2) {
  Branch(2, NegateCondition(cond), src1, src2);
  sw(source, MemOperand(s6, index << kPointerSizeLog2));
}


// Push and pop all registers that can hold pointers.
void MacroAssembler::PushSafepointRegisters() {
  // Safepoints expect a block of kNumSafepointRegisters values on the
  // stack, so adjust the stack for unsaved registers.
  const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters;
  DCHECK(num_unsaved >= 0);
  if (num_unsaved > 0) {
    Subu(sp, sp, Operand(num_unsaved * kPointerSize));
  }
  MultiPush(kSafepointSavedRegisters);
}


void MacroAssembler::PopSafepointRegisters() {
  const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters;
  MultiPop(kSafepointSavedRegisters);
  if (num_unsaved > 0) {
    Addu(sp, sp, Operand(num_unsaved * kPointerSize));
  }
}


void MacroAssembler::StoreToSafepointRegisterSlot(Register src, Register dst) {
  sw(src, SafepointRegisterSlot(dst));
}


void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  lw(dst, SafepointRegisterSlot(src));
}


int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
  // The registers are pushed starting with the highest encoding,
  // which means that lowest encodings are closest to the stack pointer.
  return kSafepointRegisterStackIndexMap[reg_code];
}

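// Note on the layout computed above: each safepoint slot is one word wide,
// so a register with stack index i is addressed as
// MemOperand(sp, i * kPointerSize); the num_unsaved padding words reserved
// by PushSafepointRegisters() end up above the saved registers, at the
// higher addresses.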
MemOperand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return MemOperand(sp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}


MemOperand MacroAssembler::SafepointRegistersAndDoublesSlot(Register reg) {
  UNIMPLEMENTED_MIPS();
  // General purpose registers are pushed last on the stack.
  int doubles_size = FPURegister::NumAllocatableRegisters() * kDoubleSize;
  int register_offset = SafepointRegisterStackIndex(reg.code()) * kPointerSize;
  return MemOperand(sp, doubles_size + register_offset);
}

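// The new-space test below relies on the new-space reservation being
// aligned to its size: an address lies in new space exactly when
// (addr & new_space_mask) == new_space_start, so one And plus one compare
// decides membership.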
void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                Label* branch) {
  DCHECK(cc == eq || cc == ne);
  And(scratch, object, Operand(ExternalReference::new_space_mask(isolate())));
  Branch(branch, cc, scratch,
         Operand(ExternalReference::new_space_start(isolate())));
}


void MacroAssembler::RecordWriteField(
    Register object,
    int offset,
    Register value,
    Register dst,
    RAStatus ra_status,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  DCHECK(!AreAliased(value, dst, t8, object));
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done);
  }

  // Although the object register is tagged, the offset is relative to the
  // start of the object, so the offset must be a multiple of kPointerSize.
  DCHECK(IsAligned(offset, kPointerSize));

  Addu(dst, object, Operand(offset - kHeapObjectTag));
  if (emit_debug_code()) {
    Label ok;
    And(t8, dst, Operand((1 << kPointerSizeLog2) - 1));
    Branch(&ok, eq, t8, Operand(zero_reg));
    stop("Unaligned cell in write barrier");
    bind(&ok);
  }

  RecordWrite(object,
              dst,
              value,
              ra_status,
              save_fp,
              remembered_set_action,
              OMIT_SMI_CHECK,
              pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    li(value, Operand(bit_cast<int32_t>(kZapValue + 4)));
    li(dst, Operand(bit_cast<int32_t>(kZapValue + 8)));
  }
}


// Will clobber 4 registers: object, map, dst, ip. The
// register 'object' contains a heap object pointer.
void MacroAssembler::RecordWriteForMap(Register object,
                                       Register map,
                                       Register dst,
                                       RAStatus ra_status,
                                       SaveFPRegsMode fp_mode) {
  if (emit_debug_code()) {
    DCHECK(!dst.is(at));
    lw(dst, FieldMemOperand(map, HeapObject::kMapOffset));
    Check(eq,
          kWrongAddressOrValuePassedToRecordWrite,
          dst,
          Operand(isolate()->factory()->meta_map()));
  }

  if (!FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    lw(at, FieldMemOperand(object, HeapObject::kMapOffset));
    Check(eq,
          kWrongAddressOrValuePassedToRecordWrite,
          map,
          Operand(at));
  }

  Label done;

  // A single check of the map's pages interesting flag suffices, since it is
  // only set during incremental collection, and then it's also guaranteed that
  // the from object's page's interesting flag is also set. This optimization
  // relies on the fact that maps can never be in new space.
  CheckPageFlag(map,
                map,  // Used as scratch.
                MemoryChunk::kPointersToHereAreInterestingMask,
                eq,
                &done);

  Addu(dst, object, Operand(HeapObject::kMapOffset - kHeapObjectTag));
  if (emit_debug_code()) {
    Label ok;
    And(at, dst, Operand((1 << kPointerSizeLog2) - 1));
    Branch(&ok, eq, at, Operand(zero_reg));
    stop("Unaligned cell in write barrier");
    bind(&ok);
  }

  // Record the actual write.
  if (ra_status == kRAHasNotBeenSaved) {
    push(ra);
  }
  RecordWriteStub stub(isolate(), object, map, dst, OMIT_REMEMBERED_SET,
                       fp_mode);
  CallStub(&stub);
  if (ra_status == kRAHasNotBeenSaved) {
    pop(ra);
  }

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1, at, dst);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    li(dst, Operand(bit_cast<int32_t>(kZapValue + 12)));
    li(map, Operand(bit_cast<int32_t>(kZapValue + 16)));
  }
}


// Will clobber 4 registers: object, address, scratch, ip. The
// register 'object' contains a heap object pointer. The heap object
// tag is shifted away.
void MacroAssembler::RecordWrite(
    Register object,
    Register address,
    Register value,
    RAStatus ra_status,
    SaveFPRegsMode fp_mode,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  DCHECK(!AreAliased(object, address, value, t8));
  DCHECK(!AreAliased(object, address, value, t9));

  if (emit_debug_code()) {
    lw(at, MemOperand(address));
    Assert(
        eq, kWrongAddressOrValuePassedToRecordWrite, at, Operand(value));
  }

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    DCHECK_EQ(0, kSmiTag);
    JumpIfSmi(value, &done);
  }

  if (pointers_to_here_check_for_value != kPointersToHereAreAlwaysInteresting) {
    CheckPageFlag(value,
                  value,  // Used as scratch.
                  MemoryChunk::kPointersToHereAreInterestingMask,
                  eq,
                  &done);
  }
  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                eq,
                &done);

  // Record the actual write.
  if (ra_status == kRAHasNotBeenSaved) {
    push(ra);
  }
  RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
                       fp_mode);
  CallStub(&stub);
  if (ra_status == kRAHasNotBeenSaved) {
    pop(ra);
  }

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1, at,
                   value);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    li(address, Operand(bit_cast<int32_t>(kZapValue + 12)));
    li(value, Operand(bit_cast<int32_t>(kZapValue + 16)));
  }
}


void MacroAssembler::RememberedSetHelper(Register object,  // For debug tests.
                                         Register address,
                                         Register scratch,
                                         SaveFPRegsMode fp_mode,
                                         RememberedSetFinalAction and_then) {
  Label done;
  if (emit_debug_code()) {
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok);
    stop("Remembered set pointer is in new space");
    bind(&ok);
  }
  // Load store buffer top.
  ExternalReference store_buffer =
      ExternalReference::store_buffer_top(isolate());
  li(t8, Operand(store_buffer));
  lw(scratch, MemOperand(t8));
  // Store pointer to buffer and increment buffer top.
  sw(address, MemOperand(scratch));
  Addu(scratch, scratch, kPointerSize);
  // Write back new top of buffer.
  sw(scratch, MemOperand(t8));
  // Call stub on end of buffer.
  // Check for end of buffer.
  And(t8, scratch, Operand(StoreBuffer::kStoreBufferOverflowBit));
  if (and_then == kFallThroughAtEnd) {
    Branch(&done, eq, t8, Operand(zero_reg));
  } else {
    DCHECK(and_then == kReturnAtEnd);
    Ret(eq, t8, Operand(zero_reg));
  }
  push(ra);
  StoreBufferOverflowStub store_buffer_overflow(isolate(), fp_mode);
  CallStub(&store_buffer_overflow);
  pop(ra);
  bind(&done);
  if (and_then == kReturnAtEnd) {
    Ret();
  }
}


// -----------------------------------------------------------------------------
// Allocation support.


void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  DCHECK(!holder_reg.is(scratch));
  DCHECK(!holder_reg.is(at));
  DCHECK(!scratch.is(at));

  // Load current lexical context from the stack frame.
  lw(scratch, MemOperand(fp, StandardFrameConstants::kContextOffset));
  // In debug mode, make sure the lexical context is set.
#ifdef DEBUG
  Check(ne, kWeShouldNotHaveAnEmptyLexicalContext,
        scratch, Operand(zero_reg));
#endif

  // Load the native context of the current context.
  int offset =
      Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
  lw(scratch, FieldMemOperand(scratch, offset));
  lw(scratch, FieldMemOperand(scratch, GlobalObject::kNativeContextOffset));

  // Check the context is a native context.
  if (emit_debug_code()) {
    push(holder_reg);  // Temporarily save holder on the stack.
    // Read the first word and compare to the native_context_map.
    lw(holder_reg, FieldMemOperand(scratch, HeapObject::kMapOffset));
    LoadRoot(at, Heap::kNativeContextMapRootIndex);
    Check(eq, kJSGlobalObjectNativeContextShouldBeANativeContext,
          holder_reg, Operand(at));
    pop(holder_reg);  // Restore holder.
  }

  // Check if both contexts are the same.
  lw(at, FieldMemOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  Branch(&same_contexts, eq, scratch, Operand(at));

  // Check the context is a native context.
  if (emit_debug_code()) {
    push(holder_reg);  // Temporarily save holder on the stack.
    mov(holder_reg, at);  // Move at to its holding place.
    LoadRoot(at, Heap::kNullValueRootIndex);
    Check(ne, kJSGlobalProxyContextShouldNotBeNull,
          holder_reg, Operand(at));

    lw(holder_reg, FieldMemOperand(holder_reg, HeapObject::kMapOffset));
    LoadRoot(at, Heap::kNativeContextMapRootIndex);
    Check(eq, kJSGlobalObjectNativeContextShouldBeANativeContext,
          holder_reg, Operand(at));
    // Restoring at is not needed; at is reloaded below.
    pop(holder_reg);  // Restore holder.
    // Restore at to holder's context.
    lw(at, FieldMemOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  }

  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;

  lw(scratch, FieldMemOperand(scratch, token_offset));
  lw(at, FieldMemOperand(at, token_offset));
  Branch(miss, ne, scratch, Operand(at));

  bind(&same_contexts);
}


// Compute the hash code from the untagged key. This must be kept in sync with
// ComputeIntegerHash in utils.h and KeyedLoadGenericStub in
// code-stub-hydrogen.cc.
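// The mix below appears to be a Jenkins/Wang style 32-bit integer hash; note
// that the multiply by 2057 is strength-reduced to shifts and adds, since
// 2057 == 1 + (1 << 3) + (1 << 11).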
void MacroAssembler::GetNumberHash(Register reg0, Register scratch) {
  // First of all we assign the hash seed to scratch.
  LoadRoot(scratch, Heap::kHashSeedRootIndex);
  SmiUntag(scratch);

  // Xor original key with a seed.
  xor_(reg0, reg0, scratch);

  // Compute the hash code from the untagged key. This must be kept in sync
  // with ComputeIntegerHash in utils.h.
  //
  // hash = ~hash + (hash << 15);
  nor(scratch, reg0, zero_reg);
  sll(at, reg0, 15);
  addu(reg0, scratch, at);

  // hash = hash ^ (hash >> 12);
  srl(at, reg0, 12);
  xor_(reg0, reg0, at);

  // hash = hash + (hash << 2);
  sll(at, reg0, 2);
  addu(reg0, reg0, at);

  // hash = hash ^ (hash >> 4);
  srl(at, reg0, 4);
  xor_(reg0, reg0, at);

  // hash = hash * 2057;
  sll(scratch, reg0, 11);
  sll(at, reg0, 3);
  addu(reg0, reg0, at);
  addu(reg0, reg0, scratch);

  // hash = hash ^ (hash >> 16);
  srl(at, reg0, 16);
  xor_(reg0, reg0, at);
}

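// Each SeededNumberDictionary entry occupies three words (key, value,
// details), which is why the probe index is scaled by 3 below and why the
// value and details fields are read one and two words past the entry start.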
void MacroAssembler::LoadFromNumberDictionary(Label* miss,
                                              Register elements,
                                              Register key,
                                              Register result,
                                              Register reg0,
                                              Register reg1,
                                              Register reg2) {
  // Register use:
  //
  // elements - holds the slow-case elements of the receiver on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // key      - holds the smi key on entry.
  //            Unchanged unless 'result' is the same register.
  //
  //
  // result   - holds the result on exit if the load succeeded.
  //            Allowed to be the same as 'key' or 'result'.
  //            Unchanged on bailout so 'key' or 'result' can be used
  //            in further computation.
  //
  // Scratch registers:
  //
  // reg0 - holds the untagged key on entry and holds the hash once computed.
  //
  // reg1 - Used to hold the capacity mask of the dictionary.
  //
  // reg2 - Used for the index into the dictionary.
  // at   - Temporary (avoid MacroAssembler instructions also using 'at').
  Label done;

  GetNumberHash(reg0, reg1);

  // Compute the capacity mask.
  lw(reg1, FieldMemOperand(elements, SeededNumberDictionary::kCapacityOffset));
  sra(reg1, reg1, kSmiTagSize);
  Subu(reg1, reg1, Operand(1));

  // Generate an unrolled loop that performs a few probes before giving up.
  for (int i = 0; i < kNumberDictionaryProbes; i++) {
    // Use reg2 for index calculations and keep the hash intact in reg0.
    mov(reg2, reg0);
    // Compute the masked index: (hash + i + i * i) & mask.
    if (i > 0) {
      Addu(reg2, reg2, Operand(SeededNumberDictionary::GetProbeOffset(i)));
    }
    and_(reg2, reg2, reg1);

    // Scale the index by multiplying by the element size.
    DCHECK(SeededNumberDictionary::kEntrySize == 3);
    sll(at, reg2, 1);  // 2x.
    addu(reg2, reg2, at);  // reg2 = reg2 * 3.

    // Check if the key is identical to the name.
    sll(at, reg2, kPointerSizeLog2);
    addu(reg2, elements, at);

    lw(at, FieldMemOperand(reg2, SeededNumberDictionary::kElementsStartOffset));
    if (i != kNumberDictionaryProbes - 1) {
      Branch(&done, eq, key, Operand(at));
    } else {
      Branch(miss, ne, key, Operand(at));
    }
  }

  bind(&done);
  // Check that the value is a normal property.
  // reg2: elements + (index * kPointerSize).
  const int kDetailsOffset =
      SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  lw(reg1, FieldMemOperand(reg2, kDetailsOffset));
  And(at, reg1, Operand(Smi::FromInt(PropertyDetails::TypeField::kMask)));
  Branch(miss, ne, at, Operand(zero_reg));

  // Get the value at the masked, scaled index and return.
  const int kValueOffset =
      SeededNumberDictionary::kElementsStartOffset + kPointerSize;
  lw(result, FieldMemOperand(reg2, kValueOffset));
}


// ---------------------------------------------------------------------------
// Instruction macros.

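// The two-operand macros below share one pattern: a register operand is used
// directly, an immediate that fits the 16-bit instruction form is encoded in
// place, and anything else (including values that need relocation) is first
// materialized into 'at' with li. For example, Addu(a0, a1,
// Operand(0x12345678)) emits li(at, ...) followed by addu(a0, a1, at).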
void MacroAssembler::Addu(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    addu(rd, rs, rt.rm());
  } else {
    if (is_int16(rt.imm32_) && !MustUseReg(rt.rmode_)) {
      addiu(rd, rs, rt.imm32_);
    } else {
      // li handles the relocation.
      DCHECK(!rs.is(at));
      li(at, rt);
      addu(rd, rs, at);
    }
  }
}


void MacroAssembler::Subu(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    subu(rd, rs, rt.rm());
  } else {
    if (is_int16(rt.imm32_) && !MustUseReg(rt.rmode_)) {
      addiu(rd, rs, -rt.imm32_);  // No subiu instr, use addiu(x, y, -imm).
    } else {
      // li handles the relocation.
      DCHECK(!rs.is(at));
      li(at, rt);
      subu(rd, rs, at);
    }
  }
}


void MacroAssembler::Mul(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    if (IsMipsArchVariant(kLoongson)) {
      mult(rs, rt.rm());
      mflo(rd);
    } else {
      mul(rd, rs, rt.rm());
    }
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    if (IsMipsArchVariant(kLoongson)) {
      mult(rs, at);
      mflo(rd);
    } else {
      mul(rd, rs, at);
    }
  }
}


void MacroAssembler::Mul(Register rd_hi, Register rd_lo,
                         Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    if (!IsMipsArchVariant(kMips32r6)) {
      mult(rs, rt.rm());
      mflo(rd_lo);
      mfhi(rd_hi);
    } else {
      if (rd_lo.is(rs)) {
        DCHECK(!rd_hi.is(rs));
        DCHECK(!rd_hi.is(rt.rm()) && !rd_lo.is(rt.rm()));
        muh(rd_hi, rs, rt.rm());
        mul(rd_lo, rs, rt.rm());
      } else {
        DCHECK(!rd_hi.is(rt.rm()) && !rd_lo.is(rt.rm()));
        mul(rd_lo, rs, rt.rm());
        muh(rd_hi, rs, rt.rm());
      }
    }
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    if (!IsMipsArchVariant(kMips32r6)) {
      mult(rs, at);
      mflo(rd_lo);
      mfhi(rd_hi);
    } else {
      if (rd_lo.is(rs)) {
        DCHECK(!rd_hi.is(rs));
        DCHECK(!rd_hi.is(at) && !rd_lo.is(at));
        muh(rd_hi, rs, at);
        mul(rd_lo, rs, at);
      } else {
        DCHECK(!rd_hi.is(at) && !rd_lo.is(at));
        mul(rd_lo, rs, at);
        muh(rd_hi, rs, at);
      }
    }
  }
}


void MacroAssembler::Mulh(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    if (!IsMipsArchVariant(kMips32r6)) {
      mult(rs, rt.rm());
      mfhi(rd);
    } else {
      muh(rd, rs, rt.rm());
    }
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    if (!IsMipsArchVariant(kMips32r6)) {
      mult(rs, at);
      mfhi(rd);
    } else {
      muh(rd, rs, at);
    }
  }
}


void MacroAssembler::Mult(Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    mult(rs, rt.rm());
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    mult(rs, at);
  }
}


void MacroAssembler::Multu(Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    multu(rs, rt.rm());
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    multu(rs, at);
  }
}


void MacroAssembler::Div(Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    div(rs, rt.rm());
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    div(rs, at);
  }
}


void MacroAssembler::Div(Register rem, Register res,
                         Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    if (!IsMipsArchVariant(kMips32r6)) {
      div(rs, rt.rm());
      mflo(res);
      mfhi(rem);
    } else {
      div(res, rs, rt.rm());
      mod(rem, rs, rt.rm());
    }
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    if (!IsMipsArchVariant(kMips32r6)) {
      div(rs, at);
      mflo(res);
      mfhi(rem);
    } else {
      div(res, rs, at);
      mod(rem, rs, at);
    }
  }
}


void MacroAssembler::Mod(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    if (!IsMipsArchVariant(kMips32r6)) {
      div(rs, rt.rm());
      mfhi(rd);
    } else {
      mod(rd, rs, rt.rm());
    }
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    if (!IsMipsArchVariant(kMips32r6)) {
      div(rs, at);
      mfhi(rd);
    } else {
      mod(rd, rs, at);
    }
  }
}


void MacroAssembler::Divu(Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    divu(rs, rt.rm());
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    divu(rs, at);
  }
}


void MacroAssembler::And(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    and_(rd, rs, rt.rm());
  } else {
    if (is_uint16(rt.imm32_) && !MustUseReg(rt.rmode_)) {
      andi(rd, rs, rt.imm32_);
    } else {
      // li handles the relocation.
      DCHECK(!rs.is(at));
      li(at, rt);
      and_(rd, rs, at);
    }
  }
}


void MacroAssembler::Or(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    or_(rd, rs, rt.rm());
  } else {
    if (is_uint16(rt.imm32_) && !MustUseReg(rt.rmode_)) {
      ori(rd, rs, rt.imm32_);
    } else {
      // li handles the relocation.
      DCHECK(!rs.is(at));
      li(at, rt);
      or_(rd, rs, at);
    }
  }
}


void MacroAssembler::Xor(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    xor_(rd, rs, rt.rm());
  } else {
    if (is_uint16(rt.imm32_) && !MustUseReg(rt.rmode_)) {
      xori(rd, rs, rt.imm32_);
    } else {
      // li handles the relocation.
      DCHECK(!rs.is(at));
      li(at, rt);
      xor_(rd, rs, at);
    }
  }
}


void MacroAssembler::Nor(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    nor(rd, rs, rt.rm());
  } else {
    // li handles the relocation.
    DCHECK(!rs.is(at));
    li(at, rt);
    nor(rd, rs, at);
  }
}


void MacroAssembler::Neg(Register rs, const Operand& rt) {
  DCHECK(rt.is_reg());
  DCHECK(!at.is(rs));
  DCHECK(!at.is(rt.rm()));
  li(at, -1);
  xor_(rs, rt.rm(), at);
}


void MacroAssembler::Slt(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    slt(rd, rs, rt.rm());
  } else {
    if (is_int16(rt.imm32_) && !MustUseReg(rt.rmode_)) {
      slti(rd, rs, rt.imm32_);
    } else {
      // li handles the relocation.
      DCHECK(!rs.is(at));
      li(at, rt);
      slt(rd, rs, at);
    }
  }
}


void MacroAssembler::Sltu(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    sltu(rd, rs, rt.rm());
  } else {
    if (is_uint16(rt.imm32_) && !MustUseReg(rt.rmode_)) {
      sltiu(rd, rs, rt.imm32_);
    } else {
      // li handles the relocation.
      DCHECK(!rs.is(at));
      li(at, rt);
      sltu(rd, rs, at);
    }
  }
}


void MacroAssembler::Ror(Register rd, Register rs, const Operand& rt) {
  if (IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r6)) {
    if (rt.is_reg()) {
      rotrv(rd, rs, rt.rm());
    } else {
      rotr(rd, rs, rt.imm32_);
    }
  } else {
    if (rt.is_reg()) {
      subu(at, zero_reg, rt.rm());
      sllv(at, rs, at);
      srlv(rd, rs, rt.rm());
      or_(rd, rd, at);
    } else {
      if (rt.imm32_ == 0) {
        srl(rd, rs, 0);
      } else {
        srl(at, rs, rt.imm32_);
        sll(rd, rs, (0x20 - rt.imm32_) & 0x1f);
        or_(rd, rd, at);
      }
    }
  }
}


void MacroAssembler::Pref(int32_t hint, const MemOperand& rs) {
  if (IsMipsArchVariant(kLoongson)) {
    lw(zero_reg, rs);
  } else {
    pref(hint, rs);
  }
}


// ------------Pseudo-instructions-------------

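// Ulw/Usw implement unaligned word accesses with the lwr/lwl and swr/swl
// pairs: on a little-endian target (which the offset + 3 below assumes) the
// "right" instruction covers the low bytes at the given address and the
// "left" one the remaining bytes, so together they assemble a full 32-bit
// word regardless of alignment.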
void MacroAssembler::Ulw(Register rd, const MemOperand& rs) {
  lwr(rd, rs);
  lwl(rd, MemOperand(rs.rm(), rs.offset() + 3));
}


void MacroAssembler::Usw(Register rd, const MemOperand& rs) {
  swr(rd, rs);
  swl(rd, MemOperand(rs.rm(), rs.offset() + 3));
}


void MacroAssembler::li(Register dst, Handle<Object> value, LiFlags mode) {
  AllowDeferredHandleDereference smi_check;
  if (value->IsSmi()) {
    li(dst, Operand(value), mode);
  } else {
    DCHECK(value->IsHeapObject());
    if (isolate()->heap()->InNewSpace(*value)) {
      Handle<Cell> cell = isolate()->factory()->NewCell(value);
      li(dst, Operand(cell));
      lw(dst, FieldMemOperand(dst, Cell::kValueOffset));
    } else {
      li(dst, Operand(value));
    }
  }
}

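// With OPTIMIZE_SIZE and no relocation, li picks the shortest encoding:
// addiu for sign-extended 16-bit values, ori when only the low half is set,
// a bare lui when only the high half is set, and a lui/ori pair otherwise;
// e.g. li(t0, Operand(0x12345678)) emits lui(t0, 0x1234) then
// ori(t0, t0, 0x5678). Patchable loads always use the two-instruction form.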
void MacroAssembler::li(Register rd, Operand j, LiFlags mode) {
  DCHECK(!j.is_reg());
  BlockTrampolinePoolScope block_trampoline_pool(this);
  if (!MustUseReg(j.rmode_) && mode == OPTIMIZE_SIZE) {
    // Normal load of an immediate value which does not need Relocation Info.
    if (is_int16(j.imm32_)) {
      addiu(rd, zero_reg, j.imm32_);
    } else if (!(j.imm32_ & kHiMask)) {
      ori(rd, zero_reg, j.imm32_);
    } else if (!(j.imm32_ & kImm16Mask)) {
      lui(rd, (j.imm32_ >> kLuiShift) & kImm16Mask);
    } else {
      lui(rd, (j.imm32_ >> kLuiShift) & kImm16Mask);
      ori(rd, rd, (j.imm32_ & kImm16Mask));
    }
  } else {
    if (MustUseReg(j.rmode_)) {
      RecordRelocInfo(j.rmode_, j.imm32_);
    }
    // We always need the same number of instructions as we may need to patch
    // this code to load another value which may need 2 instructions to load.
    lui(rd, (j.imm32_ >> kLuiShift) & kImm16Mask);
    ori(rd, rd, (j.imm32_ & kImm16Mask));
  }
}

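// MultiPush stores registers in order of descending register code, so the
// lowest-numbered register ends up closest to the stack pointer, while
// MultiPushReversed walks the codes in the opposite direction. The matching
// pop variants read the slots back in mirror order, so a push/pop pair over
// the same RegList round-trips the registers.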
void MacroAssembler::MultiPush(RegList regs) {
  int16_t num_to_push = NumberOfBitsSet(regs);
  int16_t stack_offset = num_to_push * kPointerSize;

  Subu(sp, sp, Operand(stack_offset));
  for (int16_t i = kNumRegisters - 1; i >= 0; i--) {
    if ((regs & (1 << i)) != 0) {
      stack_offset -= kPointerSize;
      sw(ToRegister(i), MemOperand(sp, stack_offset));
    }
  }
}


void MacroAssembler::MultiPushReversed(RegList regs) {
  int16_t num_to_push = NumberOfBitsSet(regs);
  int16_t stack_offset = num_to_push * kPointerSize;

  Subu(sp, sp, Operand(stack_offset));
  for (int16_t i = 0; i < kNumRegisters; i++) {
    if ((regs & (1 << i)) != 0) {
      stack_offset -= kPointerSize;
      sw(ToRegister(i), MemOperand(sp, stack_offset));
    }
  }
}


void MacroAssembler::MultiPop(RegList regs) {
  int16_t stack_offset = 0;

  for (int16_t i = 0; i < kNumRegisters; i++) {
    if ((regs & (1 << i)) != 0) {
      lw(ToRegister(i), MemOperand(sp, stack_offset));
      stack_offset += kPointerSize;
    }
  }
  addiu(sp, sp, stack_offset);
}


void MacroAssembler::MultiPopReversed(RegList regs) {
  int16_t stack_offset = 0;

  for (int16_t i = kNumRegisters - 1; i >= 0; i--) {
    if ((regs & (1 << i)) != 0) {
      lw(ToRegister(i), MemOperand(sp, stack_offset));
      stack_offset += kPointerSize;
    }
  }
  addiu(sp, sp, stack_offset);
}


void MacroAssembler::MultiPushFPU(RegList regs) {
  int16_t num_to_push = NumberOfBitsSet(regs);
  int16_t stack_offset = num_to_push * kDoubleSize;

  Subu(sp, sp, Operand(stack_offset));
  for (int16_t i = kNumRegisters - 1; i >= 0; i--) {
    if ((regs & (1 << i)) != 0) {
      stack_offset -= kDoubleSize;
      sdc1(FPURegister::from_code(i), MemOperand(sp, stack_offset));
    }
  }
}


void MacroAssembler::MultiPushReversedFPU(RegList regs) {
  int16_t num_to_push = NumberOfBitsSet(regs);
  int16_t stack_offset = num_to_push * kDoubleSize;

  Subu(sp, sp, Operand(stack_offset));
  for (int16_t i = 0; i < kNumRegisters; i++) {
    if ((regs & (1 << i)) != 0) {
      stack_offset -= kDoubleSize;
      sdc1(FPURegister::from_code(i), MemOperand(sp, stack_offset));
    }
  }
}


void MacroAssembler::MultiPopFPU(RegList regs) {
  int16_t stack_offset = 0;

  for (int16_t i = 0; i < kNumRegisters; i++) {
    if ((regs & (1 << i)) != 0) {
      ldc1(FPURegister::from_code(i), MemOperand(sp, stack_offset));
      stack_offset += kDoubleSize;
    }
  }
  addiu(sp, sp, stack_offset);
}


void MacroAssembler::MultiPopReversedFPU(RegList regs) {
  int16_t stack_offset = 0;

  for (int16_t i = kNumRegisters - 1; i >= 0; i--) {
    if ((regs & (1 << i)) != 0) {
      ldc1(FPURegister::from_code(i), MemOperand(sp, stack_offset));
      stack_offset += kDoubleSize;
    }
  }
  addiu(sp, sp, stack_offset);
}


void MacroAssembler::FlushICache(Register address, unsigned instructions) {
  RegList saved_regs = kJSCallerSaved | ra.bit();
  MultiPush(saved_regs);
  AllowExternalCallThatCantCauseGC scope(this);

  // Save to a0 in case address == t0.
  Move(a0, address);
  PrepareCallCFunction(2, t0);

  li(a1, instructions * kInstrSize);
  CallCFunction(ExternalReference::flush_icache_function(isolate()), 2);
  MultiPop(saved_regs);
}

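// Ext(rt, rs, pos, size) extracts a bitfield, i.e. rt = (rs >> pos) &
// ((1 << size) - 1). On MIPS32R2/R6 this is a single ext_ instruction; the
// pre-R2 fallback gets the same effect with a left shift that discards the
// bits above the field followed by a logical right shift.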
void MacroAssembler::Ext(Register rt,
                         Register rs,
                         uint16_t pos,
                         uint16_t size) {
  DCHECK(pos < 32);
  DCHECK(pos + size < 33);

  if (IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r6)) {
    ext_(rt, rs, pos, size);
  } else {
    // Move rs to rt and shift it left then right to get the
    // desired bitfield on the right side and zeroes on the left.
    int shift_left = 32 - (pos + size);
    sll(rt, rs, shift_left);  // Acts as a move if shift_left == 0.

    int shift_right = 32 - size;
    if (shift_right > 0) {
      srl(rt, rt, shift_right);
    }
  }
}

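// Ins(rt, rs, pos, size) replaces bits [pos, pos + size) of rt with the low
// 'size' bits of rs and leaves the rest of rt intact. The pre-R2 fallback
// below builds the field mask (1 << size) - 1 as an unsigned shift of -1,
// positions the source bits and the inverted mask with shifts, and merges
// the two halves with or_; it clobbers t8 and at in the process.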
void MacroAssembler::Ins(Register rt,
                         Register rs,
                         uint16_t pos,
                         uint16_t size) {
  DCHECK(pos < 32);
  DCHECK(pos + size <= 32);
  DCHECK(size != 0);

  if (IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r6)) {
    ins_(rt, rs, pos, size);
  } else {
    DCHECK(!rt.is(t8) && !rs.is(t8));
    Subu(at, zero_reg, Operand(1));
    srl(at, at, 32 - size);
    and_(t8, rs, at);
    sll(t8, t8, pos);
    sll(at, at, pos);
    nor(at, at, zero_reg);
    and_(at, rt, at);
    or_(rt, t8, at);
  }
}


void MacroAssembler::Cvt_d_uw(FPURegister fd,
                              FPURegister fs,
                              FPURegister scratch) {
  // Move the data from fs to t8.
  mfc1(t8, fs);
  Cvt_d_uw(fd, t8, scratch);
}

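// Unsigned-to-double conversion: mtc1/cvt_d_w only handles signed 32-bit
// values, so the MSB is split off first, the remaining 31 bits are converted
// as a non-negative signed value, and 2^31 is added back when the MSB was
// set. The constant 0x41E00000 written to the high word below is the
// IEEE-754 encoding of the double 2^31 (its low word is zero).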
void MacroAssembler::Cvt_d_uw(FPURegister fd,
                              Register rs,
                              FPURegister scratch) {
  // Convert rs to a FP value in fd (and fd + 1).
  // We do this by converting rs minus the MSB to avoid sign conversion,
  // then adding 2^31 to the result (if needed).

  DCHECK(!fd.is(scratch));
  DCHECK(!rs.is(t9));
  DCHECK(!rs.is(at));

  // Save rs's MSB to t9.
  Ext(t9, rs, 31, 1);
  // Remove rs's MSB.
  Ext(at, rs, 0, 31);
  // Move the result to fd.
  mtc1(at, fd);

  // Convert fd to a real FP value.
  cvt_d_w(fd, fd);

  Label conversion_done;

  // If rs's MSB was 0, it's done.
  // Otherwise we need to add that to the FP register.
  Branch(&conversion_done, eq, t9, Operand(zero_reg));

  // Load 2^31 into scratch as its float representation.
  li(at, 0x41E00000);
  mtc1(zero_reg, scratch);
  Mthc1(at, scratch);
  // Add it to fd.
  add_d(fd, fd, scratch);

  bind(&conversion_done);
}


void MacroAssembler::Trunc_uw_d(FPURegister fd,
                                FPURegister fs,
                                FPURegister scratch) {
  Trunc_uw_d(fs, t8, scratch);
  mtc1(t8, fd);
}


void MacroAssembler::Trunc_w_d(FPURegister fd, FPURegister fs) {
  if (IsMipsArchVariant(kLoongson) && fd.is(fs)) {
    Mfhc1(t8, fs);
    trunc_w_d(fd, fs);
    Mthc1(t8, fs);
  } else {
    trunc_w_d(fd, fs);
  }
}


void MacroAssembler::Round_w_d(FPURegister fd, FPURegister fs) {
  if (IsMipsArchVariant(kLoongson) && fd.is(fs)) {
    Mfhc1(t8, fs);
    round_w_d(fd, fs);
    Mthc1(t8, fs);
  } else {
    round_w_d(fd, fs);
  }
}


void MacroAssembler::Floor_w_d(FPURegister fd, FPURegister fs) {
  if (IsMipsArchVariant(kLoongson) && fd.is(fs)) {
    Mfhc1(t8, fs);
    floor_w_d(fd, fs);
    Mthc1(t8, fs);
  } else {
    floor_w_d(fd, fs);
  }
}


void MacroAssembler::Ceil_w_d(FPURegister fd, FPURegister fs) {
  if (IsMipsArchVariant(kLoongson) && fd.is(fs)) {
    Mfhc1(t8, fs);
    ceil_w_d(fd, fs);
    Mthc1(t8, fs);
  } else {
    ceil_w_d(fd, fs);
  }
}


void MacroAssembler::Trunc_uw_d(FPURegister fd,
                                Register rs,
                                FPURegister scratch) {
  DCHECK(!fd.is(scratch));
  DCHECK(!rs.is(at));

  // Load 2^31 into scratch as its float representation.
  li(at, 0x41E00000);
  mtc1(zero_reg, scratch);
  Mthc1(at, scratch);
  // Test if scratch > fd.
  // If fd < 2^31 we can convert it normally.
  Label simple_convert;
  BranchF(&simple_convert, NULL, lt, fd, scratch);

  // First we subtract 2^31 from fd, then trunc it to rs
  // and add 2^31 to rs.
  sub_d(scratch, fd, scratch);
  trunc_w_d(scratch, scratch);
  mfc1(rs, scratch);
  Or(rs, rs, 1 << 31);

  Label done;
  Branch(&done);
  // Simple conversion.
  bind(&simple_convert);
  trunc_w_d(scratch, fd);
  mfc1(rs, scratch);

  bind(&done);
}

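// Mthc1/Mfhc1 abstract over the two FPU register models: in FP64 mode a
// double lives in one 64-bit register and its high word is accessed with
// mthc1/mfhc1, while in FP32 mode a double occupies an even/odd register
// pair and the high word is the odd member, fs.high().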
void MacroAssembler::Mthc1(Register rt, FPURegister fs) {
  if (IsFp64Mode()) {
    mthc1(rt, fs);
  } else {
    mtc1(rt, fs.high());
  }
}


void MacroAssembler::Mfhc1(Register rt, FPURegister fs) {
  if (IsFp64Mode()) {
    mfhc1(rt, fs);
  } else {
    mfc1(rt, fs.high());
  }
}

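// BranchF maps integer-style conditions onto the FPU compare unit. Note how
// gt and ge use the unordered predicates ULE/ULT plus an inverted branch
// (bc1f or bc1eqz): if either operand is NaN the unordered compare succeeds
// and the inverted branch falls through, so the branch is never taken on
// NaN unless the caller passed a 'nan' label.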
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001341void MacroAssembler::BranchF(Label* target,
1342 Label* nan,
1343 Condition cc,
1344 FPURegister cmp1,
1345 FPURegister cmp2,
1346 BranchDelaySlot bd) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001347 BlockTrampolinePoolScope block_trampoline_pool(this);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001348 if (cc == al) {
1349 Branch(bd, target);
1350 return;
1351 }
1352
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001353 DCHECK(nan || target);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001354 // Check for unordered (NaN) cases.
1355 if (nan) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001356 if (!IsMipsArchVariant(kMips32r6)) {
1357 c(UN, D, cmp1, cmp2);
1358 bc1t(nan);
1359 } else {
1360 // Use kDoubleCompareReg for comparison result. It has to be unavailable
1361 // to lithium register allocator.
1362 DCHECK(!cmp1.is(kDoubleCompareReg) && !cmp2.is(kDoubleCompareReg));
1363 cmp(UN, L, kDoubleCompareReg, cmp1, cmp2);
1364 bc1nez(nan, kDoubleCompareReg);
1365 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001366 }
1367
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001368 if (!IsMipsArchVariant(kMips32r6)) {
1369 if (target) {
1370 // Here NaN cases were either handled by this function or are assumed to
1371 // have been handled by the caller.
1372 switch (cc) {
1373 case lt:
1374 c(OLT, D, cmp1, cmp2);
1375 bc1t(target);
1376 break;
1377 case gt:
1378 c(ULE, D, cmp1, cmp2);
1379 bc1f(target);
1380 break;
1381 case ge:
1382 c(ULT, D, cmp1, cmp2);
1383 bc1f(target);
1384 break;
1385 case le:
1386 c(OLE, D, cmp1, cmp2);
1387 bc1t(target);
1388 break;
1389 case eq:
1390 c(EQ, D, cmp1, cmp2);
1391 bc1t(target);
1392 break;
1393 case ueq:
1394 c(UEQ, D, cmp1, cmp2);
1395 bc1t(target);
1396 break;
1397 case ne:
1398 c(EQ, D, cmp1, cmp2);
1399 bc1f(target);
1400 break;
1401 case nue:
1402 c(UEQ, D, cmp1, cmp2);
1403 bc1f(target);
1404 break;
1405 default:
1406 CHECK(0);
1407 }
1408 }
1409 } else {
1410 if (target) {
1411 // Here NaN cases were either handled by this function or are assumed to
1412 // have been handled by the caller.
1413 // Unsigned conditions are treated as their signed counterpart.
1414 // Use kDoubleCompareReg for comparison result, it is
1415 // valid in fp64 (FR = 1) mode which is implied for mips32r6.
1416 DCHECK(!cmp1.is(kDoubleCompareReg) && !cmp2.is(kDoubleCompareReg));
1417 switch (cc) {
1418 case lt:
1419 cmp(OLT, L, kDoubleCompareReg, cmp1, cmp2);
1420 bc1nez(target, kDoubleCompareReg);
1421 break;
1422 case gt:
1423 cmp(ULE, L, kDoubleCompareReg, cmp1, cmp2);
1424 bc1eqz(target, kDoubleCompareReg);
1425 break;
1426 case ge:
1427 cmp(ULT, L, kDoubleCompareReg, cmp1, cmp2);
1428 bc1eqz(target, kDoubleCompareReg);
1429 break;
1430 case le:
1431 cmp(OLE, L, kDoubleCompareReg, cmp1, cmp2);
1432 bc1nez(target, kDoubleCompareReg);
1433 break;
1434 case eq:
1435 cmp(EQ, L, kDoubleCompareReg, cmp1, cmp2);
1436 bc1nez(target, kDoubleCompareReg);
1437 break;
1438 case ueq:
1439 cmp(UEQ, L, kDoubleCompareReg, cmp1, cmp2);
1440 bc1nez(target, kDoubleCompareReg);
1441 break;
1442 case ne:
1443 cmp(EQ, L, kDoubleCompareReg, cmp1, cmp2);
1444 bc1eqz(target, kDoubleCompareReg);
1445 break;
1446 case nue:
1447 cmp(UEQ, L, kDoubleCompareReg, cmp1, cmp2);
1448 bc1eqz(target, kDoubleCompareReg);
1449 break;
1450 default:
1451 CHECK(0);
1452 }
1453 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001454 }
1455
1456 if (bd == PROTECT) {
1457 nop();
1458 }
1459}
1460
1461
1462void MacroAssembler::Move(FPURegister dst, double imm) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001463 static const DoubleRepresentation minus_zero(-0.0);
1464 static const DoubleRepresentation zero(0.0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001465 DoubleRepresentation value_rep(imm);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001466 // Handle special values first.
1467 bool force_load = dst.is(kDoubleRegZero);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001468 if (value_rep == zero && !force_load) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001469 mov_d(dst, kDoubleRegZero);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001470 } else if (value_rep == minus_zero && !force_load) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001471 neg_d(dst, kDoubleRegZero);
1472 } else {
1473 uint32_t lo, hi;
1474 DoubleAsTwoUInt32(imm, &lo, &hi);
1475 // Move the low part of the double into the lower of the corresponding FPU
1476 // register of FPU register pair.
1477 if (lo != 0) {
1478 li(at, Operand(lo));
1479 mtc1(at, dst);
1480 } else {
1481 mtc1(zero_reg, dst);
1482 }
1483 // Move the high part of the double into the higher of the corresponding FPU
1484 // register of FPU register pair.
1485 if (hi != 0) {
1486 li(at, Operand(hi));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001487 Mthc1(at, dst);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001488 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001489 Mthc1(zero_reg, dst);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001490 }
1491 }
1492}


void MacroAssembler::Movz(Register rd, Register rs, Register rt) {
  if (IsMipsArchVariant(kLoongson) || IsMipsArchVariant(kMips32r6)) {
    Label done;
    Branch(&done, ne, rt, Operand(zero_reg));
    mov(rd, rs);
    bind(&done);
  } else {
    movz(rd, rs, rt);
  }
}


void MacroAssembler::Movn(Register rd, Register rs, Register rt) {
  if (IsMipsArchVariant(kLoongson) || IsMipsArchVariant(kMips32r6)) {
    Label done;
    Branch(&done, eq, rt, Operand(zero_reg));
    mov(rd, rs);
    bind(&done);
  } else {
    movn(rd, rs, rt);
  }
}


void MacroAssembler::Movt(Register rd, Register rs, uint16_t cc) {
  if (IsMipsArchVariant(kLoongson)) {
    // Test an FP condition code and then conditionally move rs to rd.
    // We do not currently use any FPU cc bit other than bit 0.
    DCHECK(cc == 0);
    DCHECK(!(rs.is(t8) || rd.is(t8)));
    Label done;
    Register scratch = t8;
    // To test the condition code we fetch the content of the FCSR register
    // and then test its cc (floating point condition code) bit (for cc = 0,
    // this is bit 23 of the FCSR).
    cfc1(scratch, FCSR);
    // For the MIPS I, II and III architectures, the contents of scratch are
    // UNPREDICTABLE for the instruction immediately following CFC1.
    nop();
    srl(scratch, scratch, 16);
    andi(scratch, scratch, 0x0080);
    Branch(&done, eq, scratch, Operand(zero_reg));
    mov(rd, rs);
    bind(&done);
  } else {
    movt(rd, rs, cc);
  }
}


void MacroAssembler::Movf(Register rd, Register rs, uint16_t cc) {
  if (IsMipsArchVariant(kLoongson)) {
    // Test an FP condition code and then conditionally move rs to rd.
    // We do not currently use any FPU cc bit other than bit 0.
    DCHECK(cc == 0);
    DCHECK(!(rs.is(t8) || rd.is(t8)));
    Label done;
    Register scratch = t8;
    // To test the condition code we fetch the content of the FCSR register
    // and then test its cc (floating point condition code) bit (for cc = 0,
    // this is bit 23 of the FCSR).
    cfc1(scratch, FCSR);
    // For the MIPS I, II and III architectures, the contents of scratch are
    // UNPREDICTABLE for the instruction immediately following CFC1.
    nop();
    srl(scratch, scratch, 16);
    andi(scratch, scratch, 0x0080);
    Branch(&done, ne, scratch, Operand(zero_reg));
    mov(rd, rs);
    bind(&done);
  } else {
    movf(rd, rs, cc);
  }
}


void MacroAssembler::Clz(Register rd, Register rs) {
  if (IsMipsArchVariant(kLoongson)) {
    DCHECK(!(rd.is(t8) || rd.is(t9)) && !(rs.is(t8) || rs.is(t9)));
    Register mask = t8;
    Register scratch = t9;
    Label loop, end;
    mov(at, rs);
    mov(rd, zero_reg);
    lui(mask, 0x8000);
    bind(&loop);
    and_(scratch, at, mask);
    Branch(&end, ne, scratch, Operand(zero_reg));
    addiu(rd, rd, 1);
    Branch(&loop, ne, mask, Operand(zero_reg), USE_DELAY_SLOT);
    srl(mask, mask, 1);
    bind(&end);
  } else {
    clz(rd, rs);
  }
}
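
// Worked example (illustrative): for rs == 0x00F00000 the Loongson fallback
// walks the mask down from bit 31 and stops at the first set bit (bit 23),
// leaving rd == 8, which matches what the native clz instruction returns.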


void MacroAssembler::EmitFPUTruncate(FPURoundingMode rounding_mode,
                                     Register result,
                                     DoubleRegister double_input,
                                     Register scratch,
                                     DoubleRegister double_scratch,
                                     Register except_flag,
                                     CheckForInexactConversion check_inexact) {
  DCHECK(!result.is(scratch));
  DCHECK(!double_input.is(double_scratch));
  DCHECK(!except_flag.is(scratch));

  Label done;

  // Clear the except flag (0 = no exception).
  mov(except_flag, zero_reg);

  // Test for values that can be exactly represented as a signed 32-bit
  // integer.
  cvt_w_d(double_scratch, double_input);
  mfc1(result, double_scratch);
  cvt_d_w(double_scratch, double_scratch);
  BranchF(&done, NULL, eq, double_input, double_scratch);

  int32_t except_mask = kFCSRFlagMask;  // Assume interested in all exceptions.

  if (check_inexact == kDontCheckForInexactConversion) {
    // Ignore inexact exceptions.
    except_mask &= ~kFCSRInexactFlagMask;
  }

  // Save FCSR.
  cfc1(scratch, FCSR);
  // Disable FPU exceptions.
  ctc1(zero_reg, FCSR);

  // Do operation based on rounding mode.
  switch (rounding_mode) {
    case kRoundToNearest:
      Round_w_d(double_scratch, double_input);
      break;
    case kRoundToZero:
      Trunc_w_d(double_scratch, double_input);
      break;
    case kRoundToPlusInf:
      Ceil_w_d(double_scratch, double_input);
      break;
    case kRoundToMinusInf:
      Floor_w_d(double_scratch, double_input);
      break;
  }  // End of switch-statement.

  // Retrieve FCSR.
  cfc1(except_flag, FCSR);
  // Restore FCSR.
  ctc1(scratch, FCSR);
  // Move the converted value into the result register.
  mfc1(result, double_scratch);

  // Check for fpu exceptions.
  And(except_flag, except_flag, Operand(except_mask));

  bind(&done);
}
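
// Usage sketch (illustrative; the register choices here are hypothetical):
// callers typically truncate and then test except_flag for a bailout, e.g.:
//   Label slow;
//   EmitFPUTruncate(kRoundToZero, v0, f12, t0, f10, t1,
//                   kCheckForInexactConversion);
//   Branch(&slow, ne, t1, Operand(zero_reg));  // FPU exception -> slow path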


void MacroAssembler::TryInlineTruncateDoubleToI(Register result,
                                                DoubleRegister double_input,
                                                Label* done) {
  DoubleRegister single_scratch = kLithiumScratchDouble.low();
  Register scratch = at;
  Register scratch2 = t9;

  // Clear cumulative exception flags and save the FCSR.
  cfc1(scratch2, FCSR);
  ctc1(zero_reg, FCSR);
  // Try a conversion to a signed integer.
  trunc_w_d(single_scratch, double_input);
  mfc1(result, single_scratch);
  // Retrieve and restore the FCSR.
  cfc1(scratch, FCSR);
  ctc1(scratch2, FCSR);
  // Check for overflow and NaNs.
  And(scratch,
      scratch,
      kFCSROverflowFlagMask | kFCSRUnderflowFlagMask | kFCSRInvalidOpFlagMask);
  // If we had no exceptions we are done.
  Branch(done, eq, scratch, Operand(zero_reg));
}


void MacroAssembler::TruncateDoubleToI(Register result,
                                       DoubleRegister double_input) {
  Label done;

  TryInlineTruncateDoubleToI(result, double_input, &done);

  // If we fell through, the inline version didn't succeed - call the stub
  // instead.
  push(ra);
  Subu(sp, sp, Operand(kDoubleSize));  // Put input on stack.
  sdc1(double_input, MemOperand(sp, 0));

  DoubleToIStub stub(isolate(), sp, result, 0, true, true);
  CallStub(&stub);

  Addu(sp, sp, Operand(kDoubleSize));
  pop(ra);

  bind(&done);
}
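
// Design note (best-effort reading, not from the original comments): the
// DoubleToIStub arguments above appear to be the register addressing the
// input (sp), the result register, the offset of the double from that
// address (0), and two flags requesting a truncating conversion that skips
// the inline fast path already attempted above.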


void MacroAssembler::TruncateHeapNumberToI(Register result, Register object) {
  Label done;
  DoubleRegister double_scratch = f12;
  DCHECK(!result.is(object));

  ldc1(double_scratch,
       MemOperand(object, HeapNumber::kValueOffset - kHeapObjectTag));
  TryInlineTruncateDoubleToI(result, double_scratch, &done);

  // If we fell through, the inline version didn't succeed - call the stub
  // instead.
  push(ra);
  DoubleToIStub stub(isolate(),
                     object,
                     result,
                     HeapNumber::kValueOffset - kHeapObjectTag,
                     true,
                     true);
  CallStub(&stub);
  pop(ra);

  bind(&done);
}


void MacroAssembler::TruncateNumberToI(Register object,
                                       Register result,
                                       Register heap_number_map,
                                       Register scratch,
                                       Label* not_number) {
  Label done;
  DCHECK(!result.is(object));

  UntagAndJumpIfSmi(result, object, &done);
  JumpIfNotHeapNumber(object, heap_number_map, scratch, not_number);
  TruncateHeapNumberToI(result, object);

  bind(&done);
}


void MacroAssembler::GetLeastBitsFromSmi(Register dst,
                                         Register src,
                                         int num_least_bits) {
  Ext(dst, src, kSmiTagSize, num_least_bits);
}


void MacroAssembler::GetLeastBitsFromInt32(Register dst,
                                           Register src,
                                           int num_least_bits) {
  And(dst, src, Operand((1 << num_least_bits) - 1));
}


// Emulated conditional branches do not emit a nop in the branch delay slot.
//
// BRANCH_ARGS_CHECK checks that conditional jump arguments are correct.
#define BRANCH_ARGS_CHECK(cond, rs, rt) DCHECK( \
    (cond == cc_always && rs.is(zero_reg) && rt.rm().is(zero_reg)) || \
    (cond != cc_always && (!rs.is(zero_reg) || !rt.rm().is(zero_reg))))
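
// Illustrative note: the macro enforces the calling convention that an
// unconditional branch passes (cc_always, zero_reg, Operand(zero_reg)),
// while a conditional branch must involve at least one non-zero register.
// For example, Branch(&L, eq, zero_reg, Operand(zero_reg)) would trip the
// DCHECK because it is really an always-taken branch in disguise.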


void MacroAssembler::Branch(int16_t offset, BranchDelaySlot bdslot) {
  BranchShort(offset, bdslot);
}


void MacroAssembler::Branch(int16_t offset, Condition cond, Register rs,
                            const Operand& rt,
                            BranchDelaySlot bdslot) {
  BranchShort(offset, cond, rs, rt, bdslot);
}


void MacroAssembler::Branch(Label* L, BranchDelaySlot bdslot) {
  if (L->is_bound()) {
    if (is_near(L)) {
      BranchShort(L, bdslot);
    } else {
      Jr(L, bdslot);
    }
  } else {
    if (is_trampoline_emitted()) {
      Jr(L, bdslot);
    } else {
      BranchShort(L, bdslot);
    }
  }
}


void MacroAssembler::Branch(Label* L, Condition cond, Register rs,
                            const Operand& rt,
                            BranchDelaySlot bdslot) {
  if (L->is_bound()) {
    if (is_near(L)) {
      BranchShort(L, cond, rs, rt, bdslot);
    } else {
      if (cond != cc_always) {
        Label skip;
        Condition neg_cond = NegateCondition(cond);
        BranchShort(&skip, neg_cond, rs, rt);
        Jr(L, bdslot);
        bind(&skip);
      } else {
        Jr(L, bdslot);
      }
    }
  } else {
    if (is_trampoline_emitted()) {
      if (cond != cc_always) {
        Label skip;
        Condition neg_cond = NegateCondition(cond);
        BranchShort(&skip, neg_cond, rs, rt);
        Jr(L, bdslot);
        bind(&skip);
      } else {
        Jr(L, bdslot);
      }
    } else {
      BranchShort(L, cond, rs, rt, bdslot);
    }
  }
}


void MacroAssembler::Branch(Label* L,
                            Condition cond,
                            Register rs,
                            Heap::RootListIndex index,
                            BranchDelaySlot bdslot) {
  LoadRoot(at, index);
  Branch(L, cond, rs, Operand(at), bdslot);
}


void MacroAssembler::BranchShort(int16_t offset, BranchDelaySlot bdslot) {
  b(offset);

  // Emit a nop in the branch delay slot if required.
  if (bdslot == PROTECT)
    nop();
}


void MacroAssembler::BranchShort(int16_t offset, Condition cond, Register rs,
                                 const Operand& rt,
                                 BranchDelaySlot bdslot) {
  BRANCH_ARGS_CHECK(cond, rs, rt);
  DCHECK(!rs.is(zero_reg));
  Register r2 = no_reg;
  Register scratch = at;

  if (rt.is_reg()) {
    // NOTE: 'at' can be clobbered by Branch but it is legal to use it as rs
    // or rt.
    BlockTrampolinePoolScope block_trampoline_pool(this);
    r2 = rt.rm_;
    switch (cond) {
      case cc_always:
        b(offset);
        break;
      case eq:
        beq(rs, r2, offset);
        break;
      case ne:
        bne(rs, r2, offset);
        break;
      // Signed comparison.
      case greater:
        if (r2.is(zero_reg)) {
          bgtz(rs, offset);
        } else {
          slt(scratch, r2, rs);
          bne(scratch, zero_reg, offset);
        }
        break;
      case greater_equal:
        if (r2.is(zero_reg)) {
          bgez(rs, offset);
        } else {
          slt(scratch, rs, r2);
          beq(scratch, zero_reg, offset);
        }
        break;
      case less:
        if (r2.is(zero_reg)) {
          bltz(rs, offset);
        } else {
          slt(scratch, rs, r2);
          bne(scratch, zero_reg, offset);
        }
        break;
      case less_equal:
        if (r2.is(zero_reg)) {
          blez(rs, offset);
        } else {
          slt(scratch, r2, rs);
          beq(scratch, zero_reg, offset);
        }
        break;
      // Unsigned comparison.
      case Ugreater:
        if (r2.is(zero_reg)) {
          // Unsigned rs > 0 is equivalent to rs != 0.
          bne(rs, zero_reg, offset);
        } else {
          sltu(scratch, r2, rs);
          bne(scratch, zero_reg, offset);
        }
        break;
      case Ugreater_equal:
        if (r2.is(zero_reg)) {
          // Unsigned rs >= 0 is always true.
          b(offset);
        } else {
          sltu(scratch, rs, r2);
          beq(scratch, zero_reg, offset);
        }
        break;
      case Uless:
        if (r2.is(zero_reg)) {
          // Unsigned rs < 0 is never true; no code needs to be emitted.
          return;
        } else {
          sltu(scratch, rs, r2);
          bne(scratch, zero_reg, offset);
        }
        break;
      case Uless_equal:
        if (r2.is(zero_reg)) {
          // Unsigned rs <= 0 is equivalent to rs == 0.
          beq(rs, zero_reg, offset);
        } else {
          sltu(scratch, r2, rs);
          beq(scratch, zero_reg, offset);
        }
        break;
      default:
        UNREACHABLE();
    }
  } else {
    // Be careful to always use shifted_branch_offset only just before the
    // branch instruction, as the location will be remembered for patching
    // the target.
    BlockTrampolinePoolScope block_trampoline_pool(this);
    switch (cond) {
      case cc_always:
        b(offset);
        break;
      case eq:
        // We don't want any other register but scratch clobbered.
        DCHECK(!scratch.is(rs));
        r2 = scratch;
        li(r2, rt);
        beq(rs, r2, offset);
        break;
      case ne:
        // We don't want any other register but scratch clobbered.
        DCHECK(!scratch.is(rs));
        r2 = scratch;
        li(r2, rt);
        bne(rs, r2, offset);
        break;
      // Signed comparison.
      case greater:
        if (rt.imm32_ == 0) {
          bgtz(rs, offset);
        } else {
          r2 = scratch;
          li(r2, rt);
          slt(scratch, r2, rs);
          bne(scratch, zero_reg, offset);
        }
        break;
      case greater_equal:
        if (rt.imm32_ == 0) {
          bgez(rs, offset);
        } else if (is_int16(rt.imm32_)) {
          slti(scratch, rs, rt.imm32_);
          beq(scratch, zero_reg, offset);
        } else {
          r2 = scratch;
          li(r2, rt);
          slt(scratch, rs, r2);
          beq(scratch, zero_reg, offset);
        }
        break;
      case less:
        if (rt.imm32_ == 0) {
          bltz(rs, offset);
        } else if (is_int16(rt.imm32_)) {
          slti(scratch, rs, rt.imm32_);
          bne(scratch, zero_reg, offset);
        } else {
          r2 = scratch;
          li(r2, rt);
          slt(scratch, rs, r2);
          bne(scratch, zero_reg, offset);
        }
        break;
      case less_equal:
        if (rt.imm32_ == 0) {
          blez(rs, offset);
        } else {
          r2 = scratch;
          li(r2, rt);
          slt(scratch, r2, rs);
          beq(scratch, zero_reg, offset);
        }
        break;
      // Unsigned comparison.
      case Ugreater:
        if (rt.imm32_ == 0) {
          // Unsigned rs > 0 is equivalent to rs != 0.
          bne(rs, zero_reg, offset);
        } else {
          r2 = scratch;
          li(r2, rt);
          sltu(scratch, r2, rs);
          bne(scratch, zero_reg, offset);
        }
        break;
      case Ugreater_equal:
        if (rt.imm32_ == 0) {
          // Unsigned rs >= 0 is always true.
          b(offset);
        } else if (is_int16(rt.imm32_)) {
          sltiu(scratch, rs, rt.imm32_);
          beq(scratch, zero_reg, offset);
        } else {
          r2 = scratch;
          li(r2, rt);
          sltu(scratch, rs, r2);
          beq(scratch, zero_reg, offset);
        }
        break;
      case Uless:
        if (rt.imm32_ == 0) {
          // Unsigned rs < 0 is never true; no code needs to be emitted.
          return;
        } else if (is_int16(rt.imm32_)) {
          sltiu(scratch, rs, rt.imm32_);
          bne(scratch, zero_reg, offset);
        } else {
          r2 = scratch;
          li(r2, rt);
          sltu(scratch, rs, r2);
          bne(scratch, zero_reg, offset);
        }
        break;
      case Uless_equal:
        if (rt.imm32_ == 0) {
          // Unsigned rs <= 0 is equivalent to rs == 0.
          beq(rs, zero_reg, offset);
        } else {
          r2 = scratch;
          li(r2, rt);
          sltu(scratch, r2, rs);
          beq(scratch, zero_reg, offset);
        }
        break;
      default:
        UNREACHABLE();
    }
  }
  // Emit a nop in the branch delay slot if required.
  if (bdslot == PROTECT)
    nop();
}
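
// Worked example (illustrative): BranchShort(offset, greater_equal, a0,
// Operand(a1)) emits
//   slt(at, a0, a1);            // at = (a0 < a1) ? 1 : 0
//   beq(at, zero_reg, offset);  // taken when a0 >= a1
// which is why callers must not use 'at' as a branch operand.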


void MacroAssembler::BranchShort(Label* L, BranchDelaySlot bdslot) {
  // We use branch_offset as an argument for the branch instructions to be
  // sure it is called just before generating the branch instruction, as
  // needed.

  b(shifted_branch_offset(L, false));

  // Emit a nop in the branch delay slot if required.
  if (bdslot == PROTECT)
    nop();
}


void MacroAssembler::BranchShort(Label* L, Condition cond, Register rs,
                                 const Operand& rt,
                                 BranchDelaySlot bdslot) {
  BRANCH_ARGS_CHECK(cond, rs, rt);

  int32_t offset = 0;
  Register r2 = no_reg;
  Register scratch = at;
  if (rt.is_reg()) {
    BlockTrampolinePoolScope block_trampoline_pool(this);
    r2 = rt.rm_;
    // Be careful to always use shifted_branch_offset only just before the
    // branch instruction, as the location will be remembered for patching
    // the target.
    switch (cond) {
      case cc_always:
        offset = shifted_branch_offset(L, false);
        b(offset);
        break;
      case eq:
        offset = shifted_branch_offset(L, false);
        beq(rs, r2, offset);
        break;
      case ne:
        offset = shifted_branch_offset(L, false);
        bne(rs, r2, offset);
        break;
      // Signed comparison.
      case greater:
        if (r2.is(zero_reg)) {
          offset = shifted_branch_offset(L, false);
          bgtz(rs, offset);
        } else {
          slt(scratch, r2, rs);
          offset = shifted_branch_offset(L, false);
          bne(scratch, zero_reg, offset);
        }
        break;
      case greater_equal:
        if (r2.is(zero_reg)) {
          offset = shifted_branch_offset(L, false);
          bgez(rs, offset);
        } else {
          slt(scratch, rs, r2);
          offset = shifted_branch_offset(L, false);
          beq(scratch, zero_reg, offset);
        }
        break;
      case less:
        if (r2.is(zero_reg)) {
          offset = shifted_branch_offset(L, false);
          bltz(rs, offset);
        } else {
          slt(scratch, rs, r2);
          offset = shifted_branch_offset(L, false);
          bne(scratch, zero_reg, offset);
        }
        break;
      case less_equal:
        if (r2.is(zero_reg)) {
          offset = shifted_branch_offset(L, false);
          blez(rs, offset);
        } else {
          slt(scratch, r2, rs);
          offset = shifted_branch_offset(L, false);
          beq(scratch, zero_reg, offset);
        }
        break;
      // Unsigned comparison.
      case Ugreater:
        if (r2.is(zero_reg)) {
          // Unsigned rs > 0 is equivalent to rs != 0.
          offset = shifted_branch_offset(L, false);
          bne(rs, zero_reg, offset);
        } else {
          sltu(scratch, r2, rs);
          offset = shifted_branch_offset(L, false);
          bne(scratch, zero_reg, offset);
        }
        break;
      case Ugreater_equal:
        if (r2.is(zero_reg)) {
          // Unsigned rs >= 0 is always true.
          offset = shifted_branch_offset(L, false);
          b(offset);
        } else {
          sltu(scratch, rs, r2);
          offset = shifted_branch_offset(L, false);
          beq(scratch, zero_reg, offset);
        }
        break;
      case Uless:
        if (r2.is(zero_reg)) {
          // Unsigned rs < 0 is never true; no code needs to be emitted.
          return;
        } else {
          sltu(scratch, rs, r2);
          offset = shifted_branch_offset(L, false);
          bne(scratch, zero_reg, offset);
        }
        break;
      case Uless_equal:
        if (r2.is(zero_reg)) {
          // Unsigned rs <= 0 is equivalent to rs == 0.
          offset = shifted_branch_offset(L, false);
          beq(rs, zero_reg, offset);
        } else {
          sltu(scratch, r2, rs);
          offset = shifted_branch_offset(L, false);
          beq(scratch, zero_reg, offset);
        }
        break;
      default:
        UNREACHABLE();
    }
  } else {
    // Be careful to always use shifted_branch_offset only just before the
    // branch instruction, as the location will be remembered for patching
    // the target.
    BlockTrampolinePoolScope block_trampoline_pool(this);
    switch (cond) {
      case cc_always:
        offset = shifted_branch_offset(L, false);
        b(offset);
        break;
      case eq:
        DCHECK(!scratch.is(rs));
        r2 = scratch;
        li(r2, rt);
        offset = shifted_branch_offset(L, false);
        beq(rs, r2, offset);
        break;
      case ne:
        DCHECK(!scratch.is(rs));
        r2 = scratch;
        li(r2, rt);
        offset = shifted_branch_offset(L, false);
        bne(rs, r2, offset);
        break;
      // Signed comparison.
      case greater:
        if (rt.imm32_ == 0) {
          offset = shifted_branch_offset(L, false);
          bgtz(rs, offset);
        } else {
          DCHECK(!scratch.is(rs));
          r2 = scratch;
          li(r2, rt);
          slt(scratch, r2, rs);
          offset = shifted_branch_offset(L, false);
          bne(scratch, zero_reg, offset);
        }
        break;
      case greater_equal:
        if (rt.imm32_ == 0) {
          offset = shifted_branch_offset(L, false);
          bgez(rs, offset);
        } else if (is_int16(rt.imm32_)) {
          slti(scratch, rs, rt.imm32_);
          offset = shifted_branch_offset(L, false);
          beq(scratch, zero_reg, offset);
        } else {
          DCHECK(!scratch.is(rs));
          r2 = scratch;
          li(r2, rt);
          slt(scratch, rs, r2);
          offset = shifted_branch_offset(L, false);
          beq(scratch, zero_reg, offset);
        }
        break;
      case less:
        if (rt.imm32_ == 0) {
          offset = shifted_branch_offset(L, false);
          bltz(rs, offset);
        } else if (is_int16(rt.imm32_)) {
          slti(scratch, rs, rt.imm32_);
          offset = shifted_branch_offset(L, false);
          bne(scratch, zero_reg, offset);
        } else {
          DCHECK(!scratch.is(rs));
          r2 = scratch;
          li(r2, rt);
          slt(scratch, rs, r2);
          offset = shifted_branch_offset(L, false);
          bne(scratch, zero_reg, offset);
        }
        break;
      case less_equal:
        if (rt.imm32_ == 0) {
          offset = shifted_branch_offset(L, false);
          blez(rs, offset);
        } else {
          DCHECK(!scratch.is(rs));
          r2 = scratch;
          li(r2, rt);
          slt(scratch, r2, rs);
          offset = shifted_branch_offset(L, false);
          beq(scratch, zero_reg, offset);
        }
        break;
      // Unsigned comparison.
      case Ugreater:
        if (rt.imm32_ == 0) {
          // Unsigned rs > 0 is equivalent to rs != 0.
          offset = shifted_branch_offset(L, false);
          bne(rs, zero_reg, offset);
        } else {
          DCHECK(!scratch.is(rs));
          r2 = scratch;
          li(r2, rt);
          sltu(scratch, r2, rs);
          offset = shifted_branch_offset(L, false);
          bne(scratch, zero_reg, offset);
        }
        break;
      case Ugreater_equal:
        if (rt.imm32_ == 0) {
          // Unsigned rs >= 0 is always true.
          offset = shifted_branch_offset(L, false);
          b(offset);
        } else if (is_int16(rt.imm32_)) {
          sltiu(scratch, rs, rt.imm32_);
          offset = shifted_branch_offset(L, false);
          beq(scratch, zero_reg, offset);
        } else {
          DCHECK(!scratch.is(rs));
          r2 = scratch;
          li(r2, rt);
          sltu(scratch, rs, r2);
          offset = shifted_branch_offset(L, false);
          beq(scratch, zero_reg, offset);
        }
        break;
      case Uless:
        if (rt.imm32_ == 0) {
          // Unsigned rs < 0 is never true; no code needs to be emitted.
          return;
        } else if (is_int16(rt.imm32_)) {
          sltiu(scratch, rs, rt.imm32_);
          offset = shifted_branch_offset(L, false);
          bne(scratch, zero_reg, offset);
        } else {
          DCHECK(!scratch.is(rs));
          r2 = scratch;
          li(r2, rt);
          sltu(scratch, rs, r2);
          offset = shifted_branch_offset(L, false);
          bne(scratch, zero_reg, offset);
        }
        break;
      case Uless_equal:
        if (rt.imm32_ == 0) {
          // Unsigned rs <= 0 is equivalent to rs == 0.
          offset = shifted_branch_offset(L, false);
          beq(rs, zero_reg, offset);
        } else {
          DCHECK(!scratch.is(rs));
          r2 = scratch;
          li(r2, rt);
          sltu(scratch, r2, rs);
          offset = shifted_branch_offset(L, false);
          beq(scratch, zero_reg, offset);
        }
        break;
      default:
        UNREACHABLE();
    }
  }
  // Check that offset fits in an int16_t.
  DCHECK(is_int16(offset));
  // Emit a nop in the branch delay slot if required.
  if (bdslot == PROTECT)
    nop();
}


void MacroAssembler::BranchAndLink(int16_t offset, BranchDelaySlot bdslot) {
  BranchAndLinkShort(offset, bdslot);
}


void MacroAssembler::BranchAndLink(int16_t offset, Condition cond, Register rs,
                                   const Operand& rt,
                                   BranchDelaySlot bdslot) {
  BranchAndLinkShort(offset, cond, rs, rt, bdslot);
}


void MacroAssembler::BranchAndLink(Label* L, BranchDelaySlot bdslot) {
  if (L->is_bound()) {
    if (is_near(L)) {
      BranchAndLinkShort(L, bdslot);
    } else {
      Jalr(L, bdslot);
    }
  } else {
    if (is_trampoline_emitted()) {
      Jalr(L, bdslot);
    } else {
      BranchAndLinkShort(L, bdslot);
    }
  }
}


void MacroAssembler::BranchAndLink(Label* L, Condition cond, Register rs,
                                   const Operand& rt,
                                   BranchDelaySlot bdslot) {
  if (L->is_bound()) {
    if (is_near(L)) {
      BranchAndLinkShort(L, cond, rs, rt, bdslot);
    } else {
      Label skip;
      Condition neg_cond = NegateCondition(cond);
      BranchShort(&skip, neg_cond, rs, rt);
      Jalr(L, bdslot);
      bind(&skip);
    }
  } else {
    if (is_trampoline_emitted()) {
      Label skip;
      Condition neg_cond = NegateCondition(cond);
      BranchShort(&skip, neg_cond, rs, rt);
      Jalr(L, bdslot);
      bind(&skip);
    } else {
      BranchAndLinkShort(L, cond, rs, rt, bdslot);
    }
  }
}


// We need to use a bgezal or bltzal, but they can't be used directly with the
// slt instructions. We could use sub or add instead, but we would miss
// overflow cases, so we keep slt and add an intermediate third instruction.
void MacroAssembler::BranchAndLinkShort(int16_t offset,
                                        BranchDelaySlot bdslot) {
  bal(offset);

  // Emit a nop in the branch delay slot if required.
  if (bdslot == PROTECT)
    nop();
}
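
// Illustrative note: the slt/addiu/bgezal idiom mentioned above maps the
// boolean 0/1 produced by slt onto -1/0, e.g. for (rs > rt):
//   slt(scratch, rt, rs);         // scratch = 1 if rs > rt, else 0
//   addiu(scratch, scratch, -1);  // scratch = 0 if taken, -1 otherwise
//   bgezal(scratch, offset);      // link and branch when scratch == 0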


void MacroAssembler::BranchAndLinkShort(int16_t offset, Condition cond,
                                        Register rs, const Operand& rt,
                                        BranchDelaySlot bdslot) {
  BRANCH_ARGS_CHECK(cond, rs, rt);
  Register r2 = no_reg;
  Register scratch = at;

  if (rt.is_reg()) {
    r2 = rt.rm_;
  } else if (cond != cc_always) {
    r2 = scratch;
    li(r2, rt);
  }

  if (!IsMipsArchVariant(kMips32r6)) {
    BlockTrampolinePoolScope block_trampoline_pool(this);
    switch (cond) {
      case cc_always:
        bal(offset);
        break;
      case eq:
        bne(rs, r2, 2);
        nop();
        bal(offset);
        break;
      case ne:
        beq(rs, r2, 2);
        nop();
        bal(offset);
        break;

      // Signed comparison.
      case greater:
        slt(scratch, r2, rs);
        addiu(scratch, scratch, -1);
        bgezal(scratch, offset);
        break;
      case greater_equal:
        slt(scratch, rs, r2);
        addiu(scratch, scratch, -1);
        bltzal(scratch, offset);
        break;
      case less:
        slt(scratch, rs, r2);
        addiu(scratch, scratch, -1);
        bgezal(scratch, offset);
        break;
      case less_equal:
        slt(scratch, r2, rs);
        addiu(scratch, scratch, -1);
        bltzal(scratch, offset);
        break;

      // Unsigned comparison.
      case Ugreater:
        sltu(scratch, r2, rs);
        addiu(scratch, scratch, -1);
        bgezal(scratch, offset);
        break;
      case Ugreater_equal:
        sltu(scratch, rs, r2);
        addiu(scratch, scratch, -1);
        bltzal(scratch, offset);
        break;
      case Uless:
        sltu(scratch, rs, r2);
        addiu(scratch, scratch, -1);
        bgezal(scratch, offset);
        break;
      case Uless_equal:
        sltu(scratch, r2, rs);
        addiu(scratch, scratch, -1);
        bltzal(scratch, offset);
        break;

      default:
        UNREACHABLE();
    }
  } else {
    BlockTrampolinePoolScope block_trampoline_pool(this);
    switch (cond) {
      case cc_always:
        bal(offset);
        break;
      case eq:
        bne(rs, r2, 2);
        nop();
        bal(offset);
        break;
      case ne:
        beq(rs, r2, 2);
        nop();
        bal(offset);
        break;

      // Signed comparison.
      case greater:
        // rs > rt
        slt(scratch, r2, rs);
        beq(scratch, zero_reg, 2);
        nop();
        bal(offset);
        break;
      case greater_equal:
        // rs >= rt
        slt(scratch, rs, r2);
        bne(scratch, zero_reg, 2);
        nop();
        bal(offset);
        break;
      case less:
        // rs < r2
        slt(scratch, rs, r2);
        bne(scratch, zero_reg, 2);
        nop();
        bal(offset);
        break;
      case less_equal:
        // rs <= r2
        slt(scratch, r2, rs);
        bne(scratch, zero_reg, 2);
        nop();
        bal(offset);
        break;

      // Unsigned comparison.
      case Ugreater:
        // rs > rt
        sltu(scratch, r2, rs);
        beq(scratch, zero_reg, 2);
        nop();
        bal(offset);
        break;
      case Ugreater_equal:
        // rs >= rt
        sltu(scratch, rs, r2);
        bne(scratch, zero_reg, 2);
        nop();
        bal(offset);
        break;
      case Uless:
        // rs < r2
        sltu(scratch, rs, r2);
        bne(scratch, zero_reg, 2);
        nop();
        bal(offset);
        break;
      case Uless_equal:
        // rs <= r2
        sltu(scratch, r2, rs);
        bne(scratch, zero_reg, 2);
        nop();
        bal(offset);
        break;
      default:
        UNREACHABLE();
    }
  }

  // Emit a nop in the branch delay slot if required.
  if (bdslot == PROTECT)
    nop();
}
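
// Illustrative note: on mips32r6 the conditional cases above instead skip
// over the bal with an ordinary compare-and-branch; the offset 2 in
// beq/bne(..., 2) jumps past two instructions, the delay-slot nop and the
// bal itself. This is needed because r6 removed the conditional link forms
// bgezal/bltzal with a general register (only bal remains).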


void MacroAssembler::BranchAndLinkShort(Label* L, BranchDelaySlot bdslot) {
  bal(shifted_branch_offset(L, false));

  // Emit a nop in the branch delay slot if required.
  if (bdslot == PROTECT)
    nop();
}


void MacroAssembler::BranchAndLinkShort(Label* L, Condition cond, Register rs,
                                        const Operand& rt,
                                        BranchDelaySlot bdslot) {
  BRANCH_ARGS_CHECK(cond, rs, rt);

  int32_t offset = 0;
  Register r2 = no_reg;
  Register scratch = at;
  if (rt.is_reg()) {
    r2 = rt.rm_;
  } else if (cond != cc_always) {
    r2 = scratch;
    li(r2, rt);
  }

  if (!IsMipsArchVariant(kMips32r6)) {
    BlockTrampolinePoolScope block_trampoline_pool(this);
    switch (cond) {
      case cc_always:
        offset = shifted_branch_offset(L, false);
        bal(offset);
        break;
      case eq:
        bne(rs, r2, 2);
        nop();
        offset = shifted_branch_offset(L, false);
        bal(offset);
        break;
      case ne:
        beq(rs, r2, 2);
        nop();
        offset = shifted_branch_offset(L, false);
        bal(offset);
        break;

      // Signed comparison.
      case greater:
        slt(scratch, r2, rs);
        addiu(scratch, scratch, -1);
        offset = shifted_branch_offset(L, false);
        bgezal(scratch, offset);
        break;
      case greater_equal:
        slt(scratch, rs, r2);
        addiu(scratch, scratch, -1);
        offset = shifted_branch_offset(L, false);
        bltzal(scratch, offset);
        break;
      case less:
        slt(scratch, rs, r2);
        addiu(scratch, scratch, -1);
        offset = shifted_branch_offset(L, false);
        bgezal(scratch, offset);
        break;
      case less_equal:
        slt(scratch, r2, rs);
        addiu(scratch, scratch, -1);
        offset = shifted_branch_offset(L, false);
        bltzal(scratch, offset);
        break;

      // Unsigned comparison.
      case Ugreater:
        sltu(scratch, r2, rs);
        addiu(scratch, scratch, -1);
        offset = shifted_branch_offset(L, false);
        bgezal(scratch, offset);
        break;
      case Ugreater_equal:
        sltu(scratch, rs, r2);
        addiu(scratch, scratch, -1);
        offset = shifted_branch_offset(L, false);
        bltzal(scratch, offset);
        break;
      case Uless:
        sltu(scratch, rs, r2);
        addiu(scratch, scratch, -1);
        offset = shifted_branch_offset(L, false);
        bgezal(scratch, offset);
        break;
      case Uless_equal:
        sltu(scratch, r2, rs);
        addiu(scratch, scratch, -1);
        offset = shifted_branch_offset(L, false);
        bltzal(scratch, offset);
        break;

      default:
        UNREACHABLE();
    }
  } else {
    BlockTrampolinePoolScope block_trampoline_pool(this);
    switch (cond) {
      case cc_always:
        offset = shifted_branch_offset(L, false);
        bal(offset);
        break;
      case eq:
        bne(rs, r2, 2);
        nop();
        offset = shifted_branch_offset(L, false);
        bal(offset);
        break;
      case ne:
        beq(rs, r2, 2);
        nop();
        offset = shifted_branch_offset(L, false);
        bal(offset);
        break;

      // Signed comparison.
      case greater:
        // rs > rt
        slt(scratch, r2, rs);
        beq(scratch, zero_reg, 2);
        nop();
        offset = shifted_branch_offset(L, false);
        bal(offset);
        break;
      case greater_equal:
        // rs >= rt
        slt(scratch, rs, r2);
        bne(scratch, zero_reg, 2);
        nop();
        offset = shifted_branch_offset(L, false);
        bal(offset);
        break;
      case less:
        // rs < r2
        slt(scratch, rs, r2);
        bne(scratch, zero_reg, 2);
        nop();
        offset = shifted_branch_offset(L, false);
        bal(offset);
        break;
      case less_equal:
        // rs <= r2
        slt(scratch, r2, rs);
        bne(scratch, zero_reg, 2);
        nop();
        offset = shifted_branch_offset(L, false);
        bal(offset);
        break;

      // Unsigned comparison.
      case Ugreater:
        // rs > rt
        sltu(scratch, r2, rs);
        beq(scratch, zero_reg, 2);
        nop();
        offset = shifted_branch_offset(L, false);
        bal(offset);
        break;
      case Ugreater_equal:
        // rs >= rt
        sltu(scratch, rs, r2);
        bne(scratch, zero_reg, 2);
        nop();
        offset = shifted_branch_offset(L, false);
        bal(offset);
        break;
      case Uless:
        // rs < r2
        sltu(scratch, rs, r2);
        bne(scratch, zero_reg, 2);
        nop();
        offset = shifted_branch_offset(L, false);
        bal(offset);
        break;
      case Uless_equal:
        // rs <= r2
        sltu(scratch, r2, rs);
        bne(scratch, zero_reg, 2);
        nop();
        offset = shifted_branch_offset(L, false);
        bal(offset);
        break;

      default:
        UNREACHABLE();
    }
  }

  // Check that offset fits in an int16_t.
  DCHECK(is_int16(offset));

  // Emit a nop in the branch delay slot if required.
  if (bdslot == PROTECT)
    nop();
}


void MacroAssembler::Jump(Register target,
                          Condition cond,
                          Register rs,
                          const Operand& rt,
                          BranchDelaySlot bd) {
  BlockTrampolinePoolScope block_trampoline_pool(this);
  if (cond == cc_always) {
    jr(target);
  } else {
    BRANCH_ARGS_CHECK(cond, rs, rt);
    Branch(2, NegateCondition(cond), rs, rt);
    jr(target);
  }
  // Emit a nop in the branch delay slot if required.
  if (bd == PROTECT)
    nop();
}


void MacroAssembler::Jump(intptr_t target,
                          RelocInfo::Mode rmode,
                          Condition cond,
                          Register rs,
                          const Operand& rt,
                          BranchDelaySlot bd) {
  Label skip;
  if (cond != cc_always) {
    Branch(USE_DELAY_SLOT, &skip, NegateCondition(cond), rs, rt);
  }
  // The first instruction of 'li' may be placed in the delay slot.
  // This is not an issue; t9 is expected to be clobbered anyway.
  li(t9, Operand(target, rmode));
  Jump(t9, al, zero_reg, Operand(zero_reg), bd);
  bind(&skip);
}


void MacroAssembler::Jump(Address target,
                          RelocInfo::Mode rmode,
                          Condition cond,
                          Register rs,
                          const Operand& rt,
                          BranchDelaySlot bd) {
  DCHECK(!RelocInfo::IsCodeTarget(rmode));
  Jump(reinterpret_cast<intptr_t>(target), rmode, cond, rs, rt, bd);
}


void MacroAssembler::Jump(Handle<Code> code,
                          RelocInfo::Mode rmode,
                          Condition cond,
                          Register rs,
                          const Operand& rt,
                          BranchDelaySlot bd) {
  DCHECK(RelocInfo::IsCodeTarget(rmode));
  AllowDeferredHandleDereference embedding_raw_address;
  Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond, rs, rt, bd);
}


int MacroAssembler::CallSize(Register target,
                             Condition cond,
                             Register rs,
                             const Operand& rt,
                             BranchDelaySlot bd) {
  int size = 0;

  if (cond == cc_always) {
    size += 1;
  } else {
    size += 3;
  }

  if (bd == PROTECT)
    size += 1;

  return size * kInstrSize;
}
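
// Worked example (illustrative): with the default PROTECT delay slot, an
// unconditional Call(Register) is jalr + nop = 2 * kInstrSize (8 bytes),
// while a conditional one is branch + delay nop + jalr + nop (16 bytes).
// CallSize(Address, ...) below adds 2 * kInstrSize on top of that for the
// two-instruction lui/ori sequence that li() emits when loading the target
// address into t9 with CONSTANT_SIZE. CallSize must agree exactly with what
// Call() emits; the DCHECK_EQ in Call() verifies this.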


// Note: To call gcc-compiled C code on mips, you must call through t9.
void MacroAssembler::Call(Register target,
                          Condition cond,
                          Register rs,
                          const Operand& rt,
                          BranchDelaySlot bd) {
  BlockTrampolinePoolScope block_trampoline_pool(this);
  Label start;
  bind(&start);
  if (cond == cc_always) {
    jalr(target);
  } else {
    BRANCH_ARGS_CHECK(cond, rs, rt);
    Branch(2, NegateCondition(cond), rs, rt);
    jalr(target);
  }
  // Emit a nop in the branch delay slot if required.
  if (bd == PROTECT)
    nop();

  DCHECK_EQ(CallSize(target, cond, rs, rt, bd),
            SizeOfCodeGeneratedSince(&start));
}


int MacroAssembler::CallSize(Address target,
                             RelocInfo::Mode rmode,
                             Condition cond,
                             Register rs,
                             const Operand& rt,
                             BranchDelaySlot bd) {
  int size = CallSize(t9, cond, rs, rt, bd);
  return size + 2 * kInstrSize;
}


void MacroAssembler::Call(Address target,
                          RelocInfo::Mode rmode,
                          Condition cond,
                          Register rs,
                          const Operand& rt,
                          BranchDelaySlot bd) {
  BlockTrampolinePoolScope block_trampoline_pool(this);
  Label start;
  bind(&start);
  int32_t target_int = reinterpret_cast<int32_t>(target);
  // Must record previous source positions before the
  // li() generates a new code target.
  positions_recorder()->WriteRecordedPositions();
  li(t9, Operand(target_int, rmode), CONSTANT_SIZE);
  Call(t9, cond, rs, rt, bd);
  DCHECK_EQ(CallSize(target, rmode, cond, rs, rt, bd),
            SizeOfCodeGeneratedSince(&start));
}


int MacroAssembler::CallSize(Handle<Code> code,
                             RelocInfo::Mode rmode,
                             TypeFeedbackId ast_id,
                             Condition cond,
                             Register rs,
                             const Operand& rt,
                             BranchDelaySlot bd) {
  AllowDeferredHandleDereference using_raw_address;
  return CallSize(reinterpret_cast<Address>(code.location()),
                  rmode, cond, rs, rt, bd);
}


void MacroAssembler::Call(Handle<Code> code,
                          RelocInfo::Mode rmode,
                          TypeFeedbackId ast_id,
                          Condition cond,
                          Register rs,
                          const Operand& rt,
                          BranchDelaySlot bd) {
  BlockTrampolinePoolScope block_trampoline_pool(this);
  Label start;
  bind(&start);
  DCHECK(RelocInfo::IsCodeTarget(rmode));
  if (rmode == RelocInfo::CODE_TARGET && !ast_id.IsNone()) {
    SetRecordedAstId(ast_id);
    rmode = RelocInfo::CODE_TARGET_WITH_ID;
  }
  AllowDeferredHandleDereference embedding_raw_address;
  Call(reinterpret_cast<Address>(code.location()), rmode, cond, rs, rt, bd);
  DCHECK_EQ(CallSize(code, rmode, ast_id, cond, rs, rt, bd),
            SizeOfCodeGeneratedSince(&start));
}


void MacroAssembler::Ret(Condition cond,
                         Register rs,
                         const Operand& rt,
                         BranchDelaySlot bd) {
  Jump(ra, cond, rs, rt, bd);
}


void MacroAssembler::J(Label* L, BranchDelaySlot bdslot) {
  BlockTrampolinePoolScope block_trampoline_pool(this);

  uint32_t imm28;
  imm28 = jump_address(L);
  imm28 &= kImm28Mask;
  { BlockGrowBufferScope block_buf_growth(this);
    // Buffer growth (and relocation) must be blocked for internal references
    // until associated instructions are emitted and available to be patched.
    RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE);
    j(imm28);
  }
  // Emit a nop in the branch delay slot if required.
  if (bdslot == PROTECT)
    nop();
}


void MacroAssembler::Jr(Label* L, BranchDelaySlot bdslot) {
  BlockTrampolinePoolScope block_trampoline_pool(this);

  uint32_t imm32;
  imm32 = jump_address(L);
  { BlockGrowBufferScope block_buf_growth(this);
    // Buffer growth (and relocation) must be blocked for internal references
    // until associated instructions are emitted and available to be patched.
    RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE);
    lui(at, (imm32 & kHiMask) >> kLuiShift);
    ori(at, at, (imm32 & kImm16Mask));
  }
  jr(at);

  // Emit a nop in the branch delay slot if required.
  if (bdslot == PROTECT)
    nop();
}


void MacroAssembler::Jalr(Label* L, BranchDelaySlot bdslot) {
  BlockTrampolinePoolScope block_trampoline_pool(this);

  uint32_t imm32;
  imm32 = jump_address(L);
  { BlockGrowBufferScope block_buf_growth(this);
    // Buffer growth (and relocation) must be blocked for internal references
    // until associated instructions are emitted and available to be patched.
    RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE);
    lui(at, (imm32 & kHiMask) >> kLuiShift);
    ori(at, at, (imm32 & kImm16Mask));
  }
  jalr(at);

  // Emit a nop in the branch delay slot if required.
  if (bdslot == PROTECT)
    nop();
}


void MacroAssembler::DropAndRet(int drop) {
  Ret(USE_DELAY_SLOT);
  addiu(sp, sp, drop * kPointerSize);
}
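
// Illustrative note: Ret(USE_DELAY_SLOT) places the following addiu into the
// jr ra delay slot, so the stack adjustment executes "for free" while the
// return is in flight; the unconditional DropAndRet is thus 2 instructions.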


void MacroAssembler::DropAndRet(int drop,
                                Condition cond,
                                Register r1,
                                const Operand& r2) {
  // Both Drop and Ret need to be conditional.
  Label skip;
  if (cond != cc_always) {
    Branch(&skip, NegateCondition(cond), r1, r2);
  }

  Drop(drop);
  Ret();

  if (cond != cc_always) {
    bind(&skip);
  }
}


void MacroAssembler::Drop(int count,
                          Condition cond,
                          Register reg,
                          const Operand& op) {
  if (count <= 0) {
    return;
  }

  Label skip;

  if (cond != al) {
    Branch(&skip, NegateCondition(cond), reg, op);
  }

  addiu(sp, sp, count * kPointerSize);

  if (cond != al) {
    bind(&skip);
  }
}


void MacroAssembler::Swap(Register reg1,
                          Register reg2,
                          Register scratch) {
  if (scratch.is(no_reg)) {
    Xor(reg1, reg1, Operand(reg2));
    Xor(reg2, reg2, Operand(reg1));
    Xor(reg1, reg1, Operand(reg2));
  } else {
    mov(scratch, reg1);
    mov(reg1, reg2);
    mov(reg2, scratch);
  }
}


void MacroAssembler::Call(Label* target) {
  BranchAndLink(target);
}


void MacroAssembler::Push(Handle<Object> handle) {
  li(at, Operand(handle));
  push(at);
}


void MacroAssembler::DebugBreak() {
  PrepareCEntryArgs(0);
  PrepareCEntryFunction(ExternalReference(Runtime::kDebugBreak, isolate()));
  CEntryStub ces(isolate(), 1);
  DCHECK(AllowThisStubCall(&ces));
  Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}


// ---------------------------------------------------------------------------
// Exception handling.

void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
                                    int handler_index) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // For the JSEntry handler, we must preserve a0-a3 and s0.
  // t1-t3 are available. We will build up the handler from the bottom by
  // pushing on the stack.
  // Set up the code object (t1) and the state (t2) for pushing.
  unsigned state =
      StackHandler::IndexField::encode(handler_index) |
      StackHandler::KindField::encode(kind);
  li(t1, Operand(CodeObject()), CONSTANT_SIZE);
  li(t2, Operand(state));

  // Push the frame pointer, context, state, and code object.
  if (kind == StackHandler::JS_ENTRY) {
    DCHECK_EQ(Smi::FromInt(0), 0);
    // The second zero_reg indicates no context.
    // The first zero_reg is the NULL frame pointer.
    // The operands are reversed to match the order of MultiPush/Pop.
    Push(zero_reg, zero_reg, t2, t1);
  } else {
    MultiPush(t1.bit() | t2.bit() | cp.bit() | fp.bit());
  }

  // Link the current handler as the next handler.
  li(t2, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
  lw(t1, MemOperand(t2));
  push(t1);
  // Set this new handler as the current one.
  sw(sp, MemOperand(t2));
}
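
// Illustrative note: after PushTryHandler the handler occupies five words on
// the stack, matching the STATIC_ASSERTs above (sp points at the lowest):
//   sp + 0:  next handler (link)
//   sp + 4:  code object
//   sp + 8:  state (kind | handler index)
//   sp + 12: context (cp, or 0 for JS_ENTRY)
//   sp + 16: frame pointer (fp, or 0 for JS_ENTRY)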


void MacroAssembler::PopTryHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  pop(a1);
  Addu(sp, sp, Operand(StackHandlerConstants::kSize - kPointerSize));
  li(at, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
  sw(a1, MemOperand(at));
}


void MacroAssembler::JumpToHandlerEntry() {
  // Compute the handler entry address and jump to it. The handler table is
  // a fixed array of (smi-tagged) code offsets.
  // v0 = exception, a1 = code object, a2 = state.
  lw(a3, FieldMemOperand(a1, Code::kHandlerTableOffset));  // Handler table.
  Addu(a3, a3, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  srl(a2, a2, StackHandler::kKindWidth);  // Handler index.
  sll(a2, a2, kPointerSizeLog2);
  Addu(a2, a3, a2);
  lw(a2, MemOperand(a2));  // Smi-tagged offset.
  Addu(a1, a1, Operand(Code::kHeaderSize - kHeapObjectTag));  // Code start.
  sra(t9, a2, kSmiTagSize);
  Addu(t9, t9, a1);
  Jump(t9);  // Jump.
}


void MacroAssembler::Throw(Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // The exception is expected in v0.
  Move(v0, value);

  // Drop the stack pointer to the top of the top handler.
  li(a3, Operand(ExternalReference(Isolate::kHandlerAddress,
                                   isolate())));
  lw(sp, MemOperand(a3));

  // Restore the next handler.
  pop(a2);
  sw(a2, MemOperand(a3));

  // Get the code object (a1) and state (a2). Restore the context and frame
  // pointer.
  MultiPop(a1.bit() | a2.bit() | cp.bit() | fp.bit());

  // If the handler is a JS frame, restore the context to the frame.
  // (kind == ENTRY) == (fp == 0) == (cp == 0), so we could test either fp
  // or cp.
  Label done;
  Branch(&done, eq, cp, Operand(zero_reg));
  sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  bind(&done);

  JumpToHandlerEntry();
}
3211
3212
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003213void MacroAssembler::ThrowUncatchable(Register value) {
Ben Murdoch257744e2011-11-30 15:57:28 +00003214 // Adjust this code if not the case.
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003215 STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
3216 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003217 STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
3218 STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
3219 STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
3220 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);
Ben Murdoch257744e2011-11-30 15:57:28 +00003221
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003222 // The exception is expected in v0.
3223 if (!value.is(v0)) {
3224 mov(v0, value);
3225 }
3226 // Drop the stack pointer to the top of the top stack handler.
Ben Murdoch592a9fc2012-03-05 11:04:45 +00003227 li(a3, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
3228 lw(sp, MemOperand(a3));
Ben Murdoch257744e2011-11-30 15:57:28 +00003229
Ben Murdoch592a9fc2012-03-05 11:04:45 +00003230 // Unwind the handlers until the ENTRY handler is found.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003231 Label fetch_next, check_kind;
3232 jmp(&check_kind);
3233 bind(&fetch_next);
3234 lw(sp, MemOperand(sp, StackHandlerConstants::kNextOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00003235
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003236 bind(&check_kind);
3237 STATIC_ASSERT(StackHandler::JS_ENTRY == 0);
3238 lw(a2, MemOperand(sp, StackHandlerConstants::kStateOffset));
3239 And(a2, a2, Operand(StackHandler::KindField::kMask));
3240 Branch(&fetch_next, ne, a2, Operand(zero_reg));
3241
3242 // Set the top handler address to next handler past the top ENTRY handler.
Ben Murdoch592a9fc2012-03-05 11:04:45 +00003243 pop(a2);
3244 sw(a2, MemOperand(a3));
Ben Murdoch257744e2011-11-30 15:57:28 +00003245
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003246 // Get the code object (a1) and state (a2). Clear the context and frame
3247 // pointer (0 was saved in the handler).
3248 MultiPop(a1.bit() | a2.bit() | cp.bit() | fp.bit());
Ben Murdoch257744e2011-11-30 15:57:28 +00003249
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003250 JumpToHandlerEntry();
Ben Murdoch257744e2011-11-30 15:57:28 +00003251}
3252
3253
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003254void MacroAssembler::Allocate(int object_size,
3255 Register result,
3256 Register scratch1,
3257 Register scratch2,
3258 Label* gc_required,
3259 AllocationFlags flags) {
3260 DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
Steve Block44f0eee2011-05-26 01:26:41 +01003261 if (!FLAG_inline_new) {
Ben Murdoch257744e2011-11-30 15:57:28 +00003262 if (emit_debug_code()) {
Steve Block44f0eee2011-05-26 01:26:41 +01003263 // Trash the registers to simulate an allocation failure.
3264 li(result, 0x7091);
3265 li(scratch1, 0x7191);
3266 li(scratch2, 0x7291);
3267 }
3268 jmp(gc_required);
3269 return;
Steve Block6ded16b2010-05-10 14:33:55 +01003270 }
3271
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003272 DCHECK(!result.is(scratch1));
3273 DCHECK(!result.is(scratch2));
3274 DCHECK(!scratch1.is(scratch2));
3275 DCHECK(!scratch1.is(t9));
3276 DCHECK(!scratch2.is(t9));
3277 DCHECK(!result.is(t9));
Steve Block6ded16b2010-05-10 14:33:55 +01003278
Steve Block44f0eee2011-05-26 01:26:41 +01003279 // Make object size into bytes.
3280 if ((flags & SIZE_IN_WORDS) != 0) {
3281 object_size *= kPointerSize;
3282 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003283 DCHECK_EQ(0, object_size & kObjectAlignmentMask);
Steve Block6ded16b2010-05-10 14:33:55 +01003284
Steve Block44f0eee2011-05-26 01:26:41 +01003285 // Check relative positions of allocation top and limit addresses.
3286 // ARM adds additional checks to make sure the ldm instruction can be
3287 // used. On MIPS we don't have ldm so we don't need additional checks either.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003288 ExternalReference allocation_top =
3289 AllocationUtils::GetAllocationTopReference(isolate(), flags);
3290 ExternalReference allocation_limit =
3291 AllocationUtils::GetAllocationLimitReference(isolate(), flags);
3292
Steve Block44f0eee2011-05-26 01:26:41 +01003293 intptr_t top =
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003294 reinterpret_cast<intptr_t>(allocation_top.address());
Steve Block44f0eee2011-05-26 01:26:41 +01003295 intptr_t limit =
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003296 reinterpret_cast<intptr_t>(allocation_limit.address());
3297 DCHECK((limit - top) == kPointerSize);
Steve Block44f0eee2011-05-26 01:26:41 +01003298
3299 // Set up allocation top address and object size registers.
3300 Register topaddr = scratch1;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003301 li(topaddr, Operand(allocation_top));
Steve Block44f0eee2011-05-26 01:26:41 +01003302
3303 // This code stores a temporary value in t9.
3304 if ((flags & RESULT_CONTAINS_TOP) == 0) {
3305 // Load allocation top into result and allocation limit into t9.
3306 lw(result, MemOperand(topaddr));
3307 lw(t9, MemOperand(topaddr, kPointerSize));
3308 } else {
Ben Murdoch257744e2011-11-30 15:57:28 +00003309 if (emit_debug_code()) {
Steve Block44f0eee2011-05-26 01:26:41 +01003310 // Assert that result actually contains top on entry. t9 is used
3311 // immediately below so this use of t9 does not cause difference with
3312 // respect to register content between debug and release mode.
3313 lw(t9, MemOperand(topaddr));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003314 Check(eq, kUnexpectedAllocationTop, result, Operand(t9));
Steve Block44f0eee2011-05-26 01:26:41 +01003315 }
3316 // Load allocation limit into t9. Result already contains allocation top.
3317 lw(t9, MemOperand(topaddr, limit - top));
3318 }
3319
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003320 if ((flags & DOUBLE_ALIGNMENT) != 0) {
3321 // Align the next allocation. Storing the filler map without checking top is
3322 // safe in new-space because the limit of the heap is aligned there.
3323 DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
3324 DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
3325 And(scratch2, result, Operand(kDoubleAlignmentMask));
3326 Label aligned;
3327 Branch(&aligned, eq, scratch2, Operand(zero_reg));
3328 if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
3329 Branch(gc_required, Ugreater_equal, result, Operand(t9));
3330 }
3331 li(scratch2, Operand(isolate()->factory()->one_pointer_filler_map()));
3332 sw(scratch2, MemOperand(result));
3333 Addu(result, result, Operand(kDoubleSize / 2));
3334 bind(&aligned);
3335 }
3336
Steve Block44f0eee2011-05-26 01:26:41 +01003337 // Calculate new top and bail out if new space is exhausted. Use result
3338 // to calculate the new top.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003339 Addu(scratch2, result, Operand(object_size));
Steve Block44f0eee2011-05-26 01:26:41 +01003340 Branch(gc_required, Ugreater, scratch2, Operand(t9));
3341 sw(scratch2, MemOperand(topaddr));
3342
3343 // Tag object if requested.
3344 if ((flags & TAG_OBJECT) != 0) {
3345 Addu(result, result, Operand(kHeapObjectTag));
3346 }
Steve Block6ded16b2010-05-10 14:33:55 +01003347}
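
// DOUBLE_ALIGNMENT walk-through (illustrative addresses, assuming 8-byte
// kDoubleAlignment on this 32-bit target): if the loaded top is 0x12345674,
// the And leaves 4, so a one-pointer filler map is stored at that word and
// result advances by kDoubleSize / 2 to the 8-byte-aligned 0x12345678 before
// the size check runs.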


void MacroAssembler::Allocate(Register object_size,
                              Register result,
                              Register scratch1,
                              Register scratch2,
                              Label* gc_required,
                              AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      li(result, 0x7091);
      li(scratch1, 0x7191);
      li(scratch2, 0x7291);
    }
    jmp(gc_required);
    return;
  }

  DCHECK(!result.is(scratch1));
  DCHECK(!result.is(scratch2));
  DCHECK(!scratch1.is(scratch2));
  DCHECK(!object_size.is(t9));
  DCHECK(!scratch1.is(t9) && !scratch2.is(t9) && !result.is(t9));

  // Check relative positions of allocation top and limit addresses.
  // ARM adds additional checks to make sure the ldm instruction can be
  // used. On MIPS we don't have ldm so we don't need additional checks either.
  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);
  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);
  intptr_t top =
      reinterpret_cast<intptr_t>(allocation_top.address());
  intptr_t limit =
      reinterpret_cast<intptr_t>(allocation_limit.address());
  DCHECK((limit - top) == kPointerSize);

  // Set up allocation top address and object size registers.
  Register topaddr = scratch1;
  li(topaddr, Operand(allocation_top));

  // This code stores a temporary value in t9.
  if ((flags & RESULT_CONTAINS_TOP) == 0) {
    // Load allocation top into result and allocation limit into t9.
    lw(result, MemOperand(topaddr));
    lw(t9, MemOperand(topaddr, kPointerSize));
  } else {
    if (emit_debug_code()) {
      // Assert that result actually contains top on entry. t9 is used
      // immediately below so this use of t9 does not cause difference with
      // respect to register content between debug and release mode.
      lw(t9, MemOperand(topaddr));
      Check(eq, kUnexpectedAllocationTop, result, Operand(t9));
    }
    // Load allocation limit into t9. Result already contains allocation top.
    lw(t9, MemOperand(topaddr, limit - top));
  }

  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    // Align the next allocation. Storing the filler map without checking top
    // is safe in new-space because the limit of the heap is aligned there.
    DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    And(scratch2, result, Operand(kDoubleAlignmentMask));
    Label aligned;
    Branch(&aligned, eq, scratch2, Operand(zero_reg));
    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
      Branch(gc_required, Ugreater_equal, result, Operand(t9));
    }
    li(scratch2, Operand(isolate()->factory()->one_pointer_filler_map()));
    sw(scratch2, MemOperand(result));
    Addu(result, result, Operand(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if new space is exhausted. Use result
  // to calculate the new top. Object size may be in words so a shift is
  // required to get the number of bytes.
  if ((flags & SIZE_IN_WORDS) != 0) {
    sll(scratch2, object_size, kPointerSizeLog2);
    Addu(scratch2, result, scratch2);
  } else {
    Addu(scratch2, result, Operand(object_size));
  }
  Branch(gc_required, Ugreater, scratch2, Operand(t9));

  // Update allocation top. scratch2 temporarily holds the new top.
  if (emit_debug_code()) {
    And(t9, scratch2, Operand(kObjectAlignmentMask));
    Check(eq, kUnalignedAllocationInNewSpace, t9, Operand(zero_reg));
  }
  sw(scratch2, MemOperand(topaddr));

  // Tag object if requested.
  if ((flags & TAG_OBJECT) != 0) {
    Addu(result, result, Operand(kHeapObjectTag));
  }
}


void MacroAssembler::UndoAllocationInNewSpace(Register object,
                                              Register scratch) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Make sure the object has no tag before resetting top.
  And(object, object, Operand(~kHeapObjectTagMask));
#ifdef DEBUG
  // Check that the object un-allocated is below the current top.
  li(scratch, Operand(new_space_allocation_top));
  lw(scratch, MemOperand(scratch));
  Check(less, kUndoAllocationOfNonAllocatedMemory,
        object, Operand(scratch));
#endif
  // Write the address of the object to un-allocate as the current top.
  li(scratch, Operand(new_space_allocation_top));
  sw(object, MemOperand(scratch));
}


void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  DCHECK((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  sll(scratch1, length, 1);  // Length in bytes, not chars.
  addiu(scratch1, scratch1,
        kObjectAlignmentMask + SeqTwoByteString::kHeaderSize);
  And(scratch1, scratch1, Operand(~kObjectAlignmentMask));

  // Allocate two-byte string in new space.
  Allocate(scratch1,
           result,
           scratch2,
           scratch3,
           gc_required,
           TAG_OBJECT);

  // Set the map, length and hash field.
  InitializeNewString(result,
                      length,
                      Heap::kStringMapRootIndex,
                      scratch1,
                      scratch2);
}
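
// Size computation sketch (illustrative constants: 4-byte object alignment,
// 12-byte SeqTwoByteString header): for length == 5 the sll yields 10
// payload bytes, adding the header plus kObjectAlignmentMask gives 25, and
// the final And rounds down to 24, the smallest aligned size that holds the
// header and ten bytes of characters.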


void MacroAssembler::AllocateOneByteString(Register result, Register length,
                                           Register scratch1, Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  DCHECK((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  DCHECK(kCharSize == 1);
  addiu(scratch1, length, kObjectAlignmentMask + SeqOneByteString::kHeaderSize);
  And(scratch1, scratch1, Operand(~kObjectAlignmentMask));

  // Allocate one-byte string in new space.
  Allocate(scratch1,
           result,
           scratch2,
           scratch3,
           gc_required,
           TAG_OBJECT);

  // Set the map, length and hash field.
  InitializeNewString(result, length, Heap::kOneByteStringMapRootIndex,
                      scratch1, scratch2);
}


void MacroAssembler::AllocateTwoByteConsString(Register result,
                                               Register length,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);
  InitializeNewString(result,
                      length,
                      Heap::kConsStringMapRootIndex,
                      scratch1,
                      scratch2);
}


void MacroAssembler::AllocateOneByteConsString(Register result, Register length,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  Allocate(ConsString::kSize,
           result,
           scratch1,
           scratch2,
           gc_required,
           TAG_OBJECT);

  InitializeNewString(result, length, Heap::kConsOneByteStringMapRootIndex,
                      scratch1, scratch2);
}


void MacroAssembler::AllocateTwoByteSlicedString(Register result,
                                                 Register length,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  InitializeNewString(result,
                      length,
                      Heap::kSlicedStringMapRootIndex,
                      scratch1,
                      scratch2);
}


void MacroAssembler::AllocateOneByteSlicedString(Register result,
                                                 Register length,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  InitializeNewString(result, length, Heap::kSlicedOneByteStringMapRootIndex,
                      scratch1, scratch2);
}


void MacroAssembler::JumpIfNotUniqueNameInstanceType(Register reg,
                                                     Label* not_unique_name) {
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  Label succeed;
  And(at, reg, Operand(kIsNotStringMask | kIsNotInternalizedMask));
  Branch(&succeed, eq, at, Operand(zero_reg));
  Branch(not_unique_name, ne, reg, Operand(SYMBOL_TYPE));

  bind(&succeed);
}
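
// Why this works: internalized strings have both kIsNotStringMask and
// kIsNotInternalizedMask clear (kInternalizedTag and kStringTag are zero),
// so they fall through the And to succeed; every other unique name must be
// a symbol, hence the equality test against SYMBOL_TYPE.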


// Allocates a heap number or jumps to the label if the young space is full and
// a scavenge is needed.
void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Register heap_number_map,
                                        Label* need_gc,
                                        TaggingMode tagging_mode,
                                        MutableMode mode) {
  // Allocate an object in the heap for the heap number and tag it as a heap
  // object.
  Allocate(HeapNumber::kSize, result, scratch1, scratch2, need_gc,
           tagging_mode == TAG_RESULT ? TAG_OBJECT : NO_ALLOCATION_FLAGS);

  Heap::RootListIndex map_index = mode == MUTABLE
      ? Heap::kMutableHeapNumberMapRootIndex
      : Heap::kHeapNumberMapRootIndex;
  AssertIsRoot(heap_number_map, map_index);

  // Store heap number map in the allocated object.
  if (tagging_mode == TAG_RESULT) {
    sw(heap_number_map, FieldMemOperand(result, HeapObject::kMapOffset));
  } else {
    sw(heap_number_map, MemOperand(result, HeapObject::kMapOffset));
  }
}


void MacroAssembler::AllocateHeapNumberWithValue(Register result,
                                                 FPURegister value,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  LoadRoot(t8, Heap::kHeapNumberMapRootIndex);
  AllocateHeapNumber(result, scratch1, scratch2, t8, gc_required);
  sdc1(value, FieldMemOperand(result, HeapNumber::kValueOffset));
}


// Copies a fixed number of fields of heap objects from src to dst.
void MacroAssembler::CopyFields(Register dst,
                                Register src,
                                RegList temps,
                                int field_count) {
  DCHECK((temps & dst.bit()) == 0);
  DCHECK((temps & src.bit()) == 0);
  // Primitive implementation using only one temporary register.

  Register tmp = no_reg;
  // Find a temp register in temps list.
  for (int i = 0; i < kNumRegisters; i++) {
    if ((temps & (1 << i)) != 0) {
      tmp.code_ = i;
      break;
    }
  }
  DCHECK(!tmp.is(no_reg));

  for (int i = 0; i < field_count; i++) {
    lw(tmp, FieldMemOperand(src, i * kPointerSize));
    sw(tmp, FieldMemOperand(dst, i * kPointerSize));
  }
}


void MacroAssembler::CopyBytes(Register src,
                               Register dst,
                               Register length,
                               Register scratch) {
  Label align_loop_1, word_loop, byte_loop, byte_loop_1, done;

  // Align src before copying in word size chunks.
  Branch(&byte_loop, le, length, Operand(kPointerSize));
  bind(&align_loop_1);
  And(scratch, src, kPointerSize - 1);
  Branch(&word_loop, eq, scratch, Operand(zero_reg));
  lbu(scratch, MemOperand(src));
  Addu(src, src, 1);
  sb(scratch, MemOperand(dst));
  Addu(dst, dst, 1);
  Subu(length, length, Operand(1));
  Branch(&align_loop_1, ne, length, Operand(zero_reg));

  // Copy bytes in word size chunks.
  bind(&word_loop);
  if (emit_debug_code()) {
    And(scratch, src, kPointerSize - 1);
    Assert(eq, kExpectingAlignmentForCopyBytes,
           scratch, Operand(zero_reg));
  }
  Branch(&byte_loop, lt, length, Operand(kPointerSize));
  lw(scratch, MemOperand(src));
  Addu(src, src, kPointerSize);

  // TODO(kalmard) check if this can be optimized to use sw in most cases.
  // Can't use unaligned access - copy byte by byte.
  if (kArchEndian == kLittle) {
    sb(scratch, MemOperand(dst, 0));
    srl(scratch, scratch, 8);
    sb(scratch, MemOperand(dst, 1));
    srl(scratch, scratch, 8);
    sb(scratch, MemOperand(dst, 2));
    srl(scratch, scratch, 8);
    sb(scratch, MemOperand(dst, 3));
  } else {
    sb(scratch, MemOperand(dst, 3));
    srl(scratch, scratch, 8);
    sb(scratch, MemOperand(dst, 2));
    srl(scratch, scratch, 8);
    sb(scratch, MemOperand(dst, 1));
    srl(scratch, scratch, 8);
    sb(scratch, MemOperand(dst, 0));
  }

  Addu(dst, dst, 4);

  Subu(length, length, Operand(kPointerSize));
  Branch(&word_loop);

  // Copy the last bytes if any left.
  bind(&byte_loop);
  Branch(&done, eq, length, Operand(zero_reg));
  bind(&byte_loop_1);
  lbu(scratch, MemOperand(src));
  Addu(src, src, 1);
  sb(scratch, MemOperand(dst));
  Addu(dst, dst, 1);
  Subu(length, length, Operand(1));
  Branch(&byte_loop_1, ne, length, Operand(zero_reg));
  bind(&done);
}
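
// Word-copy example for the loop above (little-endian case, illustrative
// value): with 0x44332211 in scratch, sb stores 0x11 at dst + 0, and each
// srl exposes the next byte (0x22, 0x33, 0x44) for dst + 1..3. Only dst may
// be unaligned here; src was advanced byte-wise to a word boundary before
// the word loop was entered.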


void MacroAssembler::InitializeFieldsWithFiller(Register start_offset,
                                                Register end_offset,
                                                Register filler) {
  Label loop, entry;
  Branch(&entry);
  bind(&loop);
  sw(filler, MemOperand(start_offset));
  Addu(start_offset, start_offset, kPointerSize);
  bind(&entry);
  Branch(&loop, lt, start_offset, Operand(end_offset));
}


void MacroAssembler::CheckFastElements(Register map,
                                       Register scratch,
                                       Label* fail) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  lbu(scratch, FieldMemOperand(map, Map::kBitField2Offset));
  Branch(fail, hi, scratch,
         Operand(Map::kMaximumBitField2FastHoleyElementValue));
}


void MacroAssembler::CheckFastObjectElements(Register map,
                                             Register scratch,
                                             Label* fail) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  lbu(scratch, FieldMemOperand(map, Map::kBitField2Offset));
  Branch(fail, ls, scratch,
         Operand(Map::kMaximumBitField2FastHoleySmiElementValue));
  Branch(fail, hi, scratch,
         Operand(Map::kMaximumBitField2FastHoleyElementValue));
}


void MacroAssembler::CheckFastSmiElements(Register map,
                                          Register scratch,
                                          Label* fail) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  lbu(scratch, FieldMemOperand(map, Map::kBitField2Offset));
  Branch(fail, hi, scratch,
         Operand(Map::kMaximumBitField2FastHoleySmiElementValue));
}


void MacroAssembler::StoreNumberToDoubleElements(Register value_reg,
                                                 Register key_reg,
                                                 Register elements_reg,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Register scratch3,
                                                 Label* fail,
                                                 int elements_offset) {
  Label smi_value, maybe_nan, have_double_value, is_nan, done;
  Register mantissa_reg = scratch2;
  Register exponent_reg = scratch3;

  // Handle smi values specially.
  JumpIfSmi(value_reg, &smi_value);

  // Ensure that the object is a heap number.
  CheckMap(value_reg,
           scratch1,
           Heap::kHeapNumberMapRootIndex,
           fail,
           DONT_DO_SMI_CHECK);

  // Check for NaN: all NaN values have a value greater (signed) than
  // 0x7ff00000 in the exponent.
  li(scratch1, Operand(kNaNOrInfinityLowerBoundUpper32));
  lw(exponent_reg, FieldMemOperand(value_reg, HeapNumber::kExponentOffset));
  Branch(&maybe_nan, ge, exponent_reg, Operand(scratch1));

  lw(mantissa_reg, FieldMemOperand(value_reg, HeapNumber::kMantissaOffset));

  bind(&have_double_value);
  sll(scratch1, key_reg, kDoubleSizeLog2 - kSmiTagSize);
  Addu(scratch1, scratch1, elements_reg);
  sw(mantissa_reg,
     FieldMemOperand(scratch1, FixedDoubleArray::kHeaderSize - elements_offset
                     + kHoleNanLower32Offset));
  sw(exponent_reg,
     FieldMemOperand(scratch1, FixedDoubleArray::kHeaderSize - elements_offset
                     + kHoleNanUpper32Offset));
  jmp(&done);

  bind(&maybe_nan);
  // Could be NaN or Infinity. If fraction is not zero, it's NaN, otherwise
  // it's an Infinity, and the non-NaN code path applies.
  Branch(&is_nan, gt, exponent_reg, Operand(scratch1));
  lw(mantissa_reg, FieldMemOperand(value_reg, HeapNumber::kMantissaOffset));
  Branch(&have_double_value, eq, mantissa_reg, Operand(zero_reg));
  bind(&is_nan);
  // Load canonical NaN for storing into the double array.
  LoadRoot(at, Heap::kNanValueRootIndex);
  lw(mantissa_reg, FieldMemOperand(at, HeapNumber::kMantissaOffset));
  lw(exponent_reg, FieldMemOperand(at, HeapNumber::kExponentOffset));
  jmp(&have_double_value);

  bind(&smi_value);
  Addu(scratch1, elements_reg,
       Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag -
               elements_offset));
  sll(scratch2, key_reg, kDoubleSizeLog2 - kSmiTagSize);
  Addu(scratch1, scratch1, scratch2);
  // scratch1 is now the effective address of the double element.

  Register untagged_value = elements_reg;
  SmiUntag(untagged_value, value_reg);
  mtc1(untagged_value, f2);
  cvt_d_w(f0, f2);
  sdc1(f0, MemOperand(scratch1, 0));
  bind(&done);
}
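
// Bit-pattern sketch for the NaN test (illustrative values): +Infinity has
// upper word 0x7ff00000 and NaNs compare (signed) greater, e.g. 0x7ff80000,
// so `ge` routes both to maybe_nan. There, a strictly greater exponent word
// or a non-zero mantissa word means NaN and the canonical NaN is stored;
// otherwise the value is an Infinity and is stored unchanged.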


void MacroAssembler::CompareMapAndBranch(Register obj,
                                         Register scratch,
                                         Handle<Map> map,
                                         Label* early_success,
                                         Condition cond,
                                         Label* branch_to) {
  lw(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
  CompareMapAndBranch(scratch, map, early_success, cond, branch_to);
}


void MacroAssembler::CompareMapAndBranch(Register obj_map,
                                         Handle<Map> map,
                                         Label* early_success,
                                         Condition cond,
                                         Label* branch_to) {
  Branch(branch_to, cond, obj_map, Operand(map));
}


void MacroAssembler::CheckMap(Register obj,
                              Register scratch,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }
  Label success;
  CompareMapAndBranch(obj, scratch, map, &success, ne, fail);
  bind(&success);
}


void MacroAssembler::DispatchMap(Register obj,
                                 Register scratch,
                                 Handle<Map> map,
                                 Handle<Code> success,
                                 SmiCheckType smi_check_type) {
  Label fail;
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, &fail);
  }
  lw(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
  Jump(success, RelocInfo::CODE_TARGET, eq, scratch, Operand(map));
  bind(&fail);
}


void MacroAssembler::CheckMap(Register obj,
                              Register scratch,
                              Heap::RootListIndex index,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }
  lw(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
  LoadRoot(at, index);
  Branch(fail, ne, scratch, Operand(at));
}


void MacroAssembler::MovFromFloatResult(DoubleRegister dst) {
  if (IsMipsSoftFloatABI) {
    if (kArchEndian == kLittle) {
      Move(dst, v0, v1);
    } else {
      Move(dst, v1, v0);
    }
  } else {
    Move(dst, f0);  // Reg f0 is o32 ABI FP return value.
  }
}


void MacroAssembler::MovFromFloatParameter(DoubleRegister dst) {
  if (IsMipsSoftFloatABI) {
    if (kArchEndian == kLittle) {
      Move(dst, a0, a1);
    } else {
      Move(dst, a1, a0);
    }
  } else {
    Move(dst, f12);  // Reg f12 is o32 ABI FP first argument value.
  }
}


void MacroAssembler::MovToFloatParameter(DoubleRegister src) {
  if (!IsMipsSoftFloatABI) {
    Move(f12, src);
  } else {
    if (kArchEndian == kLittle) {
      Move(a0, a1, src);
    } else {
      Move(a1, a0, src);
    }
  }
}


void MacroAssembler::MovToFloatResult(DoubleRegister src) {
  if (!IsMipsSoftFloatABI) {
    Move(f0, src);
  } else {
    if (kArchEndian == kLittle) {
      Move(v0, v1, src);
    } else {
      Move(v1, v0, src);
    }
  }
}


void MacroAssembler::MovToFloatParameters(DoubleRegister src1,
                                          DoubleRegister src2) {
  if (!IsMipsSoftFloatABI) {
    if (src2.is(f12)) {
      DCHECK(!src1.is(f14));
      Move(f14, src2);
      Move(f12, src1);
    } else {
      Move(f12, src1);
      Move(f14, src2);
    }
  } else {
    if (kArchEndian == kLittle) {
      Move(a0, a1, src1);
      Move(a2, a3, src2);
    } else {
      Move(a1, a0, src1);
      Move(a3, a2, src2);
    }
  }
}
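
// o32 calling-convention note: with soft-float, a double travels in a GPR
// pair (v0/v1 for results, a0/a1 for the first argument), and which word of
// the pair is the low one flips with kArchEndian, hence the swapped Move()
// operands in the big-endian branches. The f12/f14 shuffle in
// MovToFloatParameters only reorders the moves so src1 cannot clobber src2
// when src2 already sits in f12.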


// -----------------------------------------------------------------------------
// JavaScript invokes.

void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    Register code_reg,
                                    Label* done,
                                    bool* definitely_mismatches,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  bool definitely_matches = false;
  *definitely_mismatches = false;
  Label regular_invoke;

  // Check whether the expected and actual arguments count match. If not,
  // set up registers according to contract with ArgumentsAdaptorTrampoline:
  //  a0: actual arguments count
  //  a1: function (passed through to callee)
  //  a2: expected arguments count

  // The code below is made a lot easier because the calling code already sets
  // up actual and expected registers according to the contract if values are
  // passed in registers.
  DCHECK(actual.is_immediate() || actual.reg().is(a0));
  DCHECK(expected.is_immediate() || expected.reg().is(a2));
  DCHECK((!code_constant.is_null() && code_reg.is(no_reg)) || code_reg.is(a3));

  if (expected.is_immediate()) {
    DCHECK(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      li(a0, Operand(actual.immediate()));
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaptation code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        *definitely_mismatches = true;
        li(a2, Operand(expected.immediate()));
      }
    }
  } else if (actual.is_immediate()) {
    Branch(&regular_invoke, eq, expected.reg(), Operand(actual.immediate()));
    li(a0, Operand(actual.immediate()));
  } else {
    Branch(&regular_invoke, eq, expected.reg(), Operand(actual.reg()));
  }

  if (!definitely_matches) {
    if (!code_constant.is_null()) {
      li(a3, Operand(code_constant));
      addiu(a3, a3, Code::kHeaderSize - kHeapObjectTag);
    }

    Handle<Code> adaptor =
        isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor));
      Call(adaptor);
      call_wrapper.AfterCall();
      if (!*definitely_mismatches) {
        Branch(done);
      }
    } else {
      Jump(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&regular_invoke);
  }
}
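
// Dispatch summary for the prologue above: an exact immediate match, or an
// expected count of kDontAdaptArgumentsSentinel, skips adaptation entirely;
// a provable immediate mismatch records *definitely_mismatches and enters
// the ArgumentsAdaptorTrampoline with a0/a2 holding the actual/expected
// counts; register-based counts are compared at run time and branch to
// regular_invoke on equality.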


void MacroAssembler::InvokeCode(Register code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag,
                                const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  Label done;

  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, Handle<Code>::null(), code,
                 &done, &definitely_mismatches, flag,
                 call_wrapper);
  if (!definitely_mismatches) {
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(code));
      Call(code);
      call_wrapper.AfterCall();
    } else {
      DCHECK(flag == JUMP_FUNCTION);
      Jump(code);
    }
    // Continue here if InvokePrologue does handle the invocation due to
    // mismatched parameter counts.
    bind(&done);
  }
}


void MacroAssembler::InvokeFunction(Register function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  // Contract with called JS functions requires that function is passed in a1.
  DCHECK(function.is(a1));
  Register expected_reg = a2;
  Register code_reg = a3;

  lw(code_reg, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
  lw(expected_reg,
     FieldMemOperand(code_reg,
                     SharedFunctionInfo::kFormalParameterCountOffset));
  sra(expected_reg, expected_reg, kSmiTagSize);
  lw(code_reg, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));

  ParameterCount expected(expected_reg);
  InvokeCode(code_reg, expected, actual, flag, call_wrapper);
}


void MacroAssembler::InvokeFunction(Register function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  // Contract with called JS functions requires that function is passed in a1.
  DCHECK(function.is(a1));

  // Get the function and set up the context.
  lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

  // We call indirectly through the code field in the function to
  // allow recompilation to take effect without changing any of the
  // call sites.
  lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  InvokeCode(a3, expected, actual, flag, call_wrapper);
}


void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  li(a1, function);
  InvokeFunction(a1, expected, actual, flag, call_wrapper);
}


void MacroAssembler::IsObjectJSObjectType(Register heap_object,
                                          Register map,
                                          Register scratch,
                                          Label* fail) {
  lw(map, FieldMemOperand(heap_object, HeapObject::kMapOffset));
  IsInstanceJSObjectType(map, scratch, fail);
}


void MacroAssembler::IsInstanceJSObjectType(Register map,
                                            Register scratch,
                                            Label* fail) {
  lbu(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset));
  Branch(fail, lt, scratch, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
  Branch(fail, gt, scratch, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
}


void MacroAssembler::IsObjectJSStringType(Register object,
                                          Register scratch,
                                          Label* fail) {
  DCHECK(kNotStringTag != 0);

  lw(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
  lbu(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  And(scratch, scratch, Operand(kIsNotStringMask));
  Branch(fail, ne, scratch, Operand(zero_reg));
}


void MacroAssembler::IsObjectNameType(Register object,
                                      Register scratch,
                                      Label* fail) {
  lw(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
  lbu(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  Branch(fail, hi, scratch, Operand(LAST_NAME_TYPE));
}


// ---------------------------------------------------------------------------
// Support functions.


void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss,
                                             bool miss_on_bound_function) {
  Label non_instance;
  if (miss_on_bound_function) {
    // Check that the receiver isn't a smi.
    JumpIfSmi(function, miss);

    // Check that the function really is a function. Load map into result reg.
    GetObjectType(function, result, scratch);
    Branch(miss, ne, scratch, Operand(JS_FUNCTION_TYPE));

    lw(scratch,
       FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
    lw(scratch,
       FieldMemOperand(scratch, SharedFunctionInfo::kCompilerHintsOffset));
    And(scratch, scratch,
        Operand(Smi::FromInt(1 << SharedFunctionInfo::kBoundFunction)));
    Branch(miss, ne, scratch, Operand(zero_reg));

    // Make sure that the function has an instance prototype.
    lbu(scratch, FieldMemOperand(result, Map::kBitFieldOffset));
    And(scratch, scratch, Operand(1 << Map::kHasNonInstancePrototype));
    Branch(&non_instance, ne, scratch, Operand(zero_reg));
  }

  // Get the prototype or initial map from the function.
  lw(result,
     FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  LoadRoot(t8, Heap::kTheHoleValueRootIndex);
  Branch(miss, eq, result, Operand(t8));

  // If the function does not have an initial map, we're done.
  Label done;
  GetObjectType(result, scratch, scratch);
  Branch(&done, ne, scratch, Operand(MAP_TYPE));

  // Get the prototype from the initial map.
  lw(result, FieldMemOperand(result, Map::kPrototypeOffset));

  if (miss_on_bound_function) {
    jmp(&done);

    // Non-instance prototype: Fetch prototype from constructor field
    // in initial map.
    bind(&non_instance);
    lw(result, FieldMemOperand(result, Map::kConstructorOffset));
  }

  // All done.
  bind(&done);
}
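
// Behavioural note: with miss_on_bound_function set, any function whose
// kBoundFunction compiler hint is set goes straight to the miss label, and
// maps flagged kHasNonInstancePrototype are routed to the constructor-field
// load at non_instance; plain functions fall through to the
// prototype-or-initial-map lookup below.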


void MacroAssembler::GetObjectType(Register object,
                                   Register map,
                                   Register type_reg) {
  lw(map, FieldMemOperand(object, HeapObject::kMapOffset));
  lbu(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
}


// -----------------------------------------------------------------------------
// Runtime calls.

void MacroAssembler::CallStub(CodeStub* stub,
                              TypeFeedbackId ast_id,
                              Condition cond,
                              Register r1,
                              const Operand& r2,
                              BranchDelaySlot bd) {
  DCHECK(AllowThisStubCall(stub));  // Stub calls are not allowed in some stubs.
  Call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id,
       cond, r1, r2, bd);
}


void MacroAssembler::TailCallStub(CodeStub* stub,
                                  Condition cond,
                                  Register r1,
                                  const Operand& r2,
                                  BranchDelaySlot bd) {
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond, r1, r2, bd);
}


static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
  return ref0.address() - ref1.address();
}


void MacroAssembler::CallApiFunctionAndReturn(
    Register function_address,
    ExternalReference thunk_ref,
    int stack_space,
    MemOperand return_value_operand,
    MemOperand* context_restore_operand) {
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address(isolate());
  const int kNextOffset = 0;
  const int kLimitOffset = AddressOffset(
      ExternalReference::handle_scope_limit_address(isolate()),
      next_address);
  const int kLevelOffset = AddressOffset(
      ExternalReference::handle_scope_level_address(isolate()),
      next_address);

  DCHECK(function_address.is(a1) || function_address.is(a2));

  Label profiler_disabled;
  Label end_profiler_check;
  li(t9, Operand(ExternalReference::is_profiling_address(isolate())));
  lb(t9, MemOperand(t9, 0));
  Branch(&profiler_disabled, eq, t9, Operand(zero_reg));

  // Additional parameter is the address of the actual callback.
  li(t9, Operand(thunk_ref));
  jmp(&end_profiler_check);

  bind(&profiler_disabled);
  mov(t9, function_address);
  bind(&end_profiler_check);

  // Allocate HandleScope in callee-save registers.
  li(s3, Operand(next_address));
  lw(s0, MemOperand(s3, kNextOffset));
  lw(s1, MemOperand(s3, kLimitOffset));
  lw(s2, MemOperand(s3, kLevelOffset));
  Addu(s2, s2, Operand(1));
  sw(s2, MemOperand(s3, kLevelOffset));

  if (FLAG_log_timer_events) {
    FrameScope frame(this, StackFrame::MANUAL);
    PushSafepointRegisters();
    PrepareCallCFunction(1, a0);
    li(a0, Operand(ExternalReference::isolate_address(isolate())));
    CallCFunction(ExternalReference::log_enter_external_function(isolate()), 1);
    PopSafepointRegisters();
  }

  // Native call returns to the DirectCEntry stub which redirects to the
  // return address pushed on stack (could have moved after GC).
  // DirectCEntry stub itself is generated early and never moves.
  DirectCEntryStub stub(isolate());
  stub.GenerateCall(this, t9);

  if (FLAG_log_timer_events) {
    FrameScope frame(this, StackFrame::MANUAL);
    PushSafepointRegisters();
    PrepareCallCFunction(1, a0);
    li(a0, Operand(ExternalReference::isolate_address(isolate())));
    CallCFunction(ExternalReference::log_leave_external_function(isolate()), 1);
    PopSafepointRegisters();
  }

  Label promote_scheduled_exception;
  Label exception_handled;
  Label delete_allocated_handles;
  Label leave_exit_frame;
  Label return_value_loaded;

  // Load value from ReturnValue.
  lw(v0, return_value_operand);
  bind(&return_value_loaded);

  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  sw(s0, MemOperand(s3, kNextOffset));
  if (emit_debug_code()) {
    lw(a1, MemOperand(s3, kLevelOffset));
    Check(eq, kUnexpectedLevelAfterReturnFromApiCall, a1, Operand(s2));
  }
  Subu(s2, s2, Operand(1));
  sw(s2, MemOperand(s3, kLevelOffset));
  lw(at, MemOperand(s3, kLimitOffset));
  Branch(&delete_allocated_handles, ne, s1, Operand(at));

  // Check if the function scheduled an exception.
  bind(&leave_exit_frame);
  LoadRoot(t0, Heap::kTheHoleValueRootIndex);
  li(at, Operand(ExternalReference::scheduled_exception_address(isolate())));
  lw(t1, MemOperand(at));
  Branch(&promote_scheduled_exception, ne, t0, Operand(t1));
  bind(&exception_handled);

  bool restore_context = context_restore_operand != NULL;
  if (restore_context) {
    lw(cp, *context_restore_operand);
  }
  li(s0, Operand(stack_space));
  LeaveExitFrame(false, s0, !restore_context, EMIT_RETURN);

  bind(&promote_scheduled_exception);
  {
    FrameScope frame(this, StackFrame::INTERNAL);
    CallExternalReference(
        ExternalReference(Runtime::kPromoteScheduledException, isolate()),
        0);
  }
  jmp(&exception_handled);

  // HandleScope limit has changed. Delete allocated extensions.
  bind(&delete_allocated_handles);
  sw(s1, MemOperand(s3, kLimitOffset));
  mov(s0, v0);
  mov(a0, v0);
  PrepareCallCFunction(1, s1);
  li(a0, Operand(ExternalReference::isolate_address(isolate())));
  CallCFunction(ExternalReference::delete_handle_scope_extensions(isolate()),
                1);
  mov(v0, s0);
  jmp(&leave_exit_frame);
}
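
// HandleScope bookkeeping in brief: the scope's next, limit and level words
// are cached in callee-saved s0/s1/s2 across the native call, the level is
// incremented before and decremented after, and a post-call limit differing
// from the cached one means the callback grew the scope, so the extensions
// are deleted (v0 parked in s0) before falling back to leave_exit_frame.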


bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
  return has_frame_ || !stub->SometimesSetsUpAFrame();
}


void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // If the hash field contains an array index pick it out. The assert checks
  // that the constants for the maximum number of digits for an array index
  // cached in the hash field and the number of bits reserved for it do not
  // conflict.
  DCHECK(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  DecodeFieldToSmi<String::ArrayIndexValueBits>(index, hash);
}


void MacroAssembler::ObjectToDoubleFPURegister(Register object,
                                               FPURegister result,
                                               Register scratch1,
                                               Register scratch2,
                                               Register heap_number_map,
                                               Label* not_number,
                                               ObjectToDoubleFlags flags) {
  Label done;
  if ((flags & OBJECT_NOT_SMI) == 0) {
    Label not_smi;
    JumpIfNotSmi(object, &not_smi);
    // Remove smi tag and convert to double.
    sra(scratch1, object, kSmiTagSize);
    mtc1(scratch1, result);
    cvt_d_w(result, result);
    Branch(&done);
    bind(&not_smi);
  }
  // Check for heap number and load double value from it.
  lw(scratch1, FieldMemOperand(object, HeapObject::kMapOffset));
  Branch(not_number, ne, scratch1, Operand(heap_number_map));

  if ((flags & AVOID_NANS_AND_INFINITIES) != 0) {
    // If exponent is all ones the number is either a NaN or +/-Infinity.
    Register exponent = scratch1;
    Register mask_reg = scratch2;
    lw(exponent, FieldMemOperand(object, HeapNumber::kExponentOffset));
    li(mask_reg, HeapNumber::kExponentMask);

    And(exponent, exponent, mask_reg);
    Branch(not_number, eq, exponent, Operand(mask_reg));
  }
  ldc1(result, FieldMemOperand(object, HeapNumber::kValueOffset));
  bind(&done);
}


void MacroAssembler::SmiToDoubleFPURegister(Register smi,
                                            FPURegister value,
                                            Register scratch1) {
  sra(scratch1, smi, kSmiTagSize);
  mtc1(scratch1, value);
  cvt_d_w(value, value);
}
4478
4479
Ben Murdoch257744e2011-11-30 15:57:28 +00004480void MacroAssembler::AdduAndCheckForOverflow(Register dst,
4481 Register left,
4482 Register right,
4483 Register overflow_dst,
4484 Register scratch) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004485 DCHECK(!dst.is(overflow_dst));
4486 DCHECK(!dst.is(scratch));
4487 DCHECK(!overflow_dst.is(scratch));
4488 DCHECK(!overflow_dst.is(left));
4489 DCHECK(!overflow_dst.is(right));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004490
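  // A sketch of the overflow test used below: signed addition overflows
  // exactly when both operands have the same sign and the sum's sign differs,
  // so (dst ^ left) & (dst ^ right) is negative precisely on overflow. A
  // caller would typically branch on that with something like
  // Branch(&overflow, lt, overflow_dst, Operand(zero_reg)).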
  if (left.is(right) && dst.is(left)) {
    DCHECK(!dst.is(t9));
    DCHECK(!scratch.is(t9));
    DCHECK(!left.is(t9));
    DCHECK(!right.is(t9));
    DCHECK(!overflow_dst.is(t9));
    mov(t9, right);
    right = t9;
  }

  if (dst.is(left)) {
    mov(scratch, left);  // Preserve left.
    addu(dst, left, right);  // Left is overwritten.
    xor_(scratch, dst, scratch);  // Original left.
    xor_(overflow_dst, dst, right);
    and_(overflow_dst, overflow_dst, scratch);
  } else if (dst.is(right)) {
    mov(scratch, right);  // Preserve right.
    addu(dst, left, right);  // Right is overwritten.
    xor_(scratch, dst, scratch);  // Original right.
    xor_(overflow_dst, dst, left);
    and_(overflow_dst, overflow_dst, scratch);
  } else {
    addu(dst, left, right);
    xor_(overflow_dst, dst, left);
    xor_(scratch, dst, right);
    and_(overflow_dst, scratch, overflow_dst);
  }
}


void MacroAssembler::SubuAndCheckForOverflow(Register dst,
                                             Register left,
                                             Register right,
                                             Register overflow_dst,
                                             Register scratch) {
  DCHECK(!dst.is(overflow_dst));
  DCHECK(!dst.is(scratch));
  DCHECK(!overflow_dst.is(scratch));
  DCHECK(!overflow_dst.is(left));
  DCHECK(!overflow_dst.is(right));
  DCHECK(!scratch.is(left));
  DCHECK(!scratch.is(right));

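  // Same sign-bit trick as in AdduAndCheckForOverflow, adapted to
  // subtraction: overflow occurs when the operands have different signs and
  // the result's sign differs from left's, i.e. when
  // (dst ^ left) & (left ^ right) is negative.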
  // This happens with some crankshaft code. Since Subu works fine if
  // left == right, let's not make that restriction here.
  if (left.is(right)) {
    mov(dst, zero_reg);
    mov(overflow_dst, zero_reg);
    return;
  }

  if (dst.is(left)) {
    mov(scratch, left);  // Preserve left.
    subu(dst, left, right);  // Left is overwritten.
    xor_(overflow_dst, dst, scratch);  // scratch is original left.
    xor_(scratch, scratch, right);  // scratch is original left ^ right.
    and_(overflow_dst, scratch, overflow_dst);
  } else if (dst.is(right)) {
    mov(scratch, right);  // Preserve right.
    subu(dst, left, right);  // Right is overwritten.
    xor_(overflow_dst, dst, left);
    xor_(scratch, left, scratch);  // Original right.
    and_(overflow_dst, scratch, overflow_dst);
  } else {
    subu(dst, left, right);
    xor_(overflow_dst, dst, left);
    xor_(scratch, left, right);
    and_(overflow_dst, scratch, overflow_dst);
  }
}


void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments,
                                 SaveFPRegsMode save_doubles) {
  // All parameters are on the stack. v0 has the return value after call.

  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  CHECK(f->nargs < 0 || f->nargs == num_arguments);

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  PrepareCEntryArgs(num_arguments);
  PrepareCEntryFunction(ExternalReference(f, isolate()));
  CEntryStub stub(isolate(), 1, save_doubles);
  CallStub(&stub);
}


void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments,
                                           BranchDelaySlot bd) {
  PrepareCEntryArgs(num_arguments);
  PrepareCEntryFunction(ext);

  CEntryStub stub(isolate(), 1);
  CallStub(&stub, TypeFeedbackId::None(), al, zero_reg, Operand(zero_reg), bd);
}


void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  PrepareCEntryArgs(num_arguments);
  JumpToExternalReference(ext);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid, isolate()),
                            num_arguments,
                            result_size);
}


void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin,
                                             BranchDelaySlot bd) {
  PrepareCEntryFunction(builtin);
  CEntryStub stub(isolate(), 1);
  Jump(stub.GetCode(),
       RelocInfo::CODE_TARGET,
       al,
       zero_reg,
       Operand(zero_reg),
       bd);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                   InvokeFlag flag,
                                   const CallWrapper& call_wrapper) {
  // You can't call a builtin without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  GetBuiltinEntry(t9, id);
  if (flag == CALL_FUNCTION) {
    call_wrapper.BeforeCall(CallSize(t9));
    Call(t9);
    call_wrapper.AfterCall();
  } else {
    DCHECK(flag == JUMP_FUNCTION);
    Jump(t9);
  }
}


void MacroAssembler::GetBuiltinFunction(Register target,
                                        Builtins::JavaScript id) {
  // Load the builtins object into target register.
  lw(target, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  lw(target, FieldMemOperand(target, GlobalObject::kBuiltinsOffset));
  // Load the JavaScript builtin function from the builtins object.
  lw(target, FieldMemOperand(target,
                             JSBuiltinsObject::OffsetOfFunctionWithId(id)));
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  DCHECK(!target.is(a1));
  GetBuiltinFunction(a1, id);
  // Load the code entry point from the builtins object.
  lw(target, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
}


void MacroAssembler::SetCounter(StatsCounter* counter, int value,
                                Register scratch1, Register scratch2) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    li(scratch1, Operand(value));
    li(scratch2, Operand(ExternalReference(counter)));
    sw(scratch1, MemOperand(scratch2));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value,
                                      Register scratch1, Register scratch2) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    li(scratch2, Operand(ExternalReference(counter)));
    lw(scratch1, MemOperand(scratch2));
    Addu(scratch1, scratch1, Operand(value));
    sw(scratch1, MemOperand(scratch2));
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value,
                                      Register scratch1, Register scratch2) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    li(scratch2, Operand(ExternalReference(counter)));
    lw(scratch1, MemOperand(scratch2));
    Subu(scratch1, scratch1, Operand(value));
    sw(scratch1, MemOperand(scratch2));
  }
}


// -----------------------------------------------------------------------------
// Debugging.

void MacroAssembler::Assert(Condition cc, BailoutReason reason,
                            Register rs, Operand rt) {
  if (emit_debug_code())
    Check(cc, reason, rs, rt);
}


void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    DCHECK(!elements.is(at));
    Label ok;
    push(elements);
    lw(elements, FieldMemOperand(elements, HeapObject::kMapOffset));
    LoadRoot(at, Heap::kFixedArrayMapRootIndex);
    Branch(&ok, eq, elements, Operand(at));
    LoadRoot(at, Heap::kFixedDoubleArrayMapRootIndex);
    Branch(&ok, eq, elements, Operand(at));
    LoadRoot(at, Heap::kFixedCOWArrayMapRootIndex);
    Branch(&ok, eq, elements, Operand(at));
    Abort(kJSObjectWithFastElementsMapHasSlowElements);
    bind(&ok);
    pop(elements);
  }
}


void MacroAssembler::Check(Condition cc, BailoutReason reason,
                           Register rs, Operand rt) {
  Label L;
  Branch(&L, cc, rs, rt);
  Abort(reason);
  // Will not return here.
  bind(&L);
}


void MacroAssembler::Abort(BailoutReason reason) {
  Label abort_start;
  bind(&abort_start);
#ifdef DEBUG
  const char* msg = GetBailoutReason(reason);
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }

  if (FLAG_trap_on_abort) {
    stop(msg);
    return;
  }
#endif

  li(a0, Operand(Smi::FromInt(reason)));
  push(a0);
  // Disable stub call restrictions to always allow calls to abort.
  if (!has_frame_) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    CallRuntime(Runtime::kAbort, 1);
  } else {
    CallRuntime(Runtime::kAbort, 1);
  }
  // Will not return here.
  if (is_trampoline_pool_blocked()) {
    // If the calling code cares about the exact number of
    // instructions generated, we insert padding here to keep the size
    // of the Abort macro constant.
    // Currently in debug mode with debug_code enabled the number of
    // generated instructions is 10, so we use this as a maximum value.
    static const int kExpectedAbortInstructions = 10;
    int abort_instructions = InstructionsGeneratedSince(&abort_start);
    DCHECK(abort_instructions <= kExpectedAbortInstructions);
    while (abort_instructions++ < kExpectedAbortInstructions) {
      nop();
    }
  }
}


void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    lw(dst, MemOperand(cp, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    for (int i = 1; i < context_chain_length; i++) {
      lw(dst, MemOperand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    }
  } else {
    // Slot is in the current function context. Move it into the
    // destination register in case we store into it (the write barrier
    // cannot be allowed to destroy the context in esi).
    Move(dst, cp);
  }
}


void MacroAssembler::LoadTransitionedArrayMapConditional(
    ElementsKind expected_kind,
    ElementsKind transitioned_kind,
    Register map_in_out,
    Register scratch,
    Label* no_map_match) {
  // Load the global or builtins object from the current context.
  lw(scratch,
     MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  lw(scratch, FieldMemOperand(scratch, GlobalObject::kNativeContextOffset));

  // Check that the function's map is the same as the expected cached map.
  lw(scratch,
     MemOperand(scratch,
                Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX)));
  size_t offset = expected_kind * kPointerSize +
      FixedArrayBase::kHeaderSize;
  lw(at, FieldMemOperand(scratch, offset));
  Branch(no_map_match, ne, map_in_out, Operand(at));

  // Use the transitioned cached map.
  offset = transitioned_kind * kPointerSize +
      FixedArrayBase::kHeaderSize;
  lw(map_in_out, FieldMemOperand(scratch, offset));
}


void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the global or builtins object from the current context.
  lw(function,
     MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  // Load the native context from the global or builtins object.
  lw(function, FieldMemOperand(function,
                               GlobalObject::kNativeContextOffset));
  // Load the function from the native context.
  lw(function, MemOperand(function, Context::SlotOffset(index)));
}


void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map,
                                                  Register scratch) {
  // Load the initial map. The global functions all have initial maps.
  lw(map, FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    CheckMap(map, scratch, Heap::kMetaMapRootIndex, &fail, DO_SMI_CHECK);
    Branch(&ok);
    bind(&fail);
    Abort(kGlobalFunctionsMustHaveInitialMap);
    bind(&ok);
  }
}


void MacroAssembler::StubPrologue() {
  Push(ra, fp, cp);
  Push(Smi::FromInt(StackFrame::STUB));
  // Adjust FP to point to saved FP.
  Addu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
}


void MacroAssembler::Prologue(bool code_pre_aging) {
  PredictableCodeSizeScope predictable_code_size_scope(
      this, kNoCodeAgeSequenceLength);
  // The following three instructions must remain together and unmodified
  // for code aging to work properly.
  if (code_pre_aging) {
    // Pre-age the code.
    Code* stub = Code::GetPreAgedCodeAgeStub(isolate());
    nop(Assembler::CODE_AGE_MARKER_NOP);
    // Load the stub address to t9 and call it,
    // GetCodeAgeAndParity() extracts the stub address from this instruction.
    li(t9,
       Operand(reinterpret_cast<uint32_t>(stub->instruction_start())),
       CONSTANT_SIZE);
    nop();  // Prevent jalr to jal optimization.
    jalr(t9, a0);
    nop();  // Branch delay slot nop.
    nop();  // Pad the empty space.
  } else {
    Push(ra, fp, cp, a1);
    nop(Assembler::CODE_AGE_SEQUENCE_NOP);
    // Adjust fp to point to caller's fp.
    Addu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
  }
}


void MacroAssembler::EnterFrame(StackFrame::Type type) {
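  // Frame layout built below, from highest slot to lowest:
  // [sp + 4w] ra, [sp + 3w] caller fp, [sp + 2w] cp,
  // [sp + 1w] type marker (as a Smi), [sp + 0w] code object.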
  addiu(sp, sp, -5 * kPointerSize);
  li(t8, Operand(Smi::FromInt(type)));
  li(t9, Operand(CodeObject()), CONSTANT_SIZE);
  sw(ra, MemOperand(sp, 4 * kPointerSize));
  sw(fp, MemOperand(sp, 3 * kPointerSize));
  sw(cp, MemOperand(sp, 2 * kPointerSize));
  sw(t8, MemOperand(sp, 1 * kPointerSize));
  sw(t9, MemOperand(sp, 0 * kPointerSize));
  // Adjust FP to point to saved FP.
  Addu(fp, sp,
       Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
}


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  mov(sp, fp);
  lw(fp, MemOperand(sp, 0 * kPointerSize));
  lw(ra, MemOperand(sp, 1 * kPointerSize));
  addiu(sp, sp, 2 * kPointerSize);
}


void MacroAssembler::EnterExitFrame(bool save_doubles,
                                    int stack_space) {
  // Set up the frame structure on the stack.
  STATIC_ASSERT(2 * kPointerSize == ExitFrameConstants::kCallerSPDisplacement);
  STATIC_ASSERT(1 * kPointerSize == ExitFrameConstants::kCallerPCOffset);
  STATIC_ASSERT(0 * kPointerSize == ExitFrameConstants::kCallerFPOffset);

  // This is how the stack will look:
  // fp + 2 (==kCallerSPDisplacement) - old stack's end
  // [fp + 1 (==kCallerPCOffset)] - saved old ra
  // [fp + 0 (==kCallerFPOffset)] - saved old fp
  // [fp - 1 (==kSPOffset)] - sp of the called function
  // [fp - 2 (==kCodeOffset)] - CodeObject
  // fp - (2 + stack_space + alignment) == sp == [fp - kSPOffset] - top of the
  //   new stack (will contain saved ra)

  // Save registers.
  addiu(sp, sp, -4 * kPointerSize);
  sw(ra, MemOperand(sp, 3 * kPointerSize));
  sw(fp, MemOperand(sp, 2 * kPointerSize));
  addiu(fp, sp, 2 * kPointerSize);  // Set up new frame pointer.

  if (emit_debug_code()) {
    sw(zero_reg, MemOperand(fp, ExitFrameConstants::kSPOffset));
  }

  // Accessed from ExitFrame::code_slot.
  li(t8, Operand(CodeObject()), CONSTANT_SIZE);
  sw(t8, MemOperand(fp, ExitFrameConstants::kCodeOffset));

  // Save the frame pointer and the context in top.
  li(t8, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
  sw(fp, MemOperand(t8));
  li(t8, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
  sw(cp, MemOperand(t8));

  const int frame_alignment = MacroAssembler::ActivationFrameAlignment();
  if (save_doubles) {
    // The stack must be aligned to 0 modulo 8 for stores with sdc1.
    DCHECK(kDoubleSize == frame_alignment);
    if (frame_alignment > 0) {
      DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
      And(sp, sp, Operand(-frame_alignment));  // Align stack.
    }
    int space = FPURegister::kMaxNumRegisters * kDoubleSize;
    Subu(sp, sp, Operand(space));
    // Remember: we only need to save every 2nd double FPU value.
    for (int i = 0; i < FPURegister::kMaxNumRegisters; i += 2) {
      FPURegister reg = FPURegister::from_code(i);
      sdc1(reg, MemOperand(sp, i * kDoubleSize));
    }
  }

  // Reserve place for the return address, stack space and an optional slot
  // (used by the DirectCEntryStub to hold the return value if a struct is
  // returned) and align the frame preparing for calling the runtime function.
  DCHECK(stack_space >= 0);
  Subu(sp, sp, Operand((stack_space + 2) * kPointerSize));
  if (frame_alignment > 0) {
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    And(sp, sp, Operand(-frame_alignment));  // Align stack.
  }

  // Set the exit frame sp value to point just before the return address
  // location.
  addiu(at, sp, kPointerSize);
  sw(at, MemOperand(fp, ExitFrameConstants::kSPOffset));
}


void MacroAssembler::LeaveExitFrame(bool save_doubles,
                                    Register argument_count,
                                    bool restore_context,
                                    bool do_return) {
  // Optionally restore all double registers.
  if (save_doubles) {
    // Remember: we only need to restore every 2nd double FPU value.
    lw(t8, MemOperand(fp, ExitFrameConstants::kSPOffset));
    for (int i = 0; i < FPURegister::kMaxNumRegisters; i += 2) {
      FPURegister reg = FPURegister::from_code(i);
      ldc1(reg, MemOperand(t8, i * kDoubleSize + kPointerSize));
    }
  }

  // Clear top frame.
  li(t8, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
  sw(zero_reg, MemOperand(t8));

  // Restore current context from top and clear it in debug mode.
  if (restore_context) {
    li(t8, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
    lw(cp, MemOperand(t8));
  }
#ifdef DEBUG
  li(t8, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
  sw(a3, MemOperand(t8));
#endif

  // Pop the arguments, restore registers, and return.
  mov(sp, fp);  // Respect ABI stack constraint.
  lw(fp, MemOperand(sp, ExitFrameConstants::kCallerFPOffset));
  lw(ra, MemOperand(sp, ExitFrameConstants::kCallerPCOffset));

  if (argument_count.is_valid()) {
    sll(t8, argument_count, kPointerSizeLog2);
    addu(sp, sp, t8);
  }

  if (do_return) {
    Ret(USE_DELAY_SLOT);
    // If returning, the instruction in the delay slot will be the addiu below.
  }
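  // Skip the saved fp and ra words so sp ends up back at the caller's stack
  // pointer (kCallerSPDisplacement above the old fp).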
  addiu(sp, sp, 8);
}


void MacroAssembler::InitializeNewString(Register string,
                                         Register length,
                                         Heap::RootListIndex map_index,
                                         Register scratch1,
                                         Register scratch2) {
  sll(scratch1, length, kSmiTagSize);
  LoadRoot(scratch2, map_index);
  sw(scratch1, FieldMemOperand(string, String::kLengthOffset));
  li(scratch1, Operand(String::kEmptyHashField));
  sw(scratch2, FieldMemOperand(string, HeapObject::kMapOffset));
  sw(scratch1, FieldMemOperand(string, String::kHashFieldOffset));
}


int MacroAssembler::ActivationFrameAlignment() {
#if V8_HOST_ARCH_MIPS
  // Running on the real platform. Use the alignment as mandated by the local
  // environment.
  // Note: This will break if we ever start generating snapshots on one Mips
  // platform for another Mips platform with a different alignment.
  return base::OS::ActivationFrameAlignment();
#else  // V8_HOST_ARCH_MIPS
  // If we are using the simulator then we should always align to the expected
  // alignment. As the simulator is used to generate snapshots we do not know
  // if the target platform will need alignment, so this is controlled from a
  // flag.
  return FLAG_sim_stack_alignment;
#endif  // V8_HOST_ARCH_MIPS
}


void MacroAssembler::AssertStackIsAligned() {
  if (emit_debug_code()) {
    const int frame_alignment = ActivationFrameAlignment();
    const int frame_alignment_mask = frame_alignment - 1;

    if (frame_alignment > kPointerSize) {
      Label alignment_as_expected;
      DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
      andi(at, sp, frame_alignment_mask);
      Branch(&alignment_as_expected, eq, at, Operand(zero_reg));
      // Don't use Check here, as it will call Runtime_Abort re-entering here.
      stop("Unexpected stack alignment");
      bind(&alignment_as_expected);
    }
  }
}


void MacroAssembler::JumpIfNotPowerOfTwoOrZero(
    Register reg,
    Register scratch,
    Label* not_power_of_two_or_zero) {
  Subu(scratch, reg, Operand(1));
  Branch(USE_DELAY_SLOT, not_power_of_two_or_zero, lt,
         scratch, Operand(zero_reg));
  and_(at, scratch, reg);  // In the delay slot.
  Branch(not_power_of_two_or_zero, ne, at, Operand(zero_reg));
}


void MacroAssembler::SmiTagCheckOverflow(Register reg, Register overflow) {
  DCHECK(!reg.is(overflow));
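  // Smi tagging is a left shift by one, which overflows exactly when the top
  // two bits of reg differ; equivalently, when reg and its tagged form have
  // different signs. That is what the (value ^ 2 * value) < 0 test computes.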
  mov(overflow, reg);  // Save original value.
  SmiTag(reg);
  xor_(overflow, overflow, reg);  // Overflow if (value ^ 2 * value) < 0.
}


void MacroAssembler::SmiTagCheckOverflow(Register dst,
                                         Register src,
                                         Register overflow) {
  if (dst.is(src)) {
    // Fall back to slower case.
    SmiTagCheckOverflow(dst, overflow);
  } else {
    DCHECK(!dst.is(src));
    DCHECK(!dst.is(overflow));
    DCHECK(!src.is(overflow));
    SmiTag(dst, src);
    xor_(overflow, dst, src);  // Overflow if (value ^ 2 * value) < 0.
  }
}


void MacroAssembler::UntagAndJumpIfSmi(Register dst,
                                       Register src,
                                       Label* smi_case) {
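  // The SmiUntag below sits in the branch delay slot of JumpIfSmi, so dst
  // already holds the untagged value when the jump to smi_case is taken.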
  JumpIfSmi(src, smi_case, at, USE_DELAY_SLOT);
  SmiUntag(dst, src);
}


void MacroAssembler::UntagAndJumpIfNotSmi(Register dst,
                                          Register src,
                                          Label* non_smi_case) {
  JumpIfNotSmi(src, non_smi_case, at, USE_DELAY_SLOT);
  SmiUntag(dst, src);
}

void MacroAssembler::JumpIfSmi(Register value,
                               Label* smi_label,
                               Register scratch,
                               BranchDelaySlot bd) {
  DCHECK_EQ(0, kSmiTag);
  andi(scratch, value, kSmiTagMask);
  Branch(bd, smi_label, eq, scratch, Operand(zero_reg));
}

void MacroAssembler::JumpIfNotSmi(Register value,
                                  Label* not_smi_label,
                                  Register scratch,
                                  BranchDelaySlot bd) {
  DCHECK_EQ(0, kSmiTag);
  andi(scratch, value, kSmiTagMask);
  Branch(bd, not_smi_label, ne, scratch, Operand(zero_reg));
}


void MacroAssembler::JumpIfNotBothSmi(Register reg1,
                                      Register reg2,
                                      Label* on_not_both_smi) {
  STATIC_ASSERT(kSmiTag == 0);
  DCHECK_EQ(1, kSmiTagMask);
  or_(at, reg1, reg2);
  JumpIfNotSmi(at, on_not_both_smi);
}


void MacroAssembler::JumpIfEitherSmi(Register reg1,
                                     Register reg2,
                                     Label* on_either_smi) {
  STATIC_ASSERT(kSmiTag == 0);
  DCHECK_EQ(1, kSmiTagMask);
  // Both Smi tags must be 1 (not Smi).
  and_(at, reg1, reg2);
  JumpIfSmi(at, on_either_smi);
}


void MacroAssembler::AssertNotSmi(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    andi(at, object, kSmiTagMask);
    Check(ne, kOperandIsASmi, at, Operand(zero_reg));
  }
}


void MacroAssembler::AssertSmi(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    andi(at, object, kSmiTagMask);
    Check(eq, kOperandIsASmi, at, Operand(zero_reg));
  }
}


void MacroAssembler::AssertString(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    SmiTst(object, t0);
    Check(ne, kOperandIsASmiAndNotAString, t0, Operand(zero_reg));
    push(object);
    lw(object, FieldMemOperand(object, HeapObject::kMapOffset));
    lbu(object, FieldMemOperand(object, Map::kInstanceTypeOffset));
    Check(lo, kOperandIsNotAString, object, Operand(FIRST_NONSTRING_TYPE));
    pop(object);
  }
}


void MacroAssembler::AssertName(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    SmiTst(object, t0);
    Check(ne, kOperandIsASmiAndNotAName, t0, Operand(zero_reg));
    push(object);
    lw(object, FieldMemOperand(object, HeapObject::kMapOffset));
    lbu(object, FieldMemOperand(object, Map::kInstanceTypeOffset));
    Check(le, kOperandIsNotAName, object, Operand(LAST_NAME_TYPE));
    pop(object);
  }
}


void MacroAssembler::AssertUndefinedOrAllocationSite(Register object,
                                                     Register scratch) {
  if (emit_debug_code()) {
    Label done_checking;
    AssertNotSmi(object);
    LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
    Branch(&done_checking, eq, object, Operand(scratch));
    push(object);
    lw(object, FieldMemOperand(object, HeapObject::kMapOffset));
    LoadRoot(scratch, Heap::kAllocationSiteMapRootIndex);
    Assert(eq, kExpectedUndefinedOrCell, object, Operand(scratch));
    pop(object);
    bind(&done_checking);
  }
}


void MacroAssembler::AssertIsRoot(Register reg, Heap::RootListIndex index) {
  if (emit_debug_code()) {
    DCHECK(!reg.is(at));
    LoadRoot(at, index);
    Check(eq, kHeapNumberMapRegisterClobbered, reg, Operand(at));
  }
}


void MacroAssembler::JumpIfNotHeapNumber(Register object,
                                         Register heap_number_map,
                                         Register scratch,
                                         Label* on_not_heap_number) {
  lw(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
  AssertIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
  Branch(on_not_heap_number, ne, scratch, Operand(heap_number_map));
}


void MacroAssembler::LookupNumberStringCache(Register object,
                                             Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Register scratch3,
                                             Label* not_found) {
  // Use of registers. Register result is used as a temporary.
  Register number_string_cache = result;
  Register mask = scratch3;

  // Load the number string cache.
  LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex);

  // Make the hash mask from the length of the number string cache. It
  // contains two elements (number and string) for each cache entry.
  lw(mask, FieldMemOperand(number_string_cache, FixedArray::kLengthOffset));
  // Divide length by two (length is a smi).
  sra(mask, mask, kSmiTagSize + 1);
  Addu(mask, mask, -1);  // Make mask.

  // Calculate the entry in the number string cache. The hash value in the
  // number string cache for smis is just the smi value, and the hash for
  // doubles is the xor of the upper and lower words. See
  // Heap::GetNumberStringCache.
  Label is_smi;
  Label load_result_from_cache;
  JumpIfSmi(object, &is_smi);
  CheckMap(object,
           scratch1,
           Heap::kHeapNumberMapRootIndex,
           not_found,
           DONT_DO_SMI_CHECK);

  STATIC_ASSERT(8 == kDoubleSize);
  Addu(scratch1,
       object,
       Operand(HeapNumber::kValueOffset - kHeapObjectTag));
  lw(scratch2, MemOperand(scratch1, kPointerSize));
  lw(scratch1, MemOperand(scratch1, 0));
  Xor(scratch1, scratch1, Operand(scratch2));
  And(scratch1, scratch1, Operand(mask));

  // Calculate address of entry in string cache: each entry consists
  // of two pointer sized fields.
  sll(scratch1, scratch1, kPointerSizeLog2 + 1);
  Addu(scratch1, number_string_cache, scratch1);

  Register probe = mask;
  lw(probe, FieldMemOperand(scratch1, FixedArray::kHeaderSize));
  JumpIfSmi(probe, not_found);
  ldc1(f12, FieldMemOperand(object, HeapNumber::kValueOffset));
  ldc1(f14, FieldMemOperand(probe, HeapNumber::kValueOffset));
  BranchF(&load_result_from_cache, NULL, eq, f12, f14);
  Branch(not_found);

  bind(&is_smi);
  Register scratch = scratch1;
  sra(scratch, object, 1);  // Shift away the tag.
  And(scratch, mask, Operand(scratch));

  // Calculate address of entry in string cache: each entry consists
  // of two pointer sized fields.
  sll(scratch, scratch, kPointerSizeLog2 + 1);
  Addu(scratch, number_string_cache, scratch);

  // Check if the entry is the smi we are looking for.
  lw(probe, FieldMemOperand(scratch, FixedArray::kHeaderSize));
  Branch(not_found, ne, object, Operand(probe));

  // Get the result from the cache.
  bind(&load_result_from_cache);
  lw(result, FieldMemOperand(scratch, FixedArray::kHeaderSize + kPointerSize));

  IncrementCounter(isolate()->counters()->number_to_string_native(),
                   1,
                   scratch1,
                   scratch2);
}


void MacroAssembler::JumpIfNonSmisNotBothSequentialOneByteStrings(
    Register first, Register second, Register scratch1, Register scratch2,
    Label* failure) {
  // Test that both first and second are sequential one-byte strings.
  // Assume that they are non-smis.
  lw(scratch1, FieldMemOperand(first, HeapObject::kMapOffset));
  lw(scratch2, FieldMemOperand(second, HeapObject::kMapOffset));
  lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  lbu(scratch2, FieldMemOperand(scratch2, Map::kInstanceTypeOffset));

  JumpIfBothInstanceTypesAreNotSequentialOneByte(scratch1, scratch2, scratch1,
                                                 scratch2, failure);
}


void MacroAssembler::JumpIfNotBothSequentialOneByteStrings(Register first,
                                                           Register second,
                                                           Register scratch1,
                                                           Register scratch2,
                                                           Label* failure) {
  // Check that neither is a smi.
  STATIC_ASSERT(kSmiTag == 0);
  And(scratch1, first, Operand(second));
  JumpIfSmi(scratch1, failure);
  JumpIfNonSmisNotBothSequentialOneByteStrings(first, second, scratch1,
                                               scratch2, failure);
}


void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialOneByte(
    Register first, Register second, Register scratch1, Register scratch2,
    Label* failure) {
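  // A single andi per operand tests three properties at once: the
  // is-not-string bit must be clear, and the representation and encoding
  // bits must match the sequential one-byte pattern exactly.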
  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask;
  const int kFlatOneByteStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;
  DCHECK(kFlatOneByteStringTag <= 0xffff);  // Ensure this fits 16-bit immed.
  andi(scratch1, first, kFlatOneByteStringMask);
  Branch(failure, ne, scratch1, Operand(kFlatOneByteStringTag));
  andi(scratch2, second, kFlatOneByteStringMask);
  Branch(failure, ne, scratch2, Operand(kFlatOneByteStringTag));
}


void MacroAssembler::JumpIfInstanceTypeIsNotSequentialOneByte(Register type,
                                                              Register scratch,
                                                              Label* failure) {
  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask;
  const int kFlatOneByteStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;
  And(scratch, type, Operand(kFlatOneByteStringMask));
  Branch(failure, ne, scratch, Operand(kFlatOneByteStringTag));
}


static const int kRegisterPassedArguments = 4;

int MacroAssembler::CalculateStackPassedWords(int num_reg_arguments,
                                              int num_double_arguments) {
  int stack_passed_words = 0;
  num_reg_arguments += 2 * num_double_arguments;

  // Up to four simple arguments are passed in registers a0..a3.
  if (num_reg_arguments > kRegisterPassedArguments) {
    stack_passed_words += num_reg_arguments - kRegisterPassedArguments;
  }
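  // The MIPS o32 calling convention also reserves four "home" slots that the
  // callee may use to spill a0..a3; kCArgSlotCount accounts for them even
  // when fewer than four register arguments are passed.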
  stack_passed_words += kCArgSlotCount;
  return stack_passed_words;
}


void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
                                               Register index,
                                               Register value,
                                               Register scratch,
                                               uint32_t encoding_mask) {
  Label is_object;
  SmiTst(string, at);
  Check(ne, kNonObject, at, Operand(zero_reg));

  lw(at, FieldMemOperand(string, HeapObject::kMapOffset));
  lbu(at, FieldMemOperand(at, Map::kInstanceTypeOffset));

  andi(at, at, kStringRepresentationMask | kStringEncodingMask);
  li(scratch, Operand(encoding_mask));
  Check(eq, kUnexpectedStringType, at, Operand(scratch));

  // The index is assumed to be untagged coming in, tag it to compare with the
  // string length without using a temp register, it is restored at the end of
  // this function.
  Label index_tag_ok, index_tag_bad;
  TrySmiTag(index, scratch, &index_tag_bad);
  Branch(&index_tag_ok);
  bind(&index_tag_bad);
  Abort(kIndexIsTooLarge);
  bind(&index_tag_ok);

  lw(at, FieldMemOperand(string, String::kLengthOffset));
  Check(lt, kIndexIsTooLarge, index, Operand(at));

  DCHECK(Smi::FromInt(0) == 0);
  Check(ge, kIndexIsNegative, index, Operand(zero_reg));

  SmiUntag(index, index);
}


void MacroAssembler::PrepareCallCFunction(int num_reg_arguments,
                                          int num_double_arguments,
                                          Register scratch) {
  int frame_alignment = ActivationFrameAlignment();

  // Up to four simple arguments are passed in registers a0..a3.
  // Those four arguments must have reserved argument slots on the stack for
  // mips, even though those argument slots are not normally used.
  // Remaining arguments are pushed on the stack, above (higher address than)
  // the argument slots.
  int stack_passed_arguments = CalculateStackPassedWords(
      num_reg_arguments, num_double_arguments);
  if (frame_alignment > kPointerSize) {
    // Make stack end at alignment and make room for num_arguments - 4 words
    // and the original value of sp.
    mov(scratch, sp);
    Subu(sp, sp, Operand((stack_passed_arguments + 1) * kPointerSize));
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    And(sp, sp, Operand(-frame_alignment));
    sw(scratch, MemOperand(sp, stack_passed_arguments * kPointerSize));
  } else {
    Subu(sp, sp, Operand(stack_passed_arguments * kPointerSize));
  }
}


void MacroAssembler::PrepareCallCFunction(int num_reg_arguments,
                                          Register scratch) {
  PrepareCallCFunction(num_reg_arguments, 0, scratch);
}


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_reg_arguments,
                                   int num_double_arguments) {
  li(t8, Operand(function));
  CallCFunctionHelper(t8, num_reg_arguments, num_double_arguments);
}


void MacroAssembler::CallCFunction(Register function,
                                   int num_reg_arguments,
                                   int num_double_arguments) {
  CallCFunctionHelper(function, num_reg_arguments, num_double_arguments);
}


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  CallCFunction(function, num_arguments, 0);
}


void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  CallCFunction(function, num_arguments, 0);
}


void MacroAssembler::CallCFunctionHelper(Register function,
                                         int num_reg_arguments,
                                         int num_double_arguments) {
  DCHECK(has_frame());
  // Make sure that the stack is aligned before calling a C function unless
  // running in the simulator. The simulator has its own alignment check which
  // provides more information.
  // The argument slots are presumed to have been set up by
  // PrepareCallCFunction. The C function must be called via t9, for mips ABI.

#if V8_HOST_ARCH_MIPS
  if (emit_debug_code()) {
    int frame_alignment = base::OS::ActivationFrameAlignment();
    int frame_alignment_mask = frame_alignment - 1;
    if (frame_alignment > kPointerSize) {
      DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
      Label alignment_as_expected;
      And(at, sp, Operand(frame_alignment_mask));
      Branch(&alignment_as_expected, eq, at, Operand(zero_reg));
      // Don't use Check here, as it will call Runtime_Abort possibly
      // re-entering here.
      stop("Unexpected alignment in CallCFunction");
      bind(&alignment_as_expected);
    }
  }
#endif  // V8_HOST_ARCH_MIPS

  // Just call directly. The function called cannot cause a GC, or
  // allow preemption, so the return address in the link register
  // stays correct.

  if (!function.is(t9)) {
    mov(t9, function);
    function = t9;
  }

  Call(function);

  int stack_passed_arguments = CalculateStackPassedWords(
      num_reg_arguments, num_double_arguments);

  if (base::OS::ActivationFrameAlignment() > kPointerSize) {
    lw(sp, MemOperand(sp, stack_passed_arguments * kPointerSize));
  } else {
    Addu(sp, sp, Operand(stack_passed_arguments * kPointerSize));
  }
}


#undef BRANCH_ARGS_CHECK


void MacroAssembler::PatchRelocatedValue(Register li_location,
                                         Register scratch,
                                         Register new_value) {
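  // A 32-bit li is encoded as a lui (upper 16 bits) followed by an ori
  // (lower 16 bits). Patching rewrites the immediate field of each
  // instruction in place and then flushes the I-cache so the new constant
  // is fetched.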
  lw(scratch, MemOperand(li_location));
  // At this point scratch is a lui(at, ...) instruction.
  if (emit_debug_code()) {
    And(scratch, scratch, kOpcodeMask);
    Check(eq, kTheInstructionToPatchShouldBeALui,
          scratch, Operand(LUI));
    lw(scratch, MemOperand(li_location));
  }
  srl(t9, new_value, kImm16Bits);
  Ins(scratch, t9, 0, kImm16Bits);
  sw(scratch, MemOperand(li_location));

  lw(scratch, MemOperand(li_location, kInstrSize));
  // scratch is now ori(at, ...).
  if (emit_debug_code()) {
    And(scratch, scratch, kOpcodeMask);
    Check(eq, kTheInstructionToPatchShouldBeAnOri,
          scratch, Operand(ORI));
    lw(scratch, MemOperand(li_location, kInstrSize));
  }
  Ins(scratch, new_value, 0, kImm16Bits);
  sw(scratch, MemOperand(li_location, kInstrSize));

  // Update the I-cache so the new lui and ori can be executed.
  FlushICache(li_location, 2);
}

void MacroAssembler::GetRelocatedValue(Register li_location,
                                       Register value,
                                       Register scratch) {
  lw(value, MemOperand(li_location));
  if (emit_debug_code()) {
    And(value, value, kOpcodeMask);
    Check(eq, kTheInstructionShouldBeALui,
          value, Operand(LUI));
    lw(value, MemOperand(li_location));
  }

  // value now holds a lui instruction. Extract the immediate.
  sll(value, value, kImm16Bits);

  lw(scratch, MemOperand(li_location, kInstrSize));
  if (emit_debug_code()) {
    And(scratch, scratch, kOpcodeMask);
    Check(eq, kTheInstructionShouldBeAnOri,
          scratch, Operand(ORI));
    lw(scratch, MemOperand(li_location, kInstrSize));
  }
  // "scratch" now holds an ori instruction. Extract the immediate.
  andi(scratch, scratch, kImm16Mask);

  // Merge the results.
  or_(value, value, scratch);
}


void MacroAssembler::CheckPageFlag(
    Register object,
    Register scratch,
    int mask,
    Condition cc,
    Label* condition_met) {
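  // Masking off the low bits of any address inside a page yields the
  // MemoryChunk header at the page start, whose flags word can then be
  // tested directly.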
  And(scratch, object, Operand(~Page::kPageAlignmentMask));
  lw(scratch, MemOperand(scratch, MemoryChunk::kFlagsOffset));
  And(scratch, scratch, Operand(mask));
  Branch(condition_met, cc, scratch, Operand(zero_reg));
}


void MacroAssembler::CheckMapDeprecated(Handle<Map> map,
                                        Register scratch,
                                        Label* if_deprecated) {
  if (map->CanBeDeprecated()) {
    li(scratch, Operand(map));
    lw(scratch, FieldMemOperand(scratch, Map::kBitField3Offset));
    And(scratch, scratch, Operand(Map::Deprecated::kMask));
    Branch(if_deprecated, ne, scratch, Operand(zero_reg));
  }
}


void MacroAssembler::JumpIfBlack(Register object,
                                 Register scratch0,
                                 Register scratch1,
                                 Label* on_black) {
  HasColor(object, scratch0, scratch1, on_black, 1, 0);  // kBlackBitPattern.
  DCHECK(strcmp(Marking::kBlackBitPattern, "10") == 0);
}


void MacroAssembler::HasColor(Register object,
                              Register bitmap_scratch,
                              Register mask_scratch,
                              Label* has_color,
                              int first_bit,
                              int second_bit) {
  DCHECK(!AreAliased(object, bitmap_scratch, mask_scratch, t8));
  DCHECK(!AreAliased(object, bitmap_scratch, mask_scratch, t9));

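  // Roughly: each object has a two-bit mark in the page's marking bitmap and
  // mask_scratch selects the first bit. The word_boundary path below covers
  // the case where the pair straddles two bitmap cells, making the second
  // bit the lowest bit of the following word.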
5652 GetMarkBits(object, bitmap_scratch, mask_scratch);
5653
5654 Label other_color, word_boundary;
5655 lw(t9, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize));
5656 And(t8, t9, Operand(mask_scratch));
5657 Branch(&other_color, first_bit == 1 ? eq : ne, t8, Operand(zero_reg));
5658 // Shift left 1 by adding.
5659 Addu(mask_scratch, mask_scratch, Operand(mask_scratch));
5660 Branch(&word_boundary, eq, mask_scratch, Operand(zero_reg));
5661 And(t8, t9, Operand(mask_scratch));
5662 Branch(has_color, second_bit == 1 ? ne : eq, t8, Operand(zero_reg));
5663 jmp(&other_color);
5664
5665 bind(&word_boundary);
5666 lw(t9, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize));
5667 And(t9, t9, Operand(1));
5668 Branch(has_color, second_bit == 1 ? ne : eq, t9, Operand(zero_reg));
5669 bind(&other_color);
5670}
5671
5672
5673// Detect some, but not all, common pointer-free objects. This is used by the
5674// incremental write barrier which doesn't care about oddballs (they are always
5675// marked black immediately so this code is not hit).
5676void MacroAssembler::JumpIfDataObject(Register value,
5677 Register scratch,
5678 Label* not_data_object) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005679 DCHECK(!AreAliased(value, scratch, t8, no_reg));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01005680 Label is_data_object;
5681 lw(scratch, FieldMemOperand(value, HeapObject::kMapOffset));
5682 LoadRoot(t8, Heap::kHeapNumberMapRootIndex);
5683 Branch(&is_data_object, eq, t8, Operand(scratch));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005684 DCHECK(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1);
5685 DCHECK(kNotStringTag == 0x80 && kIsNotStringMask == 0x80);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01005686 // If it's a string and it's not a cons string then it's an object containing
5687 // no GC pointers.
5688 lbu(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
5689 And(t8, scratch, Operand(kIsIndirectStringMask | kIsNotStringMask));
5690 Branch(not_data_object, ne, t8, Operand(zero_reg));
5691 bind(&is_data_object);
5692}
5693
5694
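// Computes the bitmap cell address and bit mask for an object's first mark
// bit. The marking bitmap holds one bit per pointer-sized word of the page,
// 32 bits per cell, so roughly:
//   bit  = (addr >> kPointerSizeLog2) & (Bitmap::kBitsPerCell - 1)
//   cell = the address bits above that, i.e. addr[kLowBits, kPageSizeBits)
// bitmap_reg ends up as the page start plus the cell's byte offset (callers
// add MemoryChunk::kHeaderSize when loading), and mask_reg holds 1 << bit.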
void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  DCHECK(!AreAliased(addr_reg, bitmap_reg, mask_reg, no_reg));
  And(bitmap_reg, addr_reg, Operand(~Page::kPageAlignmentMask));
  Ext(mask_reg, addr_reg, kPointerSizeLog2, Bitmap::kBitsPerCellLog2);
  const int kLowBits = kPointerSizeLog2 + Bitmap::kBitsPerCellLog2;
  Ext(t8, addr_reg, kLowBits, kPageSizeBits - kLowBits);
  sll(t8, t8, kPointerSizeLog2);
  Addu(bitmap_reg, bitmap_reg, t8);
  li(t8, Operand(1));
  sllv(mask_reg, t8, mask_reg);
}

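// If value is white, mark it black. Only pointer-free "data" objects
// (currently HeapNumbers and non-indirect strings) are handled here; anything
// else white escapes to value_is_white_and_not_data. Since white is 00 and
// black is 10, setting the first mark bit is enough, and the object's size is
// added to the page's live-bytes counter to keep the accounting consistent.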
void MacroAssembler::EnsureNotWhite(
    Register value,
    Register bitmap_scratch,
    Register mask_scratch,
    Register load_scratch,
    Label* value_is_white_and_not_data) {
  DCHECK(!AreAliased(value, bitmap_scratch, mask_scratch, t8));
  GetMarkBits(value, bitmap_scratch, mask_scratch);

  // If the value is black or grey we don't need to do anything.
  DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0);
  DCHECK(strcmp(Marking::kBlackBitPattern, "10") == 0);
  DCHECK(strcmp(Marking::kGreyBitPattern, "11") == 0);
  DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

  Label done;

  // Since both black and grey have a 1 in the first position and white does
  // not have a 1 there we only need to check one bit.
  lw(load_scratch, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize));
  And(t8, mask_scratch, load_scratch);
  Branch(&done, ne, t8, Operand(zero_reg));

  if (emit_debug_code()) {
    // Check for impossible bit pattern.
    Label ok;
    // sll may overflow, making the check conservative.
    sll(t8, mask_scratch, 1);
    And(t8, load_scratch, t8);
    Branch(&ok, eq, t8, Operand(zero_reg));
    stop("Impossible marking bit pattern");
    bind(&ok);
  }

  // Value is white. We check whether it is data that doesn't need scanning.
  // Currently only checks for HeapNumber and non-cons strings.
  Register map = load_scratch;  // Holds map while checking type.
  Register length = load_scratch;  // Holds length of object after testing type.
  Label is_data_object;

  // Check for heap-number.
  lw(map, FieldMemOperand(value, HeapObject::kMapOffset));
  LoadRoot(t8, Heap::kHeapNumberMapRootIndex);
  {
    Label skip;
    Branch(&skip, ne, t8, Operand(map));
    li(length, HeapNumber::kSize);
    Branch(&is_data_object);
    bind(&skip);
  }

  // Check for strings.
  DCHECK(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1);
  DCHECK(kNotStringTag == 0x80 && kIsNotStringMask == 0x80);
  // If it's a string and it's not a cons string then it's an object containing
  // no GC pointers.
  Register instance_type = load_scratch;
  lbu(instance_type, FieldMemOperand(map, Map::kInstanceTypeOffset));
  And(t8, instance_type, Operand(kIsIndirectStringMask | kIsNotStringMask));
  Branch(value_is_white_and_not_data, ne, t8, Operand(zero_reg));
  // It's a non-indirect (non-cons and non-slice) string.
  // If it's external, the length is just ExternalString::kSize.
  // Otherwise it's String::kHeaderSize + string->length() * (1 or 2).
  // External strings are the only ones with the kExternalStringTag bit
  // set.
  DCHECK_EQ(0, kSeqStringTag & kExternalStringTag);
  DCHECK_EQ(0, kConsStringTag & kExternalStringTag);
  And(t8, instance_type, Operand(kExternalStringTag));
  {
    Label skip;
    Branch(&skip, eq, t8, Operand(zero_reg));
    li(length, ExternalString::kSize);
    Branch(&is_data_object);
    bind(&skip);
  }

  // Sequential string, either Latin1 or UC16.
  // For Latin1 (char-size of 1) we shift the smi tag away to get the length.
  // For UC16 (char-size of 2) we just leave the smi tag in place, thereby
  // getting the length multiplied by 2.
  DCHECK(kOneByteStringTag == 4 && kStringEncodingMask == 4);
  DCHECK(kSmiTag == 0 && kSmiTagSize == 1);
  lw(t9, FieldMemOperand(value, String::kLengthOffset));
  And(t8, instance_type, Operand(kStringEncodingMask));
  {
    Label skip;
    Branch(&skip, eq, t8, Operand(zero_reg));
    srl(t9, t9, 1);
    bind(&skip);
  }
  Addu(length, t9, Operand(SeqString::kHeaderSize + kObjectAlignmentMask));
  And(length, length, Operand(~kObjectAlignmentMask));

  bind(&is_data_object);
  // Value is a data object, and it is white. Mark it black. Since we know
  // that the object is white we can make it black by flipping one bit.
  lw(t8, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize));
  Or(t8, t8, Operand(mask_scratch));
  sw(t8, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize));

  And(bitmap_scratch, bitmap_scratch, Operand(~Page::kPageAlignmentMask));
  lw(t8, MemOperand(bitmap_scratch, MemoryChunk::kLiveBytesOffset));
  Addu(t8, t8, Operand(length));
  sw(t8, MemOperand(bitmap_scratch, MemoryChunk::kLiveBytesOffset));

  bind(&done);
}


void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  lw(descriptors, FieldMemOperand(map, Map::kDescriptorsOffset));
}


void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
  lw(dst, FieldMemOperand(map, Map::kBitField3Offset));
  DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
}


void MacroAssembler::EnumLength(Register dst, Register map) {
  STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
  lw(dst, FieldMemOperand(map, Map::kBitField3Offset));
  And(dst, dst, Operand(Map::EnumLengthBits::kMask));
  SmiTag(dst);
}

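// Walks the prototype chain from the object in a0 and falls through only if
// the fast for-in path is usable: the receiver's map must carry a valid enum
// cache length, every prototype's enum cache must be empty, and each object's
// elements must be the empty fixed array (or the empty slow element
// dictionary). Any other state jumps to call_runtime; null_value ends the
// walk.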
void MacroAssembler::CheckEnumCache(Register null_value, Label* call_runtime) {
  Register empty_fixed_array_value = t2;
  LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
  Label next, start;
  mov(a2, a0);

  // Check if the enum length field is properly initialized, indicating that
  // there is an enum cache.
  lw(a1, FieldMemOperand(a2, HeapObject::kMapOffset));

  EnumLength(a3, a1);
  Branch(
      call_runtime, eq, a3, Operand(Smi::FromInt(kInvalidEnumCacheSentinel)));

  jmp(&start);

  bind(&next);
  lw(a1, FieldMemOperand(a2, HeapObject::kMapOffset));

  // For all objects but the receiver, check that the cache is empty.
  EnumLength(a3, a1);
  Branch(call_runtime, ne, a3, Operand(Smi::FromInt(0)));

  bind(&start);

  // Check that there are no elements. Register a2 contains the current JS
  // object we've reached through the prototype chain.
  Label no_elements;
  lw(a2, FieldMemOperand(a2, JSObject::kElementsOffset));
  Branch(&no_elements, eq, a2, Operand(empty_fixed_array_value));

  // Second chance, the object may be using the empty slow element dictionary.
  LoadRoot(at, Heap::kEmptySlowElementDictionaryRootIndex);
  Branch(call_runtime, ne, a2, Operand(at));

  bind(&no_elements);
  lw(a2, FieldMemOperand(a1, Map::kPrototypeOffset));
  Branch(&next, ne, a2, Operand(null_value));
}

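// Clamps a signed 32-bit value to [0, 255]. MIPS executes the instruction in
// a branch's delay slot whether or not the branch is taken, so the
// USE_DELAY_SLOT branch below gets the zeroing "for free": if the branch is
// taken the zero is the result; if not, the following mov overwrites it.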
void MacroAssembler::ClampUint8(Register output_reg, Register input_reg) {
  DCHECK(!output_reg.is(input_reg));
  Label done;
  li(output_reg, Operand(255));
  // Normal branch: nop in delay slot.
  Branch(&done, gt, input_reg, Operand(output_reg));
  // Use delay slot in this branch.
  Branch(USE_DELAY_SLOT, &done, lt, input_reg, Operand(zero_reg));
  mov(output_reg, zero_reg);  // In delay slot.
  mov(output_reg, input_reg);  // Value is in range 0..255.
  bind(&done);
}

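// Clamps a double to an integer in [0, 255]: NaN and non-positive inputs
// yield 0, inputs above 255 yield 255, and in-range values are converted
// with cvt.w.d, which rounds according to the current FCSR mode
// (round-to-nearest unless a caller changed it).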
5894void MacroAssembler::ClampDoubleToUint8(Register result_reg,
5895 DoubleRegister input_reg,
5896 DoubleRegister temp_double_reg) {
5897 Label above_zero;
5898 Label done;
5899 Label in_bounds;
5900
5901 Move(temp_double_reg, 0.0);
5902 BranchF(&above_zero, NULL, gt, input_reg, temp_double_reg);
5903
5904 // Double value is less than zero, NaN or Inf, return 0.
5905 mov(result_reg, zero_reg);
5906 Branch(&done);
5907
5908 // Double value is >= 255, return 255.
5909 bind(&above_zero);
5910 Move(temp_double_reg, 255.0);
5911 BranchF(&in_bounds, NULL, le, input_reg, temp_double_reg);
5912 li(result_reg, Operand(255));
5913 Branch(&done);
5914
5915 // In 0-255 range, round and truncate.
5916 bind(&in_bounds);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005917 cvt_w_d(temp_double_reg, input_reg);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01005918 mfc1(result_reg, temp_double_reg);
5919 bind(&done);
5920}
5921
5922
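// An AllocationMemento, if present, sits immediately after the JSArray in new
// space. scratch_reg is pointed just past where such a memento would end; if
// that address falls outside [new-space start, allocation top] there cannot
// be one, otherwise the candidate's map word is compared against the
// allocation memento map.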
void MacroAssembler::TestJSArrayForAllocationMemento(
    Register receiver_reg,
    Register scratch_reg,
    Label* no_memento_found,
    Condition cond,
    Label* allocation_memento_present) {
  ExternalReference new_space_start =
      ExternalReference::new_space_start(isolate());
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());
  Addu(scratch_reg, receiver_reg,
       Operand(JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag));
  Branch(no_memento_found, lt, scratch_reg, Operand(new_space_start));
  li(at, Operand(new_space_allocation_top));
  lw(at, MemOperand(at));
  Branch(no_memento_found, gt, scratch_reg, Operand(at));
  lw(scratch_reg, MemOperand(scratch_reg, -AllocationMemento::kSize));
  if (allocation_memento_present) {
    Branch(allocation_memento_present, cond, scratch_reg,
           Operand(isolate()->factory()->allocation_memento_map()));
  }
}


Register GetRegisterThatIsNotOneOf(Register reg1,
                                   Register reg2,
                                   Register reg3,
                                   Register reg4,
                                   Register reg5,
                                   Register reg6) {
  RegList regs = 0;
  if (reg1.is_valid()) regs |= reg1.bit();
  if (reg2.is_valid()) regs |= reg2.bit();
  if (reg3.is_valid()) regs |= reg3.bit();
  if (reg4.is_valid()) regs |= reg4.bit();
  if (reg5.is_valid()) regs |= reg5.bit();
  if (reg6.is_valid()) regs |= reg6.bit();

  for (int i = 0; i < Register::NumAllocatableRegisters(); i++) {
    Register candidate = Register::FromAllocationIndex(i);
    if (regs & candidate.bit()) continue;
    return candidate;
  }
  UNREACHABLE();
  return no_reg;
}


void MacroAssembler::JumpIfDictionaryInPrototypeChain(
    Register object,
    Register scratch0,
    Register scratch1,
    Label* found) {
  DCHECK(!scratch1.is(scratch0));
  Factory* factory = isolate()->factory();
  Register current = scratch0;
  Label loop_again;

  // Start the walk at the object itself.
  Move(current, object);

  // Loop based on the map going up the prototype chain.
  bind(&loop_again);
  lw(current, FieldMemOperand(current, HeapObject::kMapOffset));
  lb(scratch1, FieldMemOperand(current, Map::kBitField2Offset));
  DecodeField<Map::ElementsKindBits>(scratch1);
  Branch(found, eq, scratch1, Operand(DICTIONARY_ELEMENTS));
  lw(current, FieldMemOperand(current, Map::kPrototypeOffset));
  Branch(&loop_again, ne, current, Operand(factory->null_value()));
}


bool AreAliased(Register reg1,
                Register reg2,
                Register reg3,
                Register reg4,
                Register reg5,
                Register reg6,
                Register reg7,
                Register reg8) {
  int n_of_valid_regs = reg1.is_valid() + reg2.is_valid() +
      reg3.is_valid() + reg4.is_valid() + reg5.is_valid() + reg6.is_valid() +
      reg7.is_valid() + reg8.is_valid();

  RegList regs = 0;
  if (reg1.is_valid()) regs |= reg1.bit();
  if (reg2.is_valid()) regs |= reg2.bit();
  if (reg3.is_valid()) regs |= reg3.bit();
  if (reg4.is_valid()) regs |= reg4.bit();
  if (reg5.is_valid()) regs |= reg5.bit();
  if (reg6.is_valid()) regs |= reg6.bit();
  if (reg7.is_valid()) regs |= reg7.bit();
  if (reg8.is_valid()) regs |= reg8.bit();
  int n_of_non_aliasing_regs = NumRegs(regs);

  return n_of_valid_regs != n_of_non_aliasing_regs;
}

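// RAII helper for rewriting already-generated code in place. A minimal usage
// sketch (assuming the header's default FLUSH argument for flush_cache):
//
//   CodePatcher patcher(address, 2);  // Patch two instructions.
//   patcher.masm()->lui(at, 0x1234);
//   patcher.masm()->ori(at, at, 0x5678);
//   // ~CodePatcher flushes the I-cache over the patched range and verifies
//   // that exactly the declared number of instructions was emitted.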
CodePatcher::CodePatcher(byte* address,
                         int instructions,
                         FlushICache flush_cache)
    : address_(address),
      size_(instructions * Assembler::kInstrSize),
      masm_(NULL, address, size_ + Assembler::kGap),
      flush_cache_(flush_cache) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  if (flush_cache_ == FLUSH) {
    CpuFeatures::FlushICache(address_, size_);
  }

  // Check that the code was patched as expected.
  DCHECK(masm_.pc_ == address_ + size_);
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


void CodePatcher::Emit(Instr instr) {
  masm()->emit(instr);
}


void CodePatcher::Emit(Address addr) {
  masm()->emit(reinterpret_cast<Instr>(addr));
}


void CodePatcher::ChangeBranchCondition(Condition cond) {
  Instr instr = Assembler::instr_at(masm_.pc_);
  DCHECK(Assembler::IsBranch(instr));
  uint32_t opcode = Assembler::GetOpcodeField(instr);
  // Currently only the 'eq' and 'ne' cond values are supported and the simple
  // branch instructions (with opcode being the branch type).
  // There are some special cases (see Assembler::IsBranch()) so extending this
  // would be tricky.
  DCHECK(opcode == BEQ ||
         opcode == BNE ||
         opcode == BLEZ ||
         opcode == BGTZ ||
         opcode == BEQL ||
         opcode == BNEL ||
         opcode == BLEZL ||
         opcode == BGTZL);
  opcode = (cond == eq) ? BEQ : BNE;
  instr = (instr & ~kOpcodeMask) | opcode;
  masm_.emit(instr);
}

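// Truncating signed division by a constant via the usual magic-number
// multiplication (Granlund & Montgomery): take the high word of a
// 32x32->64-bit multiply, apply a correction when the multiplier wrapped
// negative, shift, then add the dividend's sign bit to round toward zero.
// Worked example for divisor 3 (multiplier 0x55555556, shift 0):
//   dividend -7: hi(-7 * 0x55555556) = -3; -3 + sign(= 1) = -2 == -7 / 3.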
void MacroAssembler::TruncatingDiv(Register result,
                                   Register dividend,
                                   int32_t divisor) {
  DCHECK(!dividend.is(result));
  DCHECK(!dividend.is(at));
  DCHECK(!result.is(at));
  base::MagicNumbersForDivision<uint32_t> mag =
      base::SignedDivisionByConstant(static_cast<uint32_t>(divisor));
  li(at, Operand(mag.multiplier));
  Mulh(result, dividend, Operand(at));
  bool neg = (mag.multiplier & (static_cast<uint32_t>(1) << 31)) != 0;
  if (divisor > 0 && neg) {
    Addu(result, result, Operand(dividend));
  }
  if (divisor < 0 && !neg && mag.multiplier > 0) {
    Subu(result, result, Operand(dividend));
  }
  if (mag.shift > 0) sra(result, result, mag.shift);
  srl(at, dividend, 31);
  Addu(result, result, Operand(at));
}


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_MIPS