// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include <limits.h>  // For LONG_MIN, LONG_MAX.

#include "v8.h"

#if defined(V8_TARGET_ARCH_MIPS)

#include "bootstrapper.h"
#include "codegen.h"
#include "debug.h"
#include "runtime.h"

namespace v8 {
namespace internal {

MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true) {
  if (isolate() != NULL) {
    code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
                                  isolate());
  }
}


void MacroAssembler::LoadRoot(Register destination,
                              Heap::RootListIndex index) {
  lw(destination, MemOperand(s6, index << kPointerSizeLog2));
}


void MacroAssembler::LoadRoot(Register destination,
                              Heap::RootListIndex index,
                              Condition cond,
                              Register src1, const Operand& src2) {
  Branch(2, NegateCondition(cond), src1, src2);
  lw(destination, MemOperand(s6, index << kPointerSizeLog2));
}


void MacroAssembler::StoreRoot(Register source,
                               Heap::RootListIndex index) {
  sw(source, MemOperand(s6, index << kPointerSizeLog2));
}


void MacroAssembler::StoreRoot(Register source,
                               Heap::RootListIndex index,
                               Condition cond,
                               Register src1, const Operand& src2) {
  Branch(2, NegateCondition(cond), src1, src2);
  sw(source, MemOperand(s6, index << kPointerSizeLog2));
}
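

// Note (illustrative, not part of the original file): s6 is reserved as the
// root-array pointer on MIPS, so a root access is a single lw/sw at a fixed
// offset from s6. The conditional variants depend on that access being
// exactly one instruction: Branch(2, NegateCondition(cond), src1, src2)
// hops over it when the condition does not hold, e.g.
//   LoadRoot(t0, Heap::kUndefinedValueRootIndex, eq, a0, Operand(zero_reg));
// loads the root only if a0 == 0.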


void MacroAssembler::RecordWriteHelper(Register object,
                                       Register address,
                                       Register scratch) {
  if (emit_debug_code()) {
    // Check that the object is not in new space.
    Label not_in_new_space;
    InNewSpace(object, scratch, ne, &not_in_new_space);
    Abort("new-space object passed to RecordWriteHelper");
    bind(&not_in_new_space);
  }

  // Calculate page address: Clear bits from 0 to kPageSizeBits.
  if (mips32r2) {
    Ins(object, zero_reg, 0, kPageSizeBits);
  } else {
    // The Ins macro is slow on r1, so use shifts instead.
    srl(object, object, kPageSizeBits);
    sll(object, object, kPageSizeBits);
  }

  // Calculate region number.
  Ext(address, address, Page::kRegionSizeLog2,
      kPageSizeBits - Page::kRegionSizeLog2);

  // Mark region dirty.
  lw(scratch, MemOperand(object, Page::kDirtyFlagOffset));
  li(at, Operand(1));
  sllv(at, at, address);
  or_(scratch, scratch, at);
  sw(scratch, MemOperand(object, Page::kDirtyFlagOffset));
}
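

// Illustrative sketch (not part of the original file): clearing bits
// [0, kPageSizeBits) rounds the object pointer down to its page start,
//   page = object & ~((1 << kPageSizeBits) - 1);
// and the Ext above computes the region index of the written slot within
// that page,
//   region = (address >> Page::kRegionSizeLog2) &
//            ((1 << (kPageSizeBits - Page::kRegionSizeLog2)) - 1);
// which selects the bit to set in the page's dirty-region word.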


// Push and pop all registers that can hold pointers.
void MacroAssembler::PushSafepointRegisters() {
  // Safepoints expect a block of kNumSafepointRegisters values on the
  // stack, so adjust the stack for unsaved registers.
  const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters;
  ASSERT(num_unsaved >= 0);
  Subu(sp, sp, Operand(num_unsaved * kPointerSize));
  MultiPush(kSafepointSavedRegisters);
}


void MacroAssembler::PopSafepointRegisters() {
  const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters;
  MultiPop(kSafepointSavedRegisters);
  Addu(sp, sp, Operand(num_unsaved * kPointerSize));
}


void MacroAssembler::PushSafepointRegistersAndDoubles() {
  PushSafepointRegisters();
  Subu(sp, sp, Operand(FPURegister::kNumAllocatableRegisters * kDoubleSize));
  for (int i = 0; i < FPURegister::kNumAllocatableRegisters; i += 2) {
    FPURegister reg = FPURegister::FromAllocationIndex(i);
    sdc1(reg, MemOperand(sp, i * kDoubleSize));
  }
}


void MacroAssembler::PopSafepointRegistersAndDoubles() {
  for (int i = 0; i < FPURegister::kNumAllocatableRegisters; i += 2) {
    FPURegister reg = FPURegister::FromAllocationIndex(i);
    ldc1(reg, MemOperand(sp, i * kDoubleSize));
  }
  Addu(sp, sp, Operand(FPURegister::kNumAllocatableRegisters * kDoubleSize));
  PopSafepointRegisters();
}


void MacroAssembler::StoreToSafepointRegistersAndDoublesSlot(Register src,
                                                             Register dst) {
  sw(src, SafepointRegistersAndDoublesSlot(dst));
}


void MacroAssembler::StoreToSafepointRegisterSlot(Register src, Register dst) {
  sw(src, SafepointRegisterSlot(dst));
}


void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  lw(dst, SafepointRegisterSlot(src));
}


int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
  // The registers are pushed starting with the highest encoding,
  // which means that lowest encodings are closest to the stack pointer.
  return kSafepointRegisterStackIndexMap[reg_code];
}


MemOperand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return MemOperand(sp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}


MemOperand MacroAssembler::SafepointRegistersAndDoublesSlot(Register reg) {
  // General purpose registers are pushed last on the stack.
  int doubles_size = FPURegister::kNumAllocatableRegisters * kDoubleSize;
  int register_offset = SafepointRegisterStackIndex(reg.code()) * kPointerSize;
  return MemOperand(sp, doubles_size + register_offset);
}
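

// Layout sketch (illustrative, not part of the original file) after
// PushSafepointRegistersAndDoubles(), with the stack growing downwards:
//
//   [pad slots for the unsaved safepoint registers]   <-- higher addresses
//   [saved GP registers, highest encoding first]
//   [allocatable double registers]                    <-- sp
//
// This is why SafepointRegistersAndDoublesSlot() adds doubles_size to the
// plain GP register offset.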


void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                Label* branch) {
  ASSERT(cc == eq || cc == ne);
  And(scratch, object, Operand(ExternalReference::new_space_mask(isolate())));
  Branch(branch, cc, scratch,
         Operand(ExternalReference::new_space_start(isolate())));
}


// Will clobber 4 registers: object, scratch0, scratch1, at. The
// register 'object' contains a heap object pointer. The heap object
// tag is shifted away.
void MacroAssembler::RecordWrite(Register object,
                                 Operand offset,
                                 Register scratch0,
                                 Register scratch1) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are cp.
  ASSERT(!object.is(cp) && !scratch0.is(cp) && !scratch1.is(cp));

  Label done;

  // First, test that the object is not in the new space. We cannot set
  // region marks for new space pages.
  InNewSpace(object, scratch0, eq, &done);

  // Add offset into the object.
  Addu(scratch0, object, offset);

  // Record the actual write.
  RecordWriteHelper(object, scratch0, scratch1);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    li(object, Operand(BitCast<int32_t>(kZapValue)));
    li(scratch0, Operand(BitCast<int32_t>(kZapValue)));
    li(scratch1, Operand(BitCast<int32_t>(kZapValue)));
  }
}


// Will clobber 4 registers: object, address, scratch, at. The
// register 'object' contains a heap object pointer. The heap object
// tag is shifted away.
void MacroAssembler::RecordWrite(Register object,
                                 Register address,
                                 Register scratch) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are cp.
  ASSERT(!object.is(cp) && !address.is(cp) && !scratch.is(cp));

  Label done;

  // First, test that the object is not in the new space. We cannot set
  // region marks for new space pages.
  InNewSpace(object, scratch, eq, &done);

  // Record the actual write.
  RecordWriteHelper(object, address, scratch);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    li(object, Operand(BitCast<int32_t>(kZapValue)));
    li(address, Operand(BitCast<int32_t>(kZapValue)));
    li(scratch, Operand(BitCast<int32_t>(kZapValue)));
  }
}


// -----------------------------------------------------------------------------
// Allocation support.


void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));
  ASSERT(!holder_reg.is(at));
  ASSERT(!scratch.is(at));

  // Load current lexical context from the stack frame.
  lw(scratch, MemOperand(fp, StandardFrameConstants::kContextOffset));
  // In debug mode, make sure the lexical context is set.
#ifdef DEBUG
  Check(ne, "we should not have an empty lexical context",
        scratch, Operand(zero_reg));
#endif

  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  lw(scratch, FieldMemOperand(scratch, offset));
  lw(scratch, FieldMemOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check the context is a global context.
  if (emit_debug_code()) {
    // TODO(119): Avoid push(holder_reg)/pop(holder_reg).
    push(holder_reg);  // Temporarily save holder on the stack.
    // Read the first word and compare to the global_context_map.
    lw(holder_reg, FieldMemOperand(scratch, HeapObject::kMapOffset));
    LoadRoot(at, Heap::kGlobalContextMapRootIndex);
    Check(eq, "JSGlobalObject::global_context should be a global context.",
          holder_reg, Operand(at));
    pop(holder_reg);  // Restore holder.
  }

  // Check if both contexts are the same.
  lw(at, FieldMemOperand(holder_reg, JSGlobalProxy::kContextOffset));
  Branch(&same_contexts, eq, scratch, Operand(at));

  // Check the context is a global context.
  if (emit_debug_code()) {
    // TODO(119): Avoid push(holder_reg)/pop(holder_reg).
    push(holder_reg);  // Temporarily save holder on the stack.
    mov(holder_reg, at);  // Move at to its holding place.
    LoadRoot(at, Heap::kNullValueRootIndex);
    Check(ne, "JSGlobalProxy::context() should not be null.",
          holder_reg, Operand(at));

    lw(holder_reg, FieldMemOperand(holder_reg, HeapObject::kMapOffset));
    LoadRoot(at, Heap::kGlobalContextMapRootIndex);
    Check(eq, "JSGlobalObject::global_context should be a global context.",
          holder_reg, Operand(at));
    // Restoring at is not needed. at is reloaded below.
    pop(holder_reg);  // Restore holder.
    // Restore at to holder's context.
    lw(at, FieldMemOperand(holder_reg, JSGlobalProxy::kContextOffset));
  }

  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;

  lw(scratch, FieldMemOperand(scratch, token_offset));
  lw(at, FieldMemOperand(at, token_offset));
  Branch(miss, ne, scratch, Operand(at));

  bind(&same_contexts);
}


void MacroAssembler::GetNumberHash(Register reg0, Register scratch) {
  // First of all we assign the hash seed to scratch.
  LoadRoot(scratch, Heap::kHashSeedRootIndex);
  SmiUntag(scratch);

  // Xor original key with a seed.
  xor_(reg0, reg0, scratch);

  // Compute the hash code from the untagged key. This must be kept in sync
  // with ComputeIntegerHash in utils.h.
  //
  // hash = ~hash + (hash << 15);
  nor(scratch, reg0, zero_reg);
  sll(at, reg0, 15);
  addu(reg0, scratch, at);

  // hash = hash ^ (hash >> 12);
  srl(at, reg0, 12);
  xor_(reg0, reg0, at);

  // hash = hash + (hash << 2);
  sll(at, reg0, 2);
  addu(reg0, reg0, at);

  // hash = hash ^ (hash >> 4);
  srl(at, reg0, 4);
  xor_(reg0, reg0, at);

  // hash = hash * 2057;
  li(scratch, Operand(2057));
  mul(reg0, reg0, scratch);

  // hash = hash ^ (hash >> 16);
  srl(at, reg0, 16);
  xor_(reg0, reg0, at);
}
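

// Reference sketch (not part of the original file): the C version this code
// must stay in sync with is essentially
//
//   uint32_t ComputeIntegerHash(uint32_t key, uint32_t seed) {  // Assumed
//     uint32_t hash = key ^ seed;                               // signature.
//     hash = ~hash + (hash << 15);
//     hash = hash ^ (hash >> 12);
//     hash = hash + (hash << 2);
//     hash = hash ^ (hash >> 4);
//     hash = hash * 2057;
//     hash = hash ^ (hash >> 16);
//     return hash;
//   }
//
// See ComputeIntegerHash in utils.h for the authoritative version.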


void MacroAssembler::LoadFromNumberDictionary(Label* miss,
                                              Register elements,
                                              Register key,
                                              Register result,
                                              Register reg0,
                                              Register reg1,
                                              Register reg2) {
  // Register use:
  //
  // elements - holds the slow-case elements of the receiver on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // key      - holds the smi key on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // result   - holds the result on exit if the load succeeded.
  //            Allowed to be the same as 'elements' or 'key'.
  //            Unchanged on bailout so 'elements' or 'key' can be used
  //            in further computation.
  //
  // Scratch registers:
  //
  // reg0 - holds the untagged key on entry and holds the hash once computed.
  //
  // reg1 - Used to hold the capacity mask of the dictionary.
  //
  // reg2 - Used for the index into the dictionary.
  // at   - Temporary (avoid MacroAssembler instructions also using 'at').
  Label done;

  GetNumberHash(reg0, reg1);

  // Compute the capacity mask.
  lw(reg1, FieldMemOperand(elements, SeededNumberDictionary::kCapacityOffset));
  sra(reg1, reg1, kSmiTagSize);
  Subu(reg1, reg1, Operand(1));

  // Generate an unrolled loop that performs a few probes before giving up.
  static const int kProbes = 4;
  for (int i = 0; i < kProbes; i++) {
    // Use reg2 for index calculations and keep the hash intact in reg0.
    mov(reg2, reg0);
    // Compute the masked index: (hash + i + i * i) & mask.
    if (i > 0) {
      Addu(reg2, reg2, Operand(SeededNumberDictionary::GetProbeOffset(i)));
    }
    and_(reg2, reg2, reg1);

    // Scale the index by multiplying by the element size.
    ASSERT(SeededNumberDictionary::kEntrySize == 3);
    sll(at, reg2, 1);  // 2x.
    addu(reg2, reg2, at);  // reg2 = reg2 * 3.

    // Check if the key is identical to the name.
    sll(at, reg2, kPointerSizeLog2);
    addu(reg2, elements, at);

    lw(at, FieldMemOperand(reg2, SeededNumberDictionary::kElementsStartOffset));
    if (i != kProbes - 1) {
      Branch(&done, eq, key, Operand(at));
    } else {
      Branch(miss, ne, key, Operand(at));
    }
  }

  bind(&done);
  // Check that the value is a normal property.
  // reg2: elements + (index * kPointerSize).
  const int kDetailsOffset =
      SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  lw(reg1, FieldMemOperand(reg2, kDetailsOffset));
  And(at, reg1, Operand(Smi::FromInt(PropertyDetails::TypeField::kMask)));
  Branch(miss, ne, at, Operand(zero_reg));

  // Get the value at the masked, scaled index and return.
  const int kValueOffset =
      SeededNumberDictionary::kElementsStartOffset + kPointerSize;
  lw(result, FieldMemOperand(reg2, kValueOffset));
}
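

// Probe sketch (illustrative, not part of the original file): an entry
// occupies SeededNumberDictionary::kEntrySize == 3 words (key, value,
// details), so probe i inspects the key at
//   elements + kElementsStartOffset
//            + ((hash + GetProbeOffset(i)) & mask) * 3 * kPointerSize
// with the value one word and the details two words past the key. Only
// kProbes == 4 probes are inlined before branching to 'miss'.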


// ---------------------------------------------------------------------------
// Instruction macros.

void MacroAssembler::Addu(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    addu(rd, rs, rt.rm());
  } else {
    if (is_int16(rt.imm32_) && !MustUseReg(rt.rmode_)) {
      addiu(rd, rs, rt.imm32_);
    } else {
      // li handles the relocation.
      ASSERT(!rs.is(at));
      li(at, rt);
      addu(rd, rs, at);
    }
  }
}
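

// Usage sketch (illustrative, not part of the original file):
//   Addu(v0, a0, Operand(100));         // Fits in 16 bits: one addiu.
//   Addu(v0, a0, Operand(0x12345678));  // Expands to li(at, ...) + addu,
//                                       // i.e. lui + ori + addu.
// The same immediate-vs-register pattern recurs in the macros below; the
// logical ones (And/Or/Xor) test is_uint16 instead because andi/ori/xori
// zero-extend their immediate.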


void MacroAssembler::Subu(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    subu(rd, rs, rt.rm());
  } else {
    if (is_int16(rt.imm32_) && !MustUseReg(rt.rmode_)) {
      addiu(rd, rs, -rt.imm32_);  // No subiu instr, use addiu(x, y, -imm).
    } else {
      // li handles the relocation.
      ASSERT(!rs.is(at));
      li(at, rt);
      subu(rd, rs, at);
    }
  }
}


void MacroAssembler::Mul(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    mul(rd, rs, rt.rm());
  } else {
    // li handles the relocation.
    ASSERT(!rs.is(at));
    li(at, rt);
    mul(rd, rs, at);
  }
}


void MacroAssembler::Mult(Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    mult(rs, rt.rm());
  } else {
    // li handles the relocation.
    ASSERT(!rs.is(at));
    li(at, rt);
    mult(rs, at);
  }
}


void MacroAssembler::Multu(Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    multu(rs, rt.rm());
  } else {
    // li handles the relocation.
    ASSERT(!rs.is(at));
    li(at, rt);
    multu(rs, at);
  }
}


void MacroAssembler::Div(Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    div(rs, rt.rm());
  } else {
    // li handles the relocation.
    ASSERT(!rs.is(at));
    li(at, rt);
    div(rs, at);
  }
}


void MacroAssembler::Divu(Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    divu(rs, rt.rm());
  } else {
    // li handles the relocation.
    ASSERT(!rs.is(at));
    li(at, rt);
    divu(rs, at);
  }
}


void MacroAssembler::And(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    and_(rd, rs, rt.rm());
  } else {
    if (is_uint16(rt.imm32_) && !MustUseReg(rt.rmode_)) {
      andi(rd, rs, rt.imm32_);
    } else {
      // li handles the relocation.
      ASSERT(!rs.is(at));
      li(at, rt);
      and_(rd, rs, at);
    }
  }
}


void MacroAssembler::Or(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    or_(rd, rs, rt.rm());
  } else {
    if (is_uint16(rt.imm32_) && !MustUseReg(rt.rmode_)) {
      ori(rd, rs, rt.imm32_);
    } else {
      // li handles the relocation.
      ASSERT(!rs.is(at));
      li(at, rt);
      or_(rd, rs, at);
    }
  }
}


void MacroAssembler::Xor(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    xor_(rd, rs, rt.rm());
  } else {
    if (is_uint16(rt.imm32_) && !MustUseReg(rt.rmode_)) {
      xori(rd, rs, rt.imm32_);
    } else {
      // li handles the relocation.
      ASSERT(!rs.is(at));
      li(at, rt);
      xor_(rd, rs, at);
    }
  }
}


void MacroAssembler::Nor(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    nor(rd, rs, rt.rm());
  } else {
    // li handles the relocation.
    ASSERT(!rs.is(at));
    li(at, rt);
    nor(rd, rs, at);
  }
}


void MacroAssembler::Neg(Register rs, const Operand& rt) {
  ASSERT(rt.is_reg());
  ASSERT(!at.is(rs));
  ASSERT(!at.is(rt.rm()));
  li(at, -1);
  xor_(rs, rt.rm(), at);
}


void MacroAssembler::Slt(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    slt(rd, rs, rt.rm());
  } else {
    if (is_int16(rt.imm32_) && !MustUseReg(rt.rmode_)) {
      slti(rd, rs, rt.imm32_);
    } else {
      // li handles the relocation.
      ASSERT(!rs.is(at));
      li(at, rt);
      slt(rd, rs, at);
    }
  }
}


void MacroAssembler::Sltu(Register rd, Register rs, const Operand& rt) {
  if (rt.is_reg()) {
    sltu(rd, rs, rt.rm());
  } else {
    if (is_uint16(rt.imm32_) && !MustUseReg(rt.rmode_)) {
      sltiu(rd, rs, rt.imm32_);
    } else {
      // li handles the relocation.
      ASSERT(!rs.is(at));
      li(at, rt);
      sltu(rd, rs, at);
    }
  }
}


void MacroAssembler::Ror(Register rd, Register rs, const Operand& rt) {
  if (mips32r2) {
    if (rt.is_reg()) {
      rotrv(rd, rs, rt.rm());
    } else {
      rotr(rd, rs, rt.imm32_);
    }
  } else {
    if (rt.is_reg()) {
      subu(at, zero_reg, rt.rm());
      sllv(at, rs, at);
      srlv(rd, rs, rt.rm());
      or_(rd, rd, at);
    } else {
      if (rt.imm32_ == 0) {
        srl(rd, rs, 0);
      } else {
        srl(at, rs, rt.imm32_);
        sll(rd, rs, (0x20 - rt.imm32_) & 0x1f);
        or_(rd, rd, at);
      }
    }
  }
}
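

// Rotate sketch (illustrative, not part of the original file): on MIPS32R1,
// which lacks rotr/rotrv, a rotate right by n is synthesized as
//   rd = (rs >> n) | (rs << (32 - n));
// with shift amounts masked to five bits, and an immediate n == 0 reduced
// to the plain copy srl(rd, rs, 0).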


//------------Pseudo-instructions-------------

void MacroAssembler::li(Register rd, Operand j, bool gen2instr) {
  ASSERT(!j.is_reg());
  BlockTrampolinePoolScope block_trampoline_pool(this);
  if (!MustUseReg(j.rmode_) && !gen2instr) {
    // Normal load of an immediate value which does not need relocation info.
    if (is_int16(j.imm32_)) {
      addiu(rd, zero_reg, j.imm32_);
    } else if (!(j.imm32_ & kHiMask)) {
      ori(rd, zero_reg, j.imm32_);
    } else if (!(j.imm32_ & kImm16Mask)) {
      lui(rd, (j.imm32_ & kHiMask) >> kLuiShift);
    } else {
      lui(rd, (j.imm32_ & kHiMask) >> kLuiShift);
      ori(rd, rd, (j.imm32_ & kImm16Mask));
    }
  } else if (MustUseReg(j.rmode_) || gen2instr) {
    if (MustUseReg(j.rmode_)) {
      RecordRelocInfo(j.rmode_, j.imm32_);
    }
    // We always need the same number of instructions as we may need to patch
    // this code to load another value which may need 2 instructions to load.
    lui(rd, (j.imm32_ & kHiMask) >> kLuiShift);
    ori(rd, rd, (j.imm32_ & kImm16Mask));
  }
}
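

// Expansion sketch (illustrative, not part of the original file):
//   li(t0, Operand(-5));          // addiu t0, zero_reg, -5     (1 instr).
//   li(t0, Operand(0x0000ffff));  // ori   t0, zero_reg, 0xffff (1 instr).
//   li(t0, Operand(0xabcd0000));  // lui   t0, 0xabcd           (1 instr).
//   li(t0, Operand(0x12345678));  // lui + ori                  (2 instrs).
// With gen2instr or a relocatable rmode_, the two-instruction lui/ori form
// is always emitted so the value can later be patched in place.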


void MacroAssembler::MultiPush(RegList regs) {
  int16_t num_to_push = NumberOfBitsSet(regs);
  int16_t stack_offset = num_to_push * kPointerSize;

  Subu(sp, sp, Operand(stack_offset));
  for (int16_t i = kNumRegisters - 1; i >= 0; i--) {
    if ((regs & (1 << i)) != 0) {
      stack_offset -= kPointerSize;
      sw(ToRegister(i), MemOperand(sp, stack_offset));
    }
  }
}


void MacroAssembler::MultiPushReversed(RegList regs) {
  int16_t num_to_push = NumberOfBitsSet(regs);
  int16_t stack_offset = num_to_push * kPointerSize;

  Subu(sp, sp, Operand(stack_offset));
  for (int16_t i = 0; i < kNumRegisters; i++) {
    if ((regs & (1 << i)) != 0) {
      stack_offset -= kPointerSize;
      sw(ToRegister(i), MemOperand(sp, stack_offset));
    }
  }
}


void MacroAssembler::MultiPop(RegList regs) {
  int16_t stack_offset = 0;

  for (int16_t i = 0; i < kNumRegisters; i++) {
    if ((regs & (1 << i)) != 0) {
      lw(ToRegister(i), MemOperand(sp, stack_offset));
      stack_offset += kPointerSize;
    }
  }
  addiu(sp, sp, stack_offset);
}
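

// Usage sketch (illustrative, not part of the original file):
//   MultiPush(a0.bit() | a1.bit() | ra.bit());
// reserves three words and stores ra (the highest register code) farthest
// from sp and a0 at sp, the same layout as push(ra); push(a1); push(a0).
// MultiPop with the same RegList restores them.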


void MacroAssembler::MultiPopReversed(RegList regs) {
  int16_t stack_offset = 0;

  for (int16_t i = kNumRegisters - 1; i >= 0; i--) {
    if ((regs & (1 << i)) != 0) {
      lw(ToRegister(i), MemOperand(sp, stack_offset));
      stack_offset += kPointerSize;
    }
  }
  addiu(sp, sp, stack_offset);
}


void MacroAssembler::MultiPushFPU(RegList regs) {
  CpuFeatures::Scope scope(FPU);
  int16_t num_to_push = NumberOfBitsSet(regs);
  int16_t stack_offset = num_to_push * kDoubleSize;

  Subu(sp, sp, Operand(stack_offset));
  for (int16_t i = kNumRegisters - 1; i >= 0; i--) {
    if ((regs & (1 << i)) != 0) {
      stack_offset -= kDoubleSize;
      sdc1(FPURegister::from_code(i), MemOperand(sp, stack_offset));
    }
  }
}


void MacroAssembler::MultiPushReversedFPU(RegList regs) {
  CpuFeatures::Scope scope(FPU);
  int16_t num_to_push = NumberOfBitsSet(regs);
  int16_t stack_offset = num_to_push * kDoubleSize;

  Subu(sp, sp, Operand(stack_offset));
  for (int16_t i = 0; i < kNumRegisters; i++) {
    if ((regs & (1 << i)) != 0) {
      stack_offset -= kDoubleSize;
      sdc1(FPURegister::from_code(i), MemOperand(sp, stack_offset));
    }
  }
}


void MacroAssembler::MultiPopFPU(RegList regs) {
  CpuFeatures::Scope scope(FPU);
  int16_t stack_offset = 0;

  for (int16_t i = 0; i < kNumRegisters; i++) {
    if ((regs & (1 << i)) != 0) {
      ldc1(FPURegister::from_code(i), MemOperand(sp, stack_offset));
      stack_offset += kDoubleSize;
    }
  }
  addiu(sp, sp, stack_offset);
}


void MacroAssembler::MultiPopReversedFPU(RegList regs) {
  CpuFeatures::Scope scope(FPU);
  int16_t stack_offset = 0;

  for (int16_t i = kNumRegisters - 1; i >= 0; i--) {
    if ((regs & (1 << i)) != 0) {
      ldc1(FPURegister::from_code(i), MemOperand(sp, stack_offset));
      stack_offset += kDoubleSize;
    }
  }
  addiu(sp, sp, stack_offset);
}


void MacroAssembler::Ext(Register rt,
                         Register rs,
                         uint16_t pos,
                         uint16_t size) {
  ASSERT(pos < 32);
  ASSERT(pos + size < 33);

  if (mips32r2) {
    ext_(rt, rs, pos, size);
  } else {
    // Move rs to rt and shift it left then right to get the
    // desired bitfield on the right side and zeroes on the left.
    int shift_left = 32 - (pos + size);
    sll(rt, rs, shift_left);  // Acts as a move if shift_left == 0.

    int shift_right = 32 - size;
    if (shift_right > 0) {
      srl(rt, rt, shift_right);
    }
  }
}
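

// Bitfield sketch (illustrative, not part of the original file):
// Ext(rt, rs, 8, 4) extracts bits [11:8] of rs into the low bits of rt; on
// R1 this becomes sll(rt, rs, 20) to discard the bits above the field,
// then srl(rt, rt, 28) to right-align it with zero fill.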


void MacroAssembler::Ins(Register rt,
                         Register rs,
                         uint16_t pos,
                         uint16_t size) {
  ASSERT(pos < 32);
  ASSERT(pos + size < 32);

  if (mips32r2) {
    ins_(rt, rs, pos, size);
  } else {
    ASSERT(!rt.is(t8) && !rs.is(t8));

    srl(t8, rt, pos + size);
    // The left chunk from rt that needs to
    // be saved is on the right side of t8.
    sll(at, t8, pos + size);
    // The 'at' register now contains the left chunk on
    // the left (proper position) and zeroes.
    sll(t8, rt, 32 - pos);
    // t8 now contains the right chunk on the left and zeroes.
    srl(t8, t8, 32 - pos);
    // t8 now contains the right chunk on
    // the right (proper position) and zeroes.
    or_(rt, at, t8);
    // rt now contains the left and right chunks from the original rt
    // in their proper position and zeroes in the middle.
    sll(t8, rs, 32 - size);
    // t8 now contains the chunk from rs on the left and zeroes.
    srl(t8, t8, 32 - size - pos);
    // t8 now contains the original chunk from rs in
    // the middle (proper position).
    or_(rt, rt, t8);
    // rt now contains the result of the ins instruction in R2 mode.
  }
}


void MacroAssembler::Cvt_d_uw(FPURegister fd,
                              FPURegister fs,
                              FPURegister scratch) {
  // Move the data from fs to t8.
  mfc1(t8, fs);
  Cvt_d_uw(fd, t8, scratch);
}


void MacroAssembler::Cvt_d_uw(FPURegister fd,
                              Register rs,
                              FPURegister scratch) {
  // Convert rs to a FP value in fd (and fd + 1).
  // We do this by converting rs minus the MSB to avoid sign conversion,
  // then adding 2^31 to the result (if needed).

  ASSERT(!fd.is(scratch));
  ASSERT(!rs.is(t9));
  ASSERT(!rs.is(at));

  // Save rs's MSB to t9.
  Ext(t9, rs, 31, 1);
  // Remove rs's MSB.
  Ext(at, rs, 0, 31);
  // Move the result to fd.
  mtc1(at, fd);

  // Convert fd to a real FP value.
  cvt_d_w(fd, fd);

  Label conversion_done;

  // If rs's MSB was 0, it's done.
  // Otherwise we need to add that to the FP register.
  Branch(&conversion_done, eq, t9, Operand(zero_reg));

  // Load 2^31 into scratch as its floating-point representation.
  li(at, 0x41E00000);
  mtc1(at, FPURegister::from_code(scratch.code() + 1));
  mtc1(zero_reg, scratch);
  // Add it to fd.
  add_d(fd, fd, scratch);

  bind(&conversion_done);
}
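

// Constant sketch (illustrative, not part of the original file): 0x41E00000
// is the high word of the IEEE-754 double 2^31 (sign 0, biased exponent
// 1023 + 31 = 0x41E, zero mantissa); the low word is zero. Building the
// constant from its halves avoids a memory load. For example, converting
// 0x80000005 yields 5.0 + 2^31 = 2147483653.0.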


void MacroAssembler::Trunc_uw_d(FPURegister fd,
                                FPURegister fs,
                                FPURegister scratch) {
  Trunc_uw_d(fs, t8, scratch);
  mtc1(t8, fd);
}


void MacroAssembler::Trunc_uw_d(FPURegister fd,
                                Register rs,
                                FPURegister scratch) {
  ASSERT(!fd.is(scratch));
  ASSERT(!rs.is(at));

  // Load 2^31 into scratch as its floating-point representation.
  li(at, 0x41E00000);
  mtc1(at, FPURegister::from_code(scratch.code() + 1));
  mtc1(zero_reg, scratch);
  // Test if scratch > fd.
  c(OLT, D, fd, scratch);

  Label simple_convert;
  // If fd < 2^31 we can convert it normally.
  bc1t(&simple_convert);

  // First we subtract 2^31 from fd, then trunc it to rs
  // and add 2^31 to rs.
  sub_d(scratch, fd, scratch);
  trunc_w_d(scratch, scratch);
  mfc1(rs, scratch);
  Or(rs, rs, 1 << 31);

  Label done;
  Branch(&done);
  // Simple conversion.
  bind(&simple_convert);
  trunc_w_d(scratch, fd);
  mfc1(rs, scratch);

  bind(&done);
}
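

// Range sketch (illustrative, not part of the original file): trunc_w_d
// only produces a signed int32, so an input in [2^31, 2^32) is handled as
//   rs = (uint32_t)trunc(fd - 2^31) | 0x80000000;
// e.g. 3000000000.5 becomes trunc(852516352.5) | 0x80000000 == 3000000000.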


// Tries to get a signed int32 out of a double precision floating point heap
// number. Rounds towards 0. Branch to 'not_int32' if the double is out of the
// 32-bit signed integer range.
// This method implementation differs from the ARM version for performance
// reasons.
void MacroAssembler::ConvertToInt32(Register source,
                                    Register dest,
                                    Register scratch,
                                    Register scratch2,
                                    FPURegister double_scratch,
                                    Label *not_int32) {
  Label right_exponent, done;
  // Get exponent word (ENDIAN issues).
  lw(scratch, FieldMemOperand(source, HeapNumber::kExponentOffset));
  // Get exponent alone in scratch2.
  And(scratch2, scratch, Operand(HeapNumber::kExponentMask));
  // Load dest with zero. We use this either for the final shift or
  // for the answer.
  mov(dest, zero_reg);
  // Check whether the exponent matches a 32 bit signed int that is not a Smi.
  // A non-Smi integer is 1.xxx * 2^30 so the exponent is 30 (biased). This is
  // the exponent that we are fastest at and also the highest exponent we can
  // handle here.
  const uint32_t non_smi_exponent =
      (HeapNumber::kExponentBias + 30) << HeapNumber::kExponentShift;
  // If we have a match of the int32-but-not-Smi exponent then skip some logic.
  Branch(&right_exponent, eq, scratch2, Operand(non_smi_exponent));
  // If the exponent is higher than that then go to not_int32 case. This
  // catches numbers that don't fit in a signed int32, infinities and NaNs.
  Branch(not_int32, gt, scratch2, Operand(non_smi_exponent));

  // We know the exponent is smaller than 30 (biased). If it is less than
  // 0 (biased) then the number is smaller in magnitude than 1.0 * 2^0, i.e.
  // it rounds to zero.
  const uint32_t zero_exponent =
      (HeapNumber::kExponentBias + 0) << HeapNumber::kExponentShift;
  Subu(scratch2, scratch2, Operand(zero_exponent));
  // Dest already has a Smi zero.
  Branch(&done, lt, scratch2, Operand(zero_reg));
  if (!CpuFeatures::IsSupported(FPU)) {
    // We have a shifted exponent between 0 and 30 in scratch2.
    srl(dest, scratch2, HeapNumber::kExponentShift);
    // We now have the exponent in dest. Subtract from 30 to get
    // how much to shift down.
    li(at, Operand(30));
    subu(dest, at, dest);
  }
  bind(&right_exponent);
  if (CpuFeatures::IsSupported(FPU)) {
    CpuFeatures::Scope scope(FPU);
    // MIPS FPU instructions implementing double precision to integer
    // conversion using round to zero. Since the FP value was qualified
    // above, the resulting integer should be a legal int32.
    // The original 'Exponent' word is still in scratch.
    lwc1(double_scratch, FieldMemOperand(source, HeapNumber::kMantissaOffset));
    mtc1(scratch, FPURegister::from_code(double_scratch.code() + 1));
    trunc_w_d(double_scratch, double_scratch);
    mfc1(dest, double_scratch);
  } else {
    // On entry, dest has final downshift, scratch has original sign/exp/mant.
    // Save sign bit in top bit of dest.
    And(scratch2, scratch, Operand(0x80000000));
    Or(dest, dest, Operand(scratch2));
    // Put back the implicit 1, just above mantissa field.
    Or(scratch, scratch, Operand(1 << HeapNumber::kExponentShift));

    // Shift up the mantissa bits to take up the space the exponent used to
    // take. We just orred in the implicit bit so that took care of one and
    // we want to leave the sign bit 0 so we subtract 2 bits from the shift
    // distance. But we want to clear the sign-bit so shift one more bit
    // left, then shift right one bit.
    const int shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 2;
    sll(scratch, scratch, shift_distance + 1);
    srl(scratch, scratch, 1);

    // Get the second half of the double. For some exponents we don't
    // actually need this because the bits get shifted out again, but
    // it's probably slower to test than just to do it.
    lw(scratch2, FieldMemOperand(source, HeapNumber::kMantissaOffset));
    // Extract the top 10 bits, and insert those bottom 10 bits of scratch.
    // The width of the field here is the same as the shift amount above.
    const int field_width = shift_distance;
    Ext(scratch2, scratch2, 32 - shift_distance, field_width);
    Ins(scratch, scratch2, 0, field_width);
    // Move down according to the exponent.
    srlv(scratch, scratch, dest);
    // Prepare the negative version of our integer.
    subu(scratch2, zero_reg, scratch);
    // Trick to check sign bit (msb) held in dest, count leading zero.
    // 0 indicates negative, save negative version with conditional move.
    clz(dest, dest);
    movz(scratch, scratch2, dest);
    mov(dest, scratch);
  }
  bind(&done);
}
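

// Behavior sketch (illustrative, not part of the original file): the result
// is the double rounded towards zero, e.g. 7.9 -> 7 and -7.9 -> -7, while
// any input whose biased exponent exceeds kExponentBias + 30 (such as 2^31,
// infinities and NaNs) branches to 'not_int32' instead.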


void MacroAssembler::EmitOutOfInt32RangeTruncate(Register result,
                                                 Register input_high,
                                                 Register input_low,
                                                 Register scratch) {
  Label done, normal_exponent, restore_sign;
  // Extract the biased exponent in result.
  Ext(result,
      input_high,
      HeapNumber::kExponentShift,
      HeapNumber::kExponentBits);

  // Check for Infinity and NaNs, which should return 0.
  Subu(scratch, result, HeapNumber::kExponentMask);
  movz(result, zero_reg, scratch);
  Branch(&done, eq, scratch, Operand(zero_reg));

  // Express exponent as delta to (number of mantissa bits + 31).
  Subu(result,
       result,
       Operand(HeapNumber::kExponentBias + HeapNumber::kMantissaBits + 31));

  // If the delta is strictly positive, all bits would be shifted away,
  // which means that we can return 0.
  Branch(&normal_exponent, le, result, Operand(zero_reg));
  mov(result, zero_reg);
  Branch(&done);

  bind(&normal_exponent);
  const int kShiftBase = HeapNumber::kNonMantissaBitsInTopWord - 1;
  // Calculate shift.
  Addu(scratch, result, Operand(kShiftBase + HeapNumber::kMantissaBits));

  // Save the sign.
  Register sign = result;
  result = no_reg;
  And(sign, input_high, Operand(HeapNumber::kSignMask));

  // On ARM shifts > 31 bits are valid and will result in zero. On MIPS we need
  // to check for this specific case.
  Label high_shift_needed, high_shift_done;
  Branch(&high_shift_needed, lt, scratch, Operand(32));
  mov(input_high, zero_reg);
  Branch(&high_shift_done);
  bind(&high_shift_needed);

  // Set the implicit 1 before the mantissa part in input_high.
  Or(input_high,
     input_high,
     Operand(1 << HeapNumber::kMantissaBitsInTopWord));
  // Shift the mantissa bits to the correct position.
  // We don't need to clear non-mantissa bits as they will be shifted away.
  // If they weren't, it would mean that the answer is in the 32bit range.
  sllv(input_high, input_high, scratch);

  bind(&high_shift_done);

  // Replace the shifted bits with bits from the lower mantissa word.
  Label pos_shift, shift_done;
  li(at, 32);
  subu(scratch, at, scratch);
  Branch(&pos_shift, ge, scratch, Operand(zero_reg));

  // Negate scratch.
  Subu(scratch, zero_reg, scratch);
  sllv(input_low, input_low, scratch);
  Branch(&shift_done);

  bind(&pos_shift);
  srlv(input_low, input_low, scratch);

  bind(&shift_done);
  Or(input_high, input_high, Operand(input_low));
  // Restore sign if necessary.
  mov(scratch, sign);
  result = sign;
  sign = no_reg;
  Subu(result, zero_reg, input_high);
  movz(result, input_high, scratch);
  bind(&done);
}
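

// Semantics sketch (illustrative, not part of the original file): this is
// the ECMAScript ToInt32 wrap-around for doubles whose integer part does
// not fit in 32 bits; the result is the low 32 bits of the sign-applied
// integer value, e.g. 2^32 + 5 -> 5 and -(2^32 + 5) -> -5, while
// infinities and NaNs map to 0.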


void MacroAssembler::EmitECMATruncate(Register result,
                                      FPURegister double_input,
                                      FPURegister single_scratch,
                                      Register scratch,
                                      Register input_high,
                                      Register input_low) {
  CpuFeatures::Scope scope(FPU);
  ASSERT(!input_high.is(result));
  ASSERT(!input_low.is(result));
  ASSERT(!input_low.is(input_high));
  ASSERT(!scratch.is(result) &&
         !scratch.is(input_high) &&
         !scratch.is(input_low));
  ASSERT(!single_scratch.is(double_input));

  Label done;
  Label manual;

  // Clear cumulative exception flags and save the FCSR.
  Register scratch2 = input_high;
  cfc1(scratch2, FCSR);
  ctc1(zero_reg, FCSR);
  // Try a conversion to a signed integer.
  trunc_w_d(single_scratch, double_input);
  mfc1(result, single_scratch);
  // Retrieve and restore the FCSR.
  cfc1(scratch, FCSR);
  ctc1(scratch2, FCSR);
  // Check for overflow and NaNs.
  And(scratch,
      scratch,
      kFCSROverflowFlagMask | kFCSRUnderflowFlagMask | kFCSRInvalidOpFlagMask);
  // If we had no exceptions we are done.
  Branch(&done, eq, scratch, Operand(zero_reg));

  // Load the double value and perform a manual truncation.
  Move(input_low, input_high, double_input);
  EmitOutOfInt32RangeTruncate(result,
                              input_high,
                              input_low,
                              scratch);
  bind(&done);
}
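

// Flow sketch (illustrative, not part of the original file): the fast path
// is a single trunc_w_d; the FCSR exception flags are then inspected, and
// only when the hardware conversion flagged overflow, underflow or an
// invalid operation (value outside int32, NaN) does the code fall back to
// the bit-twiddling EmitOutOfInt32RangeTruncate() above.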


void MacroAssembler::GetLeastBitsFromSmi(Register dst,
                                         Register src,
                                         int num_least_bits) {
  Ext(dst, src, kSmiTagSize, num_least_bits);
}


void MacroAssembler::GetLeastBitsFromInt32(Register dst,
                                           Register src,
                                           int num_least_bits) {
  And(dst, src, Operand((1 << num_least_bits) - 1));
}
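

// Note (illustrative, not part of the original file): a smi keeps its
// payload above the tag bit, so the least-significant payload bits start
// at bit kSmiTagSize and Ext() extracts them without a separate untag step;
// the int32 variant simply masks with (1 << n) - 1.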


// Emulated conditional branches do not emit a nop in the branch delay slot.
//
// BRANCH_ARGS_CHECK checks that conditional jump arguments are correct.
#define BRANCH_ARGS_CHECK(cond, rs, rt) ASSERT( \
    (cond == cc_always && rs.is(zero_reg) && rt.rm().is(zero_reg)) || \
    (cond != cc_always && (!rs.is(zero_reg) || !rt.rm().is(zero_reg))))


bool MacroAssembler::UseAbsoluteCodePointers() {
  return is_trampoline_emitted();
}


void MacroAssembler::Branch(int16_t offset, BranchDelaySlot bdslot) {
  BranchShort(offset, bdslot);
}


void MacroAssembler::Branch(int16_t offset, Condition cond, Register rs,
                            const Operand& rt,
                            BranchDelaySlot bdslot) {
  BranchShort(offset, cond, rs, rt, bdslot);
}


void MacroAssembler::Branch(Label* L, BranchDelaySlot bdslot) {
  bool is_label_near = is_near(L);
  if (UseAbsoluteCodePointers() && !is_label_near) {
    Jr(L, bdslot);
  } else {
    BranchShort(L, bdslot);
  }
}


void MacroAssembler::Branch(Label* L, Condition cond, Register rs,
                            const Operand& rt,
                            BranchDelaySlot bdslot) {
  bool is_label_near = is_near(L);
  if (UseAbsoluteCodePointers() && !is_label_near) {
    Label skip;
    Condition neg_cond = NegateCondition(cond);
    BranchShort(&skip, neg_cond, rs, rt);
    Jr(L, bdslot);
    bind(&skip);
  } else {
    BranchShort(L, cond, rs, rt, bdslot);
  }
}
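

// Long-branch sketch (illustrative, not part of the original file): MIPS
// conditional branches only reach +/-128KB (a signed 16-bit instruction
// offset shifted left by 2), so once a trampoline pool has been emitted a
// far conditional branch is rewritten as a short branch on the negated
// condition around an absolute jump:
//   BranchShort(&skip, NegateCondition(cond), rs, rt);
//   Jr(L, bdslot);   // Absolute jump to the far label.
//   bind(&skip);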


void MacroAssembler::BranchShort(int16_t offset, BranchDelaySlot bdslot) {
  b(offset);

  // Emit a nop in the branch delay slot if required.
  if (bdslot == PROTECT)
    nop();
}


void MacroAssembler::BranchShort(int16_t offset, Condition cond, Register rs,
                                 const Operand& rt,
                                 BranchDelaySlot bdslot) {
  BRANCH_ARGS_CHECK(cond, rs, rt);
  ASSERT(!rs.is(zero_reg));
  Register r2 = no_reg;
  Register scratch = at;

  if (rt.is_reg()) {
    // We don't want any other register but scratch clobbered.
    ASSERT(!scratch.is(rs) && !scratch.is(rt.rm_));
    r2 = rt.rm_;
    switch (cond) {
      case cc_always:
        b(offset);
        break;
      case eq:
        beq(rs, r2, offset);
        break;
      case ne:
        bne(rs, r2, offset);
        break;
      // Signed comparison.
      case greater:
        if (r2.is(zero_reg)) {
          bgtz(rs, offset);
        } else {
          slt(scratch, r2, rs);
          bne(scratch, zero_reg, offset);
        }
        break;
      case greater_equal:
        if (r2.is(zero_reg)) {
          bgez(rs, offset);
        } else {
          slt(scratch, rs, r2);
          beq(scratch, zero_reg, offset);
        }
        break;
      case less:
        if (r2.is(zero_reg)) {
          bltz(rs, offset);
        } else {
          slt(scratch, rs, r2);
          bne(scratch, zero_reg, offset);
        }
        break;
      case less_equal:
        if (r2.is(zero_reg)) {
          blez(rs, offset);
        } else {
          slt(scratch, r2, rs);
          beq(scratch, zero_reg, offset);
        }
        break;
      // Unsigned comparison.
      case Ugreater:
        if (r2.is(zero_reg)) {
          bgtz(rs, offset);
        } else {
          sltu(scratch, r2, rs);
          bne(scratch, zero_reg, offset);
        }
        break;
      case Ugreater_equal:
        if (r2.is(zero_reg)) {
          bgez(rs, offset);
        } else {
          sltu(scratch, rs, r2);
          beq(scratch, zero_reg, offset);
        }
        break;
      case Uless:
        if (r2.is(zero_reg)) {
          // No code needs to be emitted.
          return;
        } else {
          sltu(scratch, rs, r2);
          bne(scratch, zero_reg, offset);
        }
        break;
      case Uless_equal:
        if (r2.is(zero_reg)) {
          b(offset);
        } else {
          sltu(scratch, r2, rs);
          beq(scratch, zero_reg, offset);
        }
        break;
      default:
        UNREACHABLE();
    }
  } else {
    // Be careful to always use shifted_branch_offset only just before the
    // branch instruction, as the location will be remembered for patching
    // the target.
    switch (cond) {
      case cc_always:
        b(offset);
        break;
      case eq:
        // We don't want any other register but scratch clobbered.
        ASSERT(!scratch.is(rs));
        r2 = scratch;
        li(r2, rt);
        beq(rs, r2, offset);
        break;
      case ne:
        // We don't want any other register but scratch clobbered.
        ASSERT(!scratch.is(rs));
        r2 = scratch;
        li(r2, rt);
        bne(rs, r2, offset);
        break;
      // Signed comparison.
      case greater:
        if (rt.imm32_ == 0) {
          bgtz(rs, offset);
        } else {
          r2 = scratch;
          li(r2, rt);
          slt(scratch, r2, rs);
          bne(scratch, zero_reg, offset);
        }
        break;
      case greater_equal:
        if (rt.imm32_ == 0) {
          bgez(rs, offset);
        } else if (is_int16(rt.imm32_)) {
          slti(scratch, rs, rt.imm32_);
          beq(scratch, zero_reg, offset);
        } else {
          r2 = scratch;
          li(r2, rt);
          slt(scratch, rs, r2);
          beq(scratch, zero_reg, offset);
        }
        break;
      case less:
        if (rt.imm32_ == 0) {
          bltz(rs, offset);
        } else if (is_int16(rt.imm32_)) {
          slti(scratch, rs, rt.imm32_);
          bne(scratch, zero_reg, offset);
        } else {
          r2 = scratch;
          li(r2, rt);
          slt(scratch, rs, r2);
          bne(scratch, zero_reg, offset);
        }
        break;
      case less_equal:
        if (rt.imm32_ == 0) {
          blez(rs, offset);
        } else {
          r2 = scratch;
          li(r2, rt);
          slt(scratch, r2, rs);
          beq(scratch, zero_reg, offset);
        }
        break;
      // Unsigned comparison.
      case Ugreater:
        if (rt.imm32_ == 0) {
          bgtz(rs, offset);
        } else {
          r2 = scratch;
          li(r2, rt);
          sltu(scratch, r2, rs);
          bne(scratch, zero_reg, offset);
        }
        break;
      case Ugreater_equal:
        if (rt.imm32_ == 0) {
          bgez(rs, offset);
        } else if (is_int16(rt.imm32_)) {
          sltiu(scratch, rs, rt.imm32_);
          beq(scratch, zero_reg, offset);
        } else {
          r2 = scratch;
          li(r2, rt);
          sltu(scratch, rs, r2);
          beq(scratch, zero_reg, offset);
        }
        break;
      case Uless:
        if (rt.imm32_ == 0) {
          // No code needs to be emitted.
          return;
        } else if (is_int16(rt.imm32_)) {
          sltiu(scratch, rs, rt.imm32_);
          bne(scratch, zero_reg, offset);
        } else {
          r2 = scratch;
          li(r2, rt);
          sltu(scratch, rs, r2);
          bne(scratch, zero_reg, offset);
        }
        break;
      case Uless_equal:
        if (rt.imm32_ == 0) {
          b(offset);
        } else {
          r2 = scratch;
          li(r2, rt);
          sltu(scratch, r2, rs);
          beq(scratch, zero_reg, offset);
        }
        break;
      default:
        UNREACHABLE();
    }
  }
  // Emit a nop in the branch delay slot if required.
  if (bdslot == PROTECT)
    nop();
}


void MacroAssembler::BranchShort(Label* L, BranchDelaySlot bdslot) {
  // We use shifted_branch_offset as an argument for the branch instructions
  // so that it is called just before generating the branch instruction.

  b(shifted_branch_offset(L, false));

  // Emit a nop in the branch delay slot if required.
  if (bdslot == PROTECT)
    nop();
}


void MacroAssembler::BranchShort(Label* L, Condition cond, Register rs,
                                 const Operand& rt,
                                 BranchDelaySlot bdslot) {
  BRANCH_ARGS_CHECK(cond, rs, rt);

  int32_t offset;
  Register r2 = no_reg;
  Register scratch = at;
  if (rt.is_reg()) {
    r2 = rt.rm_;
    // Be careful to always use shifted_branch_offset only just before the
    // branch instruction, as the location will be remembered for patching the
    // target.
    switch (cond) {
      case cc_always:
        offset = shifted_branch_offset(L, false);
        b(offset);
        break;
      case eq:
        offset = shifted_branch_offset(L, false);
        beq(rs, r2, offset);
        break;
      case ne:
        offset = shifted_branch_offset(L, false);
        bne(rs, r2, offset);
        break;
      // Signed comparison.
      case greater:
        if (r2.is(zero_reg)) {
          offset = shifted_branch_offset(L, false);
          bgtz(rs, offset);
        } else {
          slt(scratch, r2, rs);
          offset = shifted_branch_offset(L, false);
          bne(scratch, zero_reg, offset);
        }
        break;
      case greater_equal:
        if (r2.is(zero_reg)) {
          offset = shifted_branch_offset(L, false);
          bgez(rs, offset);
        } else {
          slt(scratch, rs, r2);
          offset = shifted_branch_offset(L, false);
          beq(scratch, zero_reg, offset);
        }
        break;
      case less:
        if (r2.is(zero_reg)) {
          offset = shifted_branch_offset(L, false);
          bltz(rs, offset);
        } else {
          slt(scratch, rs, r2);
          offset = shifted_branch_offset(L, false);
          bne(scratch, zero_reg, offset);
        }
        break;
      case less_equal:
        if (r2.is(zero_reg)) {
          offset = shifted_branch_offset(L, false);
          blez(rs, offset);
        } else {
          slt(scratch, r2, rs);
          offset = shifted_branch_offset(L, false);
          beq(scratch, zero_reg, offset);
        }
        break;
      // Unsigned comparison.
      case Ugreater:
        if (r2.is(zero_reg)) {
          offset = shifted_branch_offset(L, false);
          bgtz(rs, offset);
        } else {
          sltu(scratch, r2, rs);
          offset = shifted_branch_offset(L, false);
          bne(scratch, zero_reg, offset);
        }
        break;
      case Ugreater_equal:
        if (r2.is(zero_reg)) {
          offset = shifted_branch_offset(L, false);
          bgez(rs, offset);
        } else {
          sltu(scratch, rs, r2);
          offset = shifted_branch_offset(L, false);
          beq(scratch, zero_reg, offset);
        }
        break;
      case Uless:
        if (r2.is(zero_reg)) {
          // No code needs to be emitted: nothing is unsigned-less than zero.
          return;
        } else {
          sltu(scratch, rs, r2);
          offset = shifted_branch_offset(L, false);
          bne(scratch, zero_reg, offset);
        }
        break;
      case Uless_equal:
        if (r2.is(zero_reg)) {
          offset = shifted_branch_offset(L, false);
          b(offset);
        } else {
          sltu(scratch, r2, rs);
          offset = shifted_branch_offset(L, false);
          beq(scratch, zero_reg, offset);
        }
        break;
      default:
        UNREACHABLE();
    }
  } else {
    // Be careful to always use shifted_branch_offset only just before the
    // branch instruction, as the location will be remembered for patching the
    // target.
    switch (cond) {
      case cc_always:
        offset = shifted_branch_offset(L, false);
        b(offset);
        break;
      case eq:
        ASSERT(!scratch.is(rs));
        r2 = scratch;
        li(r2, rt);
        offset = shifted_branch_offset(L, false);
        beq(rs, r2, offset);
        break;
      case ne:
        ASSERT(!scratch.is(rs));
        r2 = scratch;
        li(r2, rt);
        offset = shifted_branch_offset(L, false);
        bne(rs, r2, offset);
        break;
      // Signed comparison.
      case greater:
        if (rt.imm32_ == 0) {
          offset = shifted_branch_offset(L, false);
          bgtz(rs, offset);
        } else {
          ASSERT(!scratch.is(rs));
          r2 = scratch;
          li(r2, rt);
          slt(scratch, r2, rs);
          offset = shifted_branch_offset(L, false);
          bne(scratch, zero_reg, offset);
        }
        break;
      case greater_equal:
        if (rt.imm32_ == 0) {
          offset = shifted_branch_offset(L, false);
          bgez(rs, offset);
        } else if (is_int16(rt.imm32_)) {
          slti(scratch, rs, rt.imm32_);
          offset = shifted_branch_offset(L, false);
          beq(scratch, zero_reg, offset);
        } else {
          ASSERT(!scratch.is(rs));
          r2 = scratch;
          li(r2, rt);
          slt(scratch, rs, r2);
          offset = shifted_branch_offset(L, false);
          beq(scratch, zero_reg, offset);
        }
        break;
      case less:
        if (rt.imm32_ == 0) {
          offset = shifted_branch_offset(L, false);
          bltz(rs, offset);
        } else if (is_int16(rt.imm32_)) {
          slti(scratch, rs, rt.imm32_);
          offset = shifted_branch_offset(L, false);
          bne(scratch, zero_reg, offset);
        } else {
          ASSERT(!scratch.is(rs));
          r2 = scratch;
          li(r2, rt);
          slt(scratch, rs, r2);
          offset = shifted_branch_offset(L, false);
          bne(scratch, zero_reg, offset);
        }
        break;
      case less_equal:
        if (rt.imm32_ == 0) {
          offset = shifted_branch_offset(L, false);
          blez(rs, offset);
        } else {
          ASSERT(!scratch.is(rs));
          r2 = scratch;
          li(r2, rt);
          slt(scratch, r2, rs);
          offset = shifted_branch_offset(L, false);
          beq(scratch, zero_reg, offset);
        }
        break;
      // Unsigned comparison.
      case Ugreater:
        if (rt.imm32_ == 0) {
          offset = shifted_branch_offset(L, false);
          bgtz(rs, offset);
        } else {
          ASSERT(!scratch.is(rs));
          r2 = scratch;
          li(r2, rt);
          sltu(scratch, r2, rs);
          offset = shifted_branch_offset(L, false);
          bne(scratch, zero_reg, offset);
        }
        break;
      case Ugreater_equal:
        if (rt.imm32_ == 0) {
          offset = shifted_branch_offset(L, false);
          bgez(rs, offset);
        } else if (is_int16(rt.imm32_)) {
          sltiu(scratch, rs, rt.imm32_);
          offset = shifted_branch_offset(L, false);
          beq(scratch, zero_reg, offset);
        } else {
          ASSERT(!scratch.is(rs));
          r2 = scratch;
          li(r2, rt);
          sltu(scratch, rs, r2);
          offset = shifted_branch_offset(L, false);
          beq(scratch, zero_reg, offset);
        }
        break;
      case Uless:
        if (rt.imm32_ == 0) {
          // No code needs to be emitted: nothing is unsigned-less than zero.
          return;
        } else if (is_int16(rt.imm32_)) {
          sltiu(scratch, rs, rt.imm32_);
          offset = shifted_branch_offset(L, false);
          bne(scratch, zero_reg, offset);
        } else {
          ASSERT(!scratch.is(rs));
          r2 = scratch;
          li(r2, rt);
          sltu(scratch, rs, r2);
          offset = shifted_branch_offset(L, false);
          bne(scratch, zero_reg, offset);
        }
        break;
      case Uless_equal:
        if (rt.imm32_ == 0) {
          offset = shifted_branch_offset(L, false);
          b(offset);
        } else {
          ASSERT(!scratch.is(rs));
          r2 = scratch;
          li(r2, rt);
          sltu(scratch, r2, rs);
          offset = shifted_branch_offset(L, false);
          beq(scratch, zero_reg, offset);
        }
        break;
      default:
        UNREACHABLE();
    }
  }
  // Check that offset fits in an int16_t.
  ASSERT(is_int16(offset));
  // Emit a nop in the branch delay slot if required.
  if (bdslot == PROTECT)
    nop();
}


void MacroAssembler::BranchAndLink(int16_t offset, BranchDelaySlot bdslot) {
  BranchAndLinkShort(offset, bdslot);
}


void MacroAssembler::BranchAndLink(int16_t offset, Condition cond, Register rs,
                                   const Operand& rt,
                                   BranchDelaySlot bdslot) {
  BranchAndLinkShort(offset, cond, rs, rt, bdslot);
}


void MacroAssembler::BranchAndLink(Label* L, BranchDelaySlot bdslot) {
  bool is_label_near = is_near(L);
  if (UseAbsoluteCodePointers() && !is_label_near) {
    Jalr(L, bdslot);
  } else {
    BranchAndLinkShort(L, bdslot);
  }
}


void MacroAssembler::BranchAndLink(Label* L, Condition cond, Register rs,
                                   const Operand& rt,
                                   BranchDelaySlot bdslot) {
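  // For a target out of short-branch range, branch over an absolute jalr on
  // the inverted condition, so the link-and-jump happens only when |cond|
  // actually holds.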
  bool is_label_near = is_near(L);
  if (UseAbsoluteCodePointers() && !is_label_near) {
    Label skip;
    Condition neg_cond = NegateCondition(cond);
    BranchShort(&skip, neg_cond, rs, rt);
    Jalr(L, bdslot);
    bind(&skip);
  } else {
    BranchAndLinkShort(L, cond, rs, rt, bdslot);
  }
}


// We need to use a bgezal or bltzal, but they can't be used directly with the
// slt instructions. We could use sub or add instead but we would miss overflow
// cases, so we keep slt and add an intermediate third instruction.
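// For example, "branch and link if rs > r2" is emitted roughly as:
//   slt(scratch, r2, rs);         // scratch = 1 when r2 < rs, else 0.
//   addiu(scratch, scratch, -1);  // scratch = 0 when taken, -1 otherwise.
//   bgezal(scratch, offset);      // Links and branches when scratch >= 0.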
void MacroAssembler::BranchAndLinkShort(int16_t offset,
                                        BranchDelaySlot bdslot) {
  bal(offset);

  // Emit a nop in the branch delay slot if required.
  if (bdslot == PROTECT)
    nop();
}


void MacroAssembler::BranchAndLinkShort(int16_t offset, Condition cond,
                                        Register rs, const Operand& rt,
                                        BranchDelaySlot bdslot) {
  BRANCH_ARGS_CHECK(cond, rs, rt);
  Register r2 = no_reg;
  Register scratch = at;

  if (rt.is_reg()) {
    r2 = rt.rm_;
  } else if (cond != cc_always) {
    r2 = scratch;
    li(r2, rt);
  }

  switch (cond) {
    case cc_always:
      bal(offset);
      break;
    case eq:
      bne(rs, r2, 2);
      nop();
      bal(offset);
      break;
    case ne:
      beq(rs, r2, 2);
      nop();
      bal(offset);
      break;

    // Signed comparison.
    case greater:
      slt(scratch, r2, rs);
      addiu(scratch, scratch, -1);
      bgezal(scratch, offset);
      break;
    case greater_equal:
      slt(scratch, rs, r2);
      addiu(scratch, scratch, -1);
      bltzal(scratch, offset);
      break;
    case less:
      slt(scratch, rs, r2);
      addiu(scratch, scratch, -1);
      bgezal(scratch, offset);
      break;
    case less_equal:
      slt(scratch, r2, rs);
      addiu(scratch, scratch, -1);
      bltzal(scratch, offset);
      break;

    // Unsigned comparison.
    case Ugreater:
      sltu(scratch, r2, rs);
      addiu(scratch, scratch, -1);
      bgezal(scratch, offset);
      break;
    case Ugreater_equal:
      sltu(scratch, rs, r2);
      addiu(scratch, scratch, -1);
      bltzal(scratch, offset);
      break;
    case Uless:
      sltu(scratch, rs, r2);
      addiu(scratch, scratch, -1);
      bgezal(scratch, offset);
      break;
    case Uless_equal:
      sltu(scratch, r2, rs);
      addiu(scratch, scratch, -1);
      bltzal(scratch, offset);
      break;

    default:
      UNREACHABLE();
  }
  // Emit a nop in the branch delay slot if required.
  if (bdslot == PROTECT)
    nop();
}


void MacroAssembler::BranchAndLinkShort(Label* L, BranchDelaySlot bdslot) {
  bal(shifted_branch_offset(L, false));

  // Emit a nop in the branch delay slot if required.
  if (bdslot == PROTECT)
    nop();
}


void MacroAssembler::BranchAndLinkShort(Label* L, Condition cond, Register rs,
                                        const Operand& rt,
                                        BranchDelaySlot bdslot) {
  BRANCH_ARGS_CHECK(cond, rs, rt);

  int32_t offset;
  Register r2 = no_reg;
  Register scratch = at;
  if (rt.is_reg()) {
    r2 = rt.rm_;
  } else if (cond != cc_always) {
    r2 = scratch;
    li(r2, rt);
  }

  switch (cond) {
    case cc_always:
      offset = shifted_branch_offset(L, false);
      bal(offset);
      break;
    case eq:
      bne(rs, r2, 2);
      nop();
      offset = shifted_branch_offset(L, false);
      bal(offset);
      break;
    case ne:
      beq(rs, r2, 2);
      nop();
      offset = shifted_branch_offset(L, false);
      bal(offset);
      break;

    // Signed comparison.
    case greater:
      slt(scratch, r2, rs);
      addiu(scratch, scratch, -1);
      offset = shifted_branch_offset(L, false);
      bgezal(scratch, offset);
      break;
    case greater_equal:
      slt(scratch, rs, r2);
      addiu(scratch, scratch, -1);
      offset = shifted_branch_offset(L, false);
      bltzal(scratch, offset);
      break;
    case less:
      slt(scratch, rs, r2);
      addiu(scratch, scratch, -1);
      offset = shifted_branch_offset(L, false);
      bgezal(scratch, offset);
      break;
    case less_equal:
      slt(scratch, r2, rs);
      addiu(scratch, scratch, -1);
      offset = shifted_branch_offset(L, false);
      bltzal(scratch, offset);
      break;

    // Unsigned comparison.
    case Ugreater:
      sltu(scratch, r2, rs);
      addiu(scratch, scratch, -1);
      offset = shifted_branch_offset(L, false);
      bgezal(scratch, offset);
      break;
    case Ugreater_equal:
      sltu(scratch, rs, r2);
      addiu(scratch, scratch, -1);
      offset = shifted_branch_offset(L, false);
      bltzal(scratch, offset);
      break;
    case Uless:
      sltu(scratch, rs, r2);
      addiu(scratch, scratch, -1);
      offset = shifted_branch_offset(L, false);
      bgezal(scratch, offset);
      break;
    case Uless_equal:
      sltu(scratch, r2, rs);
      addiu(scratch, scratch, -1);
      offset = shifted_branch_offset(L, false);
      bltzal(scratch, offset);
      break;

    default:
      UNREACHABLE();
  }

  // Check that offset fits in an int16_t.
  ASSERT(is_int16(offset));

  // Emit a nop in the branch delay slot if required.
  if (bdslot == PROTECT)
    nop();
}


void MacroAssembler::Jump(Register target,
                          Condition cond,
                          Register rs,
                          const Operand& rt,
                          BranchDelaySlot bd) {
  BlockTrampolinePoolScope block_trampoline_pool(this);
  if (cond == cc_always) {
    jr(target);
  } else {
    BRANCH_ARGS_CHECK(cond, rs, rt);
    Branch(2, NegateCondition(cond), rs, rt);
    jr(target);
  }
  // Emit a nop in the branch delay slot if required.
  if (bd == PROTECT)
    nop();
}


void MacroAssembler::Jump(intptr_t target,
                          RelocInfo::Mode rmode,
                          Condition cond,
                          Register rs,
                          const Operand& rt,
                          BranchDelaySlot bd) {
  li(t9, Operand(target, rmode));
  Jump(t9, cond, rs, rt, bd);
}


void MacroAssembler::Jump(Address target,
                          RelocInfo::Mode rmode,
                          Condition cond,
                          Register rs,
                          const Operand& rt,
                          BranchDelaySlot bd) {
  ASSERT(!RelocInfo::IsCodeTarget(rmode));
  Jump(reinterpret_cast<intptr_t>(target), rmode, cond, rs, rt, bd);
}


void MacroAssembler::Jump(Handle<Code> code,
                          RelocInfo::Mode rmode,
                          Condition cond,
                          Register rs,
                          const Operand& rt,
                          BranchDelaySlot bd) {
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond, rs, rt, bd);
}


int MacroAssembler::CallSize(Register target,
                             Condition cond,
                             Register rs,
                             const Operand& rt,
                             BranchDelaySlot bd) {
  int size = 0;

  if (cond == cc_always) {
    size += 1;
  } else {
    size += 3;
  }
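  // E.g. an unconditional call is a single jalr; a conditional call adds a
  // branch over the jalr plus that branch's delay-slot nop, giving 3.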

  if (bd == PROTECT)
    size += 1;

  return size * kInstrSize;
}


// Note: To call gcc-compiled C code on mips, you must call through t9.
void MacroAssembler::Call(Register target,
                          Condition cond,
                          Register rs,
                          const Operand& rt,
                          BranchDelaySlot bd) {
  BlockTrampolinePoolScope block_trampoline_pool(this);
  Label start;
  bind(&start);
  if (cond == cc_always) {
    jalr(target);
  } else {
    BRANCH_ARGS_CHECK(cond, rs, rt);
    Branch(2, NegateCondition(cond), rs, rt);
    jalr(target);
  }
  // Emit a nop in the branch delay slot if required.
  if (bd == PROTECT)
    nop();

  ASSERT_EQ(CallSize(target, cond, rs, rt, bd),
            SizeOfCodeGeneratedSince(&start));
}
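// Usage sketch (illustrative target; the o32 PIC convention expects the
// callee's address in t9, which gcc-compiled code uses to compute gp):
//   li(t9, Operand(ExternalReference(...)));
//   Call(t9);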


int MacroAssembler::CallSize(Address target,
                             RelocInfo::Mode rmode,
                             Condition cond,
                             Register rs,
                             const Operand& rt,
                             BranchDelaySlot bd) {
  int size = CallSize(t9, cond, rs, rt, bd);
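  // The address is materialized into t9 with a fixed two-instruction li
  // (lui/ori), hence the extra 2 * kInstrSize.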
  return size + 2 * kInstrSize;
}


void MacroAssembler::Call(Address target,
                          RelocInfo::Mode rmode,
                          Condition cond,
                          Register rs,
                          const Operand& rt,
                          BranchDelaySlot bd) {
  BlockTrampolinePoolScope block_trampoline_pool(this);
  Label start;
  bind(&start);
  int32_t target_int = reinterpret_cast<int32_t>(target);
  // Must record previous source positions before the
  // li() generates a new code target.
  positions_recorder()->WriteRecordedPositions();
  li(t9, Operand(target_int, rmode), true);
  Call(t9, cond, rs, rt, bd);
  ASSERT_EQ(CallSize(target, rmode, cond, rs, rt, bd),
            SizeOfCodeGeneratedSince(&start));
}


int MacroAssembler::CallSize(Handle<Code> code,
                             RelocInfo::Mode rmode,
                             unsigned ast_id,
                             Condition cond,
                             Register rs,
                             const Operand& rt,
                             BranchDelaySlot bd) {
  return CallSize(reinterpret_cast<Address>(code.location()),
                  rmode, cond, rs, rt, bd);
}


void MacroAssembler::Call(Handle<Code> code,
                          RelocInfo::Mode rmode,
                          unsigned ast_id,
                          Condition cond,
                          Register rs,
                          const Operand& rt,
                          BranchDelaySlot bd) {
  BlockTrampolinePoolScope block_trampoline_pool(this);
  Label start;
  bind(&start);
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  if (rmode == RelocInfo::CODE_TARGET && ast_id != kNoASTId) {
    SetRecordedAstId(ast_id);
    rmode = RelocInfo::CODE_TARGET_WITH_ID;
  }
  Call(reinterpret_cast<Address>(code.location()), rmode, cond, rs, rt, bd);
  ASSERT_EQ(CallSize(code, rmode, ast_id, cond, rs, rt, bd),
            SizeOfCodeGeneratedSince(&start));
}


void MacroAssembler::Ret(Condition cond,
                         Register rs,
                         const Operand& rt,
                         BranchDelaySlot bd) {
  Jump(ra, cond, rs, rt, bd);
}


void MacroAssembler::J(Label* L, BranchDelaySlot bdslot) {
  BlockTrampolinePoolScope block_trampoline_pool(this);

  uint32_t imm28;
  imm28 = jump_address(L);
  imm28 &= kImm28Mask;
  { BlockGrowBufferScope block_buf_growth(this);
    // Buffer growth (and relocation) must be blocked for internal references
    // until associated instructions are emitted and available to be patched.
    RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE);
    j(imm28);
  }
  // Emit a nop in the branch delay slot if required.
  if (bdslot == PROTECT)
    nop();
}


void MacroAssembler::Jr(Label* L, BranchDelaySlot bdslot) {
  BlockTrampolinePoolScope block_trampoline_pool(this);

  uint32_t imm32;
  imm32 = jump_address(L);
  { BlockGrowBufferScope block_buf_growth(this);
    // Buffer growth (and relocation) must be blocked for internal references
    // until associated instructions are emitted and available to be patched.
    RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE);
    lui(at, (imm32 & kHiMask) >> kLuiShift);
    ori(at, at, (imm32 & kImm16Mask));
  }
  jr(at);

  // Emit a nop in the branch delay slot if required.
  if (bdslot == PROTECT)
    nop();
}


void MacroAssembler::Jalr(Label* L, BranchDelaySlot bdslot) {
  BlockTrampolinePoolScope block_trampoline_pool(this);

  uint32_t imm32;
  imm32 = jump_address(L);
  { BlockGrowBufferScope block_buf_growth(this);
    // Buffer growth (and relocation) must be blocked for internal references
    // until associated instructions are emitted and available to be patched.
    RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE);
    lui(at, (imm32 & kHiMask) >> kLuiShift);
    ori(at, at, (imm32 & kImm16Mask));
  }
  jalr(at);

  // Emit a nop in the branch delay slot if required.
  if (bdslot == PROTECT)
    nop();
}


void MacroAssembler::DropAndRet(int drop,
                                Condition cond,
                                Register r1,
                                const Operand& r2) {
  // This is a workaround to make sure only one branch instruction is
  // generated. It relies on Drop and Ret not creating branches if
  // cond == cc_always.
  Label skip;
  if (cond != cc_always) {
    Branch(&skip, NegateCondition(cond), r1, r2);
  }

  Drop(drop);
  Ret();

  if (cond != cc_always) {
    bind(&skip);
  }
}


void MacroAssembler::Drop(int count,
                          Condition cond,
                          Register reg,
                          const Operand& op) {
  if (count <= 0) {
    return;
  }

  Label skip;

  if (cond != al) {
    Branch(&skip, NegateCondition(cond), reg, op);
  }

  addiu(sp, sp, count * kPointerSize);

  if (cond != al) {
    bind(&skip);
  }
}


void MacroAssembler::Swap(Register reg1,
                          Register reg2,
                          Register scratch) {
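  // With no scratch register available, fall back to the three-XOR swap.
  // Note this assumes reg1 and reg2 are distinct; XOR-swapping a register
  // with itself would zero it.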
  if (scratch.is(no_reg)) {
    Xor(reg1, reg1, Operand(reg2));
    Xor(reg2, reg2, Operand(reg1));
    Xor(reg1, reg1, Operand(reg2));
  } else {
    mov(scratch, reg1);
    mov(reg1, reg2);
    mov(reg2, scratch);
  }
}


void MacroAssembler::Call(Label* target) {
  BranchAndLink(target);
}


void MacroAssembler::Push(Handle<Object> handle) {
  li(at, Operand(handle));
  push(at);
}


#ifdef ENABLE_DEBUGGER_SUPPORT

void MacroAssembler::DebugBreak() {
  ASSERT(allow_stub_calls());
  mov(a0, zero_reg);
  li(a1, Operand(ExternalReference(Runtime::kDebugBreak, isolate())));
  CEntryStub ces(1);
  Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}

#endif  // ENABLE_DEBUGGER_SUPPORT


// ---------------------------------------------------------------------------
// Exception handling.

void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize);
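  // Handler frame layout being pushed, lowest address first:
  //   sp + 0 * kPointerSize : next handler
  //   sp + 1 * kPointerSize : state
  //   sp + 2 * kPointerSize : context (cp)
  //   sp + 3 * kPointerSize : frame pointer (fp)
  //   sp + 4 * kPointerSize : return address (ra)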

  // The return address is passed in register ra.
  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      li(t0, Operand(StackHandler::TRY_CATCH));
    } else {
      li(t0, Operand(StackHandler::TRY_FINALLY));
    }
    // Save the current handler as the next handler.
    li(t2, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
    lw(t1, MemOperand(t2));

    addiu(sp, sp, -StackHandlerConstants::kSize);
    sw(ra, MemOperand(sp, StackHandlerConstants::kPCOffset));
    sw(fp, MemOperand(sp, StackHandlerConstants::kFPOffset));
    sw(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
    sw(t0, MemOperand(sp, StackHandlerConstants::kStateOffset));
    sw(t1, MemOperand(sp, StackHandlerConstants::kNextOffset));

    // Link this handler as the new current one.
    sw(sp, MemOperand(t2));

  } else {
    // Must preserve a0-a3, and s0 (argv).
    ASSERT(try_location == IN_JS_ENTRY);
    // The frame pointer does not point to a JS frame so we save NULL
    // for fp. We expect the code throwing an exception to check fp
    // before dereferencing it to restore the context.
    li(t0, Operand(StackHandler::ENTRY));

    // Save the current handler as the next handler.
    li(t2, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
    lw(t1, MemOperand(t2));

    ASSERT(Smi::FromInt(0) == 0);  // Used for no context.

    addiu(sp, sp, -StackHandlerConstants::kSize);
    sw(ra, MemOperand(sp, StackHandlerConstants::kPCOffset));
    sw(zero_reg, MemOperand(sp, StackHandlerConstants::kFPOffset));
    sw(zero_reg, MemOperand(sp, StackHandlerConstants::kContextOffset));
    sw(t0, MemOperand(sp, StackHandlerConstants::kStateOffset));
    sw(t1, MemOperand(sp, StackHandlerConstants::kNextOffset));

    // Link this handler as the new current one.
    sw(sp, MemOperand(t2));
  }
}


void MacroAssembler::PopTryHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  pop(a1);
  Addu(sp, sp, Operand(StackHandlerConstants::kSize - kPointerSize));
  li(at, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
  sw(a1, MemOperand(at));
}


void MacroAssembler::Throw(Register value) {
  // v0 is expected to hold the exception.
  Move(v0, value);

  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize);

  // Drop the sp to the top of the handler.
  li(a3, Operand(ExternalReference(Isolate::kHandlerAddress,
                                   isolate())));
  lw(sp, MemOperand(a3));

  // Restore the next handler.
  pop(a2);
  sw(a2, MemOperand(a3));

  // Restore context and frame pointer, discard state (a3).
  MultiPop(a3.bit() | cp.bit() | fp.bit());

  // If the handler is a JS frame, restore the context to the frame.
  // (a3 == ENTRY) == (fp == 0) == (cp == 0), so we could test any
  // of them.
  Label done;
  Branch(&done, eq, fp, Operand(zero_reg));
  sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  bind(&done);

#ifdef DEBUG
  // When emitting debug_code, set ra as return address for the jump.
  // 5 instructions: add: 1, pop: 2, jump: 2.
  const int kOffsetRaInstructions = 5;
  Label find_ra;

  if (emit_debug_code()) {
    // Compute ra for the Jump(t9).
    const int kOffsetRaBytes = kOffsetRaInstructions * Assembler::kInstrSize;

    // This branch-and-link sequence is needed to get the current PC on mips,
    // saved to the ra register. Then adjusted for instruction count.
    bal(&find_ra);  // bal exposes branch-delay slot.
    nop();  // Branch delay slot nop.
    bind(&find_ra);
    addiu(ra, ra, kOffsetRaBytes);
  }
#endif

  pop(t9);  // 2 instructions: lw, add sp.
  Jump(t9);  // 2 instructions: jr, nop (in delay slot).

  if (emit_debug_code()) {
    // Make sure that the expected number of instructions were generated.
    ASSERT_EQ(kOffsetRaInstructions,
              InstructionsGeneratedSince(&find_ra));
  }
}


void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
                                      Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize);

  // v0 is expected to hold the exception.
  Move(v0, value);

  // Drop sp to the top stack handler.
  li(a3, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
  lw(sp, MemOperand(a3));

  // Unwind the handlers until the ENTRY handler is found.
  Label loop, done;
  bind(&loop);
  // Load the type of the current stack handler.
  const int kStateOffset = StackHandlerConstants::kStateOffset;
  lw(a2, MemOperand(sp, kStateOffset));
  Branch(&done, eq, a2, Operand(StackHandler::ENTRY));
  // Fetch the next handler in the list.
  const int kNextOffset = StackHandlerConstants::kNextOffset;
  lw(sp, MemOperand(sp, kNextOffset));
  jmp(&loop);
  bind(&done);

  // Set the top handler address to the next handler past the current
  // ENTRY handler.
  pop(a2);
  sw(a2, MemOperand(a3));

  if (type == OUT_OF_MEMORY) {
    // Set external caught exception to false.
    ExternalReference external_caught(
        Isolate::kExternalCaughtExceptionAddress, isolate());
    li(a0, Operand(false, RelocInfo::NONE));
    li(a2, Operand(external_caught));
    sw(a0, MemOperand(a2));

    // Set pending exception and v0 to out of memory exception.
    Failure* out_of_memory = Failure::OutOfMemoryException();
    li(v0, Operand(reinterpret_cast<int32_t>(out_of_memory)));
    li(a2, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
                                     isolate())));
    sw(v0, MemOperand(a2));
  }

  // Stack layout at this point. See also StackHandlerConstants.
  // sp ->  state (ENTRY)
  //        cp
  //        fp
  //        ra

  // Restore context and frame pointer, discard state (a2).
  MultiPop(a2.bit() | cp.bit() | fp.bit());

#ifdef DEBUG
  // When emitting debug_code, set ra as return address for the jump.
  // 5 instructions: add: 1, pop: 2, jump: 2.
  const int kOffsetRaInstructions = 5;
  Label find_ra;

  if (emit_debug_code()) {
    // Compute ra for the Jump(t9).
    const int kOffsetRaBytes = kOffsetRaInstructions * Assembler::kInstrSize;

    // This branch-and-link sequence is needed to get the current PC on mips,
    // saved to the ra register. Then adjusted for instruction count.
    bal(&find_ra);  // bal exposes branch-delay slot.
    nop();  // Branch delay slot nop.
    bind(&find_ra);
    addiu(ra, ra, kOffsetRaBytes);
  }
#endif
  pop(t9);  // 2 instructions: lw, add sp.
  Jump(t9);  // 2 instructions: jr, nop (in delay slot).

  if (emit_debug_code()) {
    // Make sure that the expected number of instructions were generated.
    ASSERT_EQ(kOffsetRaInstructions,
              InstructionsGeneratedSince(&find_ra));
  }
}


void MacroAssembler::AllocateInNewSpace(int object_size,
                                        Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      li(result, 0x7091);
      li(scratch1, 0x7191);
      li(scratch2, 0x7291);
    }
    jmp(gc_required);
    return;
  }

  ASSERT(!result.is(scratch1));
  ASSERT(!result.is(scratch2));
  ASSERT(!scratch1.is(scratch2));
  ASSERT(!scratch1.is(t9));
  ASSERT(!scratch2.is(t9));
  ASSERT(!result.is(t9));

  // Make object size into bytes.
  if ((flags & SIZE_IN_WORDS) != 0) {
    object_size *= kPointerSize;
  }
  ASSERT_EQ(0, object_size & kObjectAlignmentMask);

  // Check relative positions of allocation top and limit addresses.
  // ARM adds additional checks to make sure the ldm instruction can be
  // used. On MIPS we don't have ldm so we don't need additional checks either.
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate());
  intptr_t top =
      reinterpret_cast<intptr_t>(new_space_allocation_top.address());
  intptr_t limit =
      reinterpret_cast<intptr_t>(new_space_allocation_limit.address());
  ASSERT((limit - top) == kPointerSize);
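  // I.e. the limit word sits immediately after the top word, so both can be
  // loaded below from the single base register topaddr.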

  // Set up allocation top address and object size registers.
  Register topaddr = scratch1;
  Register obj_size_reg = scratch2;
  li(topaddr, Operand(new_space_allocation_top));
  li(obj_size_reg, Operand(object_size));

  // This code stores a temporary value in t9.
  if ((flags & RESULT_CONTAINS_TOP) == 0) {
    // Load allocation top into result and allocation limit into t9.
    lw(result, MemOperand(topaddr));
    lw(t9, MemOperand(topaddr, kPointerSize));
  } else {
    if (emit_debug_code()) {
      // Assert that result actually contains top on entry. t9 is used
      // immediately below so this use of t9 does not cause difference with
      // respect to register content between debug and release mode.
      lw(t9, MemOperand(topaddr));
      Check(eq, "Unexpected allocation top", result, Operand(t9));
    }
    // Load allocation limit into t9. Result already contains allocation top.
    lw(t9, MemOperand(topaddr, limit - top));
  }

  // Calculate new top and bail out if new space is exhausted. Use result
  // to calculate the new top.
  Addu(scratch2, result, Operand(obj_size_reg));
  Branch(gc_required, Ugreater, scratch2, Operand(t9));
  sw(scratch2, MemOperand(topaddr));

  // Tag object if requested.
  if ((flags & TAG_OBJECT) != 0) {
    Addu(result, result, Operand(kHeapObjectTag));
  }
}


void MacroAssembler::AllocateInNewSpace(Register object_size,
                                        Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      li(result, 0x7091);
      li(scratch1, 0x7191);
      li(scratch2, 0x7291);
    }
    jmp(gc_required);
    return;
  }

  ASSERT(!result.is(scratch1));
  ASSERT(!result.is(scratch2));
  ASSERT(!scratch1.is(scratch2));
  ASSERT(!scratch1.is(t9) && !scratch2.is(t9) && !result.is(t9));

  // Check relative positions of allocation top and limit addresses.
  // ARM adds additional checks to make sure the ldm instruction can be
  // used. On MIPS we don't have ldm so we don't need additional checks either.
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate());
  intptr_t top =
      reinterpret_cast<intptr_t>(new_space_allocation_top.address());
  intptr_t limit =
      reinterpret_cast<intptr_t>(new_space_allocation_limit.address());
  ASSERT((limit - top) == kPointerSize);

  // Set up allocation top address and object size registers.
  Register topaddr = scratch1;
  li(topaddr, Operand(new_space_allocation_top));

  // This code stores a temporary value in t9.
  if ((flags & RESULT_CONTAINS_TOP) == 0) {
    // Load allocation top into result and allocation limit into t9.
    lw(result, MemOperand(topaddr));
    lw(t9, MemOperand(topaddr, kPointerSize));
  } else {
    if (emit_debug_code()) {
      // Assert that result actually contains top on entry. t9 is used
      // immediately below so this use of t9 does not cause difference with
      // respect to register content between debug and release mode.
      lw(t9, MemOperand(topaddr));
      Check(eq, "Unexpected allocation top", result, Operand(t9));
    }
    // Load allocation limit into t9. Result already contains allocation top.
    lw(t9, MemOperand(topaddr, limit - top));
  }

  // Calculate new top and bail out if new space is exhausted. Use result
  // to calculate the new top. Object size may be in words so a shift is
  // required to get the number of bytes.
  if ((flags & SIZE_IN_WORDS) != 0) {
    sll(scratch2, object_size, kPointerSizeLog2);
    Addu(scratch2, result, scratch2);
  } else {
    Addu(scratch2, result, Operand(object_size));
  }
  Branch(gc_required, Ugreater, scratch2, Operand(t9));

  // Update allocation top. result temporarily holds the new top.
  if (emit_debug_code()) {
    And(t9, scratch2, Operand(kObjectAlignmentMask));
    Check(eq, "Unaligned allocation in new space", t9, Operand(zero_reg));
  }
  sw(scratch2, MemOperand(topaddr));

  // Tag object if requested.
  if ((flags & TAG_OBJECT) != 0) {
    Addu(result, result, Operand(kHeapObjectTag));
  }
}


void MacroAssembler::UndoAllocationInNewSpace(Register object,
                                              Register scratch) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Make sure the object has no tag before resetting top.
  And(object, object, Operand(~kHeapObjectTagMask));
#ifdef DEBUG
  // Check that the object un-allocated is below the current top.
  li(scratch, Operand(new_space_allocation_top));
  lw(scratch, MemOperand(scratch));
  Check(less, "Undo allocation of non allocated memory",
        object, Operand(scratch));
#endif
  // Write the address of the object to un-allocate as the current top.
  li(scratch, Operand(new_space_allocation_top));
  sw(object, MemOperand(scratch));
}


void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  sll(scratch1, length, 1);  // Length in bytes, not chars.
  addiu(scratch1, scratch1,
        kObjectAlignmentMask + SeqTwoByteString::kHeaderSize);
  And(scratch1, scratch1, Operand(~kObjectAlignmentMask));
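  // I.e. scratch1 = RoundUp(2 * length + SeqTwoByteString::kHeaderSize,
  //                         kObjectAlignment) bytes.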

  // Allocate two-byte string in new space.
  AllocateInNewSpace(scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  InitializeNewString(result,
                      length,
                      Heap::kStringMapRootIndex,
                      scratch1,
                      scratch2);
}


void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
  ASSERT(kCharSize == 1);
  addiu(scratch1, length, kObjectAlignmentMask + SeqAsciiString::kHeaderSize);
  And(scratch1, scratch1, Operand(~kObjectAlignmentMask));

  // Allocate ASCII string in new space.
  AllocateInNewSpace(scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  InitializeNewString(result,
                      length,
                      Heap::kAsciiStringMapRootIndex,
                      scratch1,
                      scratch2);
}


void MacroAssembler::AllocateTwoByteConsString(Register result,
                                               Register length,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);
  InitializeNewString(result,
                      length,
                      Heap::kConsStringMapRootIndex,
                      scratch1,
                      scratch2);
}


void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register length,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);
  InitializeNewString(result,
                      length,
                      Heap::kConsAsciiStringMapRootIndex,
                      scratch1,
                      scratch2);
}


void MacroAssembler::AllocateTwoByteSlicedString(Register result,
                                                 Register length,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  AllocateInNewSpace(SlicedString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  InitializeNewString(result,
                      length,
                      Heap::kSlicedStringMapRootIndex,
                      scratch1,
                      scratch2);
}


void MacroAssembler::AllocateAsciiSlicedString(Register result,
                                               Register length,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  AllocateInNewSpace(SlicedString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  InitializeNewString(result,
                      length,
                      Heap::kSlicedAsciiStringMapRootIndex,
                      scratch1,
                      scratch2);
}


// Allocates a heap number or jumps to the label if the young space is full and
// a scavenge is needed.
void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Register heap_number_map,
                                        Label* need_gc) {
  // Allocate an object in the heap for the heap number and tag it as a heap
  // object.
  AllocateInNewSpace(HeapNumber::kSize,
                     result,
                     scratch1,
                     scratch2,
                     need_gc,
                     TAG_OBJECT);

  // Store heap number map in the allocated object.
  AssertRegisterIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
  sw(heap_number_map, FieldMemOperand(result, HeapObject::kMapOffset));
}


void MacroAssembler::AllocateHeapNumberWithValue(Register result,
                                                 FPURegister value,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  LoadRoot(t8, Heap::kHeapNumberMapRootIndex);
  AllocateHeapNumber(result, scratch1, scratch2, t8, gc_required);
  sdc1(value, FieldMemOperand(result, HeapNumber::kValueOffset));
}


// Copies a fixed number of fields of heap objects from src to dst.
void MacroAssembler::CopyFields(Register dst,
                                Register src,
                                RegList temps,
                                int field_count) {
  ASSERT((temps & dst.bit()) == 0);
  ASSERT((temps & src.bit()) == 0);
  // Primitive implementation using only one temporary register.

  Register tmp = no_reg;
  // Find a temp register in temps list.
  for (int i = 0; i < kNumRegisters; i++) {
    if ((temps & (1 << i)) != 0) {
      tmp.code_ = i;
      break;
    }
  }
  ASSERT(!tmp.is(no_reg));

  for (int i = 0; i < field_count; i++) {
    lw(tmp, FieldMemOperand(src, i * kPointerSize));
    sw(tmp, FieldMemOperand(dst, i * kPointerSize));
  }
}
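
// Usage sketch (illustrative registers): CopyFields(a1, a0, t5.bit(), 3)
// would copy the first three pointer-sized fields of the object at a0 into
// the object at a1, using t5 as the temporary.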


void MacroAssembler::CopyBytes(Register src,
                               Register dst,
                               Register length,
                               Register scratch) {
  Label align_loop, align_loop_1, word_loop, byte_loop, byte_loop_1, done;

  // Align src before copying in word size chunks.
  bind(&align_loop);
  Branch(&done, eq, length, Operand(zero_reg));
  bind(&align_loop_1);
  And(scratch, src, kPointerSize - 1);
  Branch(&word_loop, eq, scratch, Operand(zero_reg));
  lbu(scratch, MemOperand(src));
  Addu(src, src, 1);
  sb(scratch, MemOperand(dst));
  Addu(dst, dst, 1);
  Subu(length, length, Operand(1));
  Branch(&byte_loop_1, ne, length, Operand(zero_reg));

  // Copy bytes in word size chunks.
  bind(&word_loop);
  if (emit_debug_code()) {
    And(scratch, src, kPointerSize - 1);
    Assert(eq, "Expecting alignment for CopyBytes",
           scratch, Operand(zero_reg));
  }
  Branch(&byte_loop, lt, length, Operand(kPointerSize));
  lw(scratch, MemOperand(src));
  Addu(src, src, kPointerSize);

  // TODO(kalmard) check if this can be optimized to use sw in most cases.
  // Can't use unaligned access - copy byte by byte.
  sb(scratch, MemOperand(dst, 0));
  srl(scratch, scratch, 8);
  sb(scratch, MemOperand(dst, 1));
  srl(scratch, scratch, 8);
  sb(scratch, MemOperand(dst, 2));
  srl(scratch, scratch, 8);
  sb(scratch, MemOperand(dst, 3));
  Addu(dst, dst, 4);

  Subu(length, length, Operand(kPointerSize));
  Branch(&word_loop);

  // Copy the last bytes if any left.
  bind(&byte_loop);
  Branch(&done, eq, length, Operand(zero_reg));
  bind(&byte_loop_1);
  lbu(scratch, MemOperand(src));
  Addu(src, src, 1);
  sb(scratch, MemOperand(dst));
  Addu(dst, dst, 1);
  Subu(length, length, Operand(1));
  Branch(&byte_loop_1, ne, length, Operand(zero_reg));
  bind(&done);
}


void MacroAssembler::CheckFastElements(Register map,
                                       Register scratch,
                                       Label* fail) {
  STATIC_ASSERT(FAST_ELEMENTS == 0);
  lbu(scratch, FieldMemOperand(map, Map::kBitField2Offset));
  Branch(fail, hi, scratch, Operand(Map::kMaximumBitField2FastElementValue));
}


void MacroAssembler::CheckMap(Register obj,
                              Register scratch,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }
  lw(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
  li(at, Operand(map));
  Branch(fail, ne, scratch, Operand(at));
}


void MacroAssembler::DispatchMap(Register obj,
                                 Register scratch,
                                 Handle<Map> map,
                                 Handle<Code> success,
                                 SmiCheckType smi_check_type) {
  Label fail;
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, &fail);
  }
  lw(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
  Jump(success, RelocInfo::CODE_TARGET, eq, scratch, Operand(map));
  bind(&fail);
}


void MacroAssembler::CheckMap(Register obj,
                              Register scratch,
                              Heap::RootListIndex index,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }
  lw(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
  LoadRoot(at, index);
  Branch(fail, ne, scratch, Operand(at));
}


void MacroAssembler::GetCFunctionDoubleResult(const DoubleRegister dst) {
  CpuFeatures::Scope scope(FPU);
  if (IsMipsSoftFloatABI) {
    Move(dst, v0, v1);
  } else {
    Move(dst, f0);  // Reg f0 is o32 ABI FP return value.
  }
}


void MacroAssembler::SetCallCDoubleArguments(DoubleRegister dreg) {
  CpuFeatures::Scope scope(FPU);
  if (!IsMipsSoftFloatABI) {
    Move(f12, dreg);
  } else {
    Move(a0, a1, dreg);
  }
}


void MacroAssembler::SetCallCDoubleArguments(DoubleRegister dreg1,
                                             DoubleRegister dreg2) {
  CpuFeatures::Scope scope(FPU);
  if (!IsMipsSoftFloatABI) {
3063 if (dreg2.is(f12)) {
3064 ASSERT(!dreg1.is(f14));
3065 Move(f14, dreg2);
3066 Move(f12, dreg1);
3067 } else {
3068 Move(f12, dreg1);
3069 Move(f14, dreg2);
3070 }
3071 } else {
3072 Move(a0, a1, dreg1);
3073 Move(a2, a3, dreg2);
3074 }
3075}
3076
3077
3078void MacroAssembler::SetCallCDoubleArguments(DoubleRegister dreg,
3079 Register reg) {
3080 CpuFeatures::Scope scope(FPU);
3081 if (!IsMipsSoftFloatABI) {
3082 Move(f12, dreg);
3083 Move(a2, reg);
3084 } else {
3085 Move(a2, reg);
3086 Move(a0, a1, dreg);
3087 }
3088}
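// A non-normative summary of the o32 conventions encoded by the overloads
// above: with hard-float, the first two double arguments are passed in f12
// and f14; with soft-float they are passed in the GPR pairs a0/a1 and a2/a3,
// and a (double, int) pair uses a0/a1 for the double plus a2 for the int.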
3089
3090
3091void MacroAssembler::SetCallKind(Register dst, CallKind call_kind) {
3092 // This macro takes the dst register to make the code more readable
3093 // at the call sites. However, the dst register has to be t1 to
3094 // follow the calling convention which requires the call type to be
3095 // in t1.
3096 ASSERT(dst.is(t1));
3097 if (call_kind == CALL_AS_FUNCTION) {
3098 li(dst, Operand(Smi::FromInt(1)));
3099 } else {
3100 li(dst, Operand(Smi::FromInt(0)));
3101 }
3102}
3103
3104
Steve Block6ded16b2010-05-10 14:33:55 +01003105// -----------------------------------------------------------------------------
Ben Murdoch257744e2011-11-30 15:57:28 +00003106// JavaScript invokes.
Steve Block6ded16b2010-05-10 14:33:55 +01003107
3108void MacroAssembler::InvokePrologue(const ParameterCount& expected,
3109 const ParameterCount& actual,
3110 Handle<Code> code_constant,
3111 Register code_reg,
3112 Label* done,
Steve Block44f0eee2011-05-26 01:26:41 +01003113 InvokeFlag flag,
Ben Murdoch257744e2011-11-30 15:57:28 +00003114 const CallWrapper& call_wrapper,
3115 CallKind call_kind) {
Steve Block6ded16b2010-05-10 14:33:55 +01003116 bool definitely_matches = false;
3117 Label regular_invoke;
3118
 3119 // Check whether the expected and actual argument counts match. If not,
 3120 // set up registers according to the contract with ArgumentsAdaptorTrampoline:
3121 // a0: actual arguments count
3122 // a1: function (passed through to callee)
3123 // a2: expected arguments count
3124 // a3: callee code entry
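  // For example (illustrative values only): a callee expecting 2 formal
  // parameters invoked with 3 actual arguments ends up with a0 == 3 and
  // a2 == 2, and control flows through ArgumentsAdaptorTrampoline below
  // instead of entering the callee directly.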
3125
3126 // The code below is made a lot easier because the calling code already sets
3127 // up actual and expected registers according to the contract if values are
3128 // passed in registers.
3129 ASSERT(actual.is_immediate() || actual.reg().is(a0));
3130 ASSERT(expected.is_immediate() || expected.reg().is(a2));
3131 ASSERT((!code_constant.is_null() && code_reg.is(no_reg)) || code_reg.is(a3));
3132
3133 if (expected.is_immediate()) {
3134 ASSERT(actual.is_immediate());
3135 if (expected.immediate() == actual.immediate()) {
3136 definitely_matches = true;
3137 } else {
3138 li(a0, Operand(actual.immediate()));
3139 const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
3140 if (expected.immediate() == sentinel) {
3141 // Don't worry about adapting arguments for builtins that
 3142 // don't want that done. Skip adaptation code by making it look
3143 // like we have a match between expected and actual number of
3144 // arguments.
3145 definitely_matches = true;
3146 } else {
3147 li(a2, Operand(expected.immediate()));
3148 }
3149 }
Ben Murdoch257744e2011-11-30 15:57:28 +00003150 } else if (actual.is_immediate()) {
3151 Branch(&regular_invoke, eq, expected.reg(), Operand(actual.immediate()));
3152 li(a0, Operand(actual.immediate()));
Steve Block6ded16b2010-05-10 14:33:55 +01003153 } else {
Ben Murdoch257744e2011-11-30 15:57:28 +00003154 Branch(&regular_invoke, eq, expected.reg(), Operand(actual.reg()));
Steve Block6ded16b2010-05-10 14:33:55 +01003155 }
3156
3157 if (!definitely_matches) {
3158 if (!code_constant.is_null()) {
3159 li(a3, Operand(code_constant));
3160 addiu(a3, a3, Code::kHeaderSize - kHeapObjectTag);
3161 }
3162
Steve Block44f0eee2011-05-26 01:26:41 +01003163 Handle<Code> adaptor =
3164 isolate()->builtins()->ArgumentsAdaptorTrampoline();
Steve Block6ded16b2010-05-10 14:33:55 +01003165 if (flag == CALL_FUNCTION) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003166 call_wrapper.BeforeCall(CallSize(adaptor));
Ben Murdoch257744e2011-11-30 15:57:28 +00003167 SetCallKind(t1, call_kind);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003168 Call(adaptor);
Ben Murdoch257744e2011-11-30 15:57:28 +00003169 call_wrapper.AfterCall();
Ben Murdoch85b71792012-04-11 18:30:58 +01003170 jmp(done);
Steve Block6ded16b2010-05-10 14:33:55 +01003171 } else {
Ben Murdoch257744e2011-11-30 15:57:28 +00003172 SetCallKind(t1, call_kind);
Steve Block44f0eee2011-05-26 01:26:41 +01003173 Jump(adaptor, RelocInfo::CODE_TARGET);
Steve Block6ded16b2010-05-10 14:33:55 +01003174 }
3175 bind(&regular_invoke);
3176 }
3177}
3178
Steve Block44f0eee2011-05-26 01:26:41 +01003179
Steve Block6ded16b2010-05-10 14:33:55 +01003180void MacroAssembler::InvokeCode(Register code,
3181 const ParameterCount& expected,
3182 const ParameterCount& actual,
Steve Block44f0eee2011-05-26 01:26:41 +01003183 InvokeFlag flag,
Ben Murdoch257744e2011-11-30 15:57:28 +00003184 const CallWrapper& call_wrapper,
3185 CallKind call_kind) {
Steve Block6ded16b2010-05-10 14:33:55 +01003186 Label done;
3187
Ben Murdoch85b71792012-04-11 18:30:58 +01003188 InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag,
Ben Murdoch257744e2011-11-30 15:57:28 +00003189 call_wrapper, call_kind);
Ben Murdoch85b71792012-04-11 18:30:58 +01003190 if (flag == CALL_FUNCTION) {
3191 SetCallKind(t1, call_kind);
3192 Call(code);
3193 } else {
3194 ASSERT(flag == JUMP_FUNCTION);
3195 SetCallKind(t1, call_kind);
3196 Jump(code);
Steve Block6ded16b2010-05-10 14:33:55 +01003197 }
Ben Murdoch85b71792012-04-11 18:30:58 +01003198 // Continue here if InvokePrologue handled the invocation via the
 3199 // arguments adaptor because of mismatched parameter counts.
3200 bind(&done);
Steve Block6ded16b2010-05-10 14:33:55 +01003201}
3202
3203
3204void MacroAssembler::InvokeCode(Handle<Code> code,
3205 const ParameterCount& expected,
3206 const ParameterCount& actual,
3207 RelocInfo::Mode rmode,
Ben Murdoch257744e2011-11-30 15:57:28 +00003208 InvokeFlag flag,
3209 CallKind call_kind) {
Steve Block6ded16b2010-05-10 14:33:55 +01003210 Label done;
3211
Ben Murdoch85b71792012-04-11 18:30:58 +01003212 InvokePrologue(expected, actual, code, no_reg, &done, flag,
Ben Murdoch257744e2011-11-30 15:57:28 +00003213 NullCallWrapper(), call_kind);
Ben Murdoch85b71792012-04-11 18:30:58 +01003214 if (flag == CALL_FUNCTION) {
3215 SetCallKind(t1, call_kind);
3216 Call(code, rmode);
3217 } else {
3218 SetCallKind(t1, call_kind);
3219 Jump(code, rmode);
Steve Block6ded16b2010-05-10 14:33:55 +01003220 }
Ben Murdoch85b71792012-04-11 18:30:58 +01003221 // Continue here if InvokePrologue handled the invocation via the
 3222 // arguments adaptor because of mismatched parameter counts.
3223 bind(&done);
Steve Block6ded16b2010-05-10 14:33:55 +01003224}
3225
3226
3227void MacroAssembler::InvokeFunction(Register function,
3228 const ParameterCount& actual,
Steve Block44f0eee2011-05-26 01:26:41 +01003229 InvokeFlag flag,
Ben Murdoch257744e2011-11-30 15:57:28 +00003230 const CallWrapper& call_wrapper,
3231 CallKind call_kind) {
Steve Block6ded16b2010-05-10 14:33:55 +01003232 // Contract with called JS functions requires that function is passed in a1.
3233 ASSERT(function.is(a1));
3234 Register expected_reg = a2;
3235 Register code_reg = a3;
3236
3237 lw(code_reg, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
3238 lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
3239 lw(expected_reg,
3240 FieldMemOperand(code_reg,
3241 SharedFunctionInfo::kFormalParameterCountOffset));
Steve Block44f0eee2011-05-26 01:26:41 +01003242 sra(expected_reg, expected_reg, kSmiTagSize);
3243 lw(code_reg, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
Steve Block6ded16b2010-05-10 14:33:55 +01003244
3245 ParameterCount expected(expected_reg);
Ben Murdoch257744e2011-11-30 15:57:28 +00003246 InvokeCode(code_reg, expected, actual, flag, call_wrapper, call_kind);
Steve Block44f0eee2011-05-26 01:26:41 +01003247}
3248
3249
Ben Murdoch85b71792012-04-11 18:30:58 +01003250void MacroAssembler::InvokeFunction(JSFunction* function,
Steve Block44f0eee2011-05-26 01:26:41 +01003251 const ParameterCount& actual,
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003252 InvokeFlag flag,
3253 CallKind call_kind) {
Ben Murdoch85b71792012-04-11 18:30:58 +01003254 ASSERT(function->is_compiled());
Steve Block44f0eee2011-05-26 01:26:41 +01003255
 3256 // Get the function and set up the context.
Ben Murdoch85b71792012-04-11 18:30:58 +01003257 li(a1, Operand(Handle<JSFunction>(function)));
Steve Block44f0eee2011-05-26 01:26:41 +01003258 lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
3259
Ben Murdoch85b71792012-04-11 18:30:58 +01003260 // Invoke the cached code.
3261 Handle<Code> code(function->code());
Steve Block44f0eee2011-05-26 01:26:41 +01003262 ParameterCount expected(function->shared()->formal_parameter_count());
Ben Murdoch85b71792012-04-11 18:30:58 +01003263 if (V8::UseCrankshaft()) {
3264 UNIMPLEMENTED_MIPS();
3265 } else {
3266 InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag, call_kind);
3267 }
Steve Block44f0eee2011-05-26 01:26:41 +01003268}
3269
3270
3271void MacroAssembler::IsObjectJSObjectType(Register heap_object,
3272 Register map,
3273 Register scratch,
3274 Label* fail) {
3275 lw(map, FieldMemOperand(heap_object, HeapObject::kMapOffset));
3276 IsInstanceJSObjectType(map, scratch, fail);
3277}
3278
3279
3280void MacroAssembler::IsInstanceJSObjectType(Register map,
3281 Register scratch,
3282 Label* fail) {
3283 lbu(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003284 Branch(fail, lt, scratch, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
3285 Branch(fail, gt, scratch, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
Steve Block44f0eee2011-05-26 01:26:41 +01003286}
3287
3288
3289void MacroAssembler::IsObjectJSStringType(Register object,
3290 Register scratch,
3291 Label* fail) {
3292 ASSERT(kNotStringTag != 0);
3293
3294 lw(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
3295 lbu(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
3296 And(scratch, scratch, Operand(kIsNotStringMask));
3297 Branch(fail, ne, scratch, Operand(zero_reg));
Steve Block6ded16b2010-05-10 14:33:55 +01003298}
3299
3300
3301// ---------------------------------------------------------------------------
3302// Support functions.
3303
Steve Block44f0eee2011-05-26 01:26:41 +01003304
3305void MacroAssembler::TryGetFunctionPrototype(Register function,
3306 Register result,
3307 Register scratch,
Ben Murdoch85b71792012-04-11 18:30:58 +01003308 Label* miss) {
Steve Block44f0eee2011-05-26 01:26:41 +01003309 // Check that the receiver isn't a smi.
3310 JumpIfSmi(function, miss);
3311
3312 // Check that the function really is a function. Load map into result reg.
3313 GetObjectType(function, result, scratch);
3314 Branch(miss, ne, scratch, Operand(JS_FUNCTION_TYPE));
3315
3316 // Make sure that the function has an instance prototype.
3317 Label non_instance;
3318 lbu(scratch, FieldMemOperand(result, Map::kBitFieldOffset));
3319 And(scratch, scratch, Operand(1 << Map::kHasNonInstancePrototype));
3320 Branch(&non_instance, ne, scratch, Operand(zero_reg));
3321
3322 // Get the prototype or initial map from the function.
3323 lw(result,
3324 FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
3325
3326 // If the prototype or initial map is the hole, don't return it and
3327 // simply miss the cache instead. This will allow us to allocate a
3328 // prototype object on-demand in the runtime system.
3329 LoadRoot(t8, Heap::kTheHoleValueRootIndex);
3330 Branch(miss, eq, result, Operand(t8));
3331
3332 // If the function does not have an initial map, we're done.
3333 Label done;
3334 GetObjectType(result, scratch, scratch);
3335 Branch(&done, ne, scratch, Operand(MAP_TYPE));
3336
3337 // Get the prototype from the initial map.
3338 lw(result, FieldMemOperand(result, Map::kPrototypeOffset));
3339 jmp(&done);
3340
3341 // Non-instance prototype: Fetch prototype from constructor field
3342 // in initial map.
3343 bind(&non_instance);
3344 lw(result, FieldMemOperand(result, Map::kConstructorOffset));
3345
3346 // All done.
3347 bind(&done);
3348}
Steve Block6ded16b2010-05-10 14:33:55 +01003349
3350
Steve Block44f0eee2011-05-26 01:26:41 +01003351void MacroAssembler::GetObjectType(Register object,
3352 Register map,
3353 Register type_reg) {
3354 lw(map, FieldMemOperand(object, HeapObject::kMapOffset));
3355 lbu(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
3356}
Steve Block6ded16b2010-05-10 14:33:55 +01003357
3358
3359// -----------------------------------------------------------------------------
Ben Murdoch257744e2011-11-30 15:57:28 +00003360// Runtime calls.
Steve Block6ded16b2010-05-10 14:33:55 +01003361
Ben Murdoch85b71792012-04-11 18:30:58 +01003362void MacroAssembler::CallStub(CodeStub* stub, Condition cond,
3363 Register r1, const Operand& r2) {
3364 ASSERT(allow_stub_calls()); // Stub calls are not allowed in some stubs.
3365 Call(stub->GetCode(), RelocInfo::CODE_TARGET, kNoASTId, cond, r1, r2);
3366}
3367
3368
3369MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub, Condition cond,
3370 Register r1, const Operand& r2) {
3371 ASSERT(allow_stub_calls()); // Stub calls are not allowed in some stubs.
3372 Object* result;
3373 { MaybeObject* maybe_result = stub->TryGetCode();
3374 if (!maybe_result->ToObject(&result)) return maybe_result;
3375 }
3376 Call(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET,
3377 kNoASTId, cond, r1, r2);
3378 return result;
Andrei Popescu31002712010-02-23 13:46:05 +00003379}
3380
3381
Steve Block44f0eee2011-05-26 01:26:41 +01003382void MacroAssembler::TailCallStub(CodeStub* stub) {
Ben Murdoch85b71792012-04-11 18:30:58 +01003383 ASSERT(allow_stub_calls()); // Stub calls are not allowed in some stubs.
Steve Block44f0eee2011-05-26 01:26:41 +01003384 Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
Andrei Popescu31002712010-02-23 13:46:05 +00003385}
3386
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003387
Ben Murdoch85b71792012-04-11 18:30:58 +01003388MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub,
3389 Condition cond,
3390 Register r1,
3391 const Operand& r2) {
3392 ASSERT(allow_stub_calls()); // Stub calls are not allowed in some stubs.
3393 Object* result;
3394 { MaybeObject* maybe_result = stub->TryGetCode();
3395 if (!maybe_result->ToObject(&result)) return maybe_result;
3396 }
3397 Jump(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET, cond, r1, r2);
3398 return result;
3399}
3400
3401
Ben Murdoch257744e2011-11-30 15:57:28 +00003402static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
3403 return ref0.address() - ref1.address();
3404}
3405
3406
Ben Murdoch85b71792012-04-11 18:30:58 +01003407MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn(
3408 ExternalReference function, int stack_space) {
Ben Murdoch257744e2011-11-30 15:57:28 +00003409 ExternalReference next_address =
3410 ExternalReference::handle_scope_next_address();
3411 const int kNextOffset = 0;
3412 const int kLimitOffset = AddressOffset(
3413 ExternalReference::handle_scope_limit_address(),
3414 next_address);
3415 const int kLevelOffset = AddressOffset(
3416 ExternalReference::handle_scope_level_address(),
3417 next_address);
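  // The deltas above rely on the three handle-scope fields being adjacent in
  // memory, so all of them can be addressed off the single base register s3
  // (loaded with next_address below) using small constant offsets.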
3418
3419 // Allocate HandleScope in callee-save registers.
3420 li(s3, Operand(next_address));
3421 lw(s0, MemOperand(s3, kNextOffset));
3422 lw(s1, MemOperand(s3, kLimitOffset));
3423 lw(s2, MemOperand(s3, kLevelOffset));
3424 Addu(s2, s2, Operand(1));
3425 sw(s2, MemOperand(s3, kLevelOffset));
3426
3427 // The O32 ABI requires us to pass a pointer in a0 where the returned struct
3428 // (4 bytes) will be placed. This is also built into the Simulator.
3429 // Set up the pointer to the returned value (a0). It was allocated in
3430 // EnterExitFrame.
3431 addiu(a0, fp, ExitFrameConstants::kStackSpaceOffset);
3432
3433 // Native call returns to the DirectCEntry stub which redirects to the
3434 // return address pushed on stack (could have moved after GC).
3435 // DirectCEntry stub itself is generated early and never moves.
3436 DirectCEntryStub stub;
3437 stub.GenerateCall(this, function);
3438
3439 // As mentioned above, on MIPS a pointer is returned - we need to dereference
3440 // it to get the actual return value (which is also a pointer).
3441 lw(v0, MemOperand(v0));
3442
3443 Label promote_scheduled_exception;
3444 Label delete_allocated_handles;
3445 Label leave_exit_frame;
3446
 3447 // If result is non-zero, dereference it to get the result value;
 3448 // otherwise set it to undefined.
3449 Label skip;
3450 LoadRoot(a0, Heap::kUndefinedValueRootIndex);
3451 Branch(&skip, eq, v0, Operand(zero_reg));
3452 lw(a0, MemOperand(v0));
3453 bind(&skip);
3454 mov(v0, a0);
3455
3456 // No more valid handles (the result handle was the last one). Restore
3457 // previous handle scope.
3458 sw(s0, MemOperand(s3, kNextOffset));
3459 if (emit_debug_code()) {
3460 lw(a1, MemOperand(s3, kLevelOffset));
3461 Check(eq, "Unexpected level after return from api call", a1, Operand(s2));
3462 }
3463 Subu(s2, s2, Operand(1));
3464 sw(s2, MemOperand(s3, kLevelOffset));
3465 lw(at, MemOperand(s3, kLimitOffset));
3466 Branch(&delete_allocated_handles, ne, s1, Operand(at));
3467
3468 // Check if the function scheduled an exception.
3469 bind(&leave_exit_frame);
3470 LoadRoot(t0, Heap::kTheHoleValueRootIndex);
3471 li(at, Operand(ExternalReference::scheduled_exception_address(isolate())));
3472 lw(t1, MemOperand(at));
3473 Branch(&promote_scheduled_exception, ne, t0, Operand(t1));
3474 li(s0, Operand(stack_space));
Ben Murdoch85b71792012-04-11 18:30:58 +01003475 LeaveExitFrame(false, s0);
3476 Ret();
Ben Murdoch257744e2011-11-30 15:57:28 +00003477
3478 bind(&promote_scheduled_exception);
Ben Murdoch85b71792012-04-11 18:30:58 +01003479 MaybeObject* result = TryTailCallExternalReference(
3480 ExternalReference(Runtime::kPromoteScheduledException, isolate()), 0, 1);
3481 if (result->IsFailure()) {
3482 return result;
3483 }
Ben Murdoch257744e2011-11-30 15:57:28 +00003484
3485 // HandleScope limit has changed. Delete allocated extensions.
3486 bind(&delete_allocated_handles);
3487 sw(s1, MemOperand(s3, kLimitOffset));
3488 mov(s0, v0);
3489 mov(a0, v0);
3490 PrepareCallCFunction(1, s1);
3491 li(a0, Operand(ExternalReference::isolate_address()));
3492 CallCFunction(ExternalReference::delete_handle_scope_extensions(isolate()),
3493 1);
3494 mov(v0, s0);
3495 jmp(&leave_exit_frame);
3496
Ben Murdoch85b71792012-04-11 18:30:58 +01003497 return result;
Ben Murdoch257744e2011-11-30 15:57:28 +00003498}
3499
Andrei Popescu31002712010-02-23 13:46:05 +00003500
Steve Block6ded16b2010-05-10 14:33:55 +01003501void MacroAssembler::IllegalOperation(int num_arguments) {
3502 if (num_arguments > 0) {
3503 addiu(sp, sp, num_arguments * kPointerSize);
3504 }
3505 LoadRoot(v0, Heap::kUndefinedValueRootIndex);
3506}
3507
3508
Steve Block44f0eee2011-05-26 01:26:41 +01003509void MacroAssembler::IndexFromHash(Register hash,
3510 Register index) {
 3511 // If the hash field contains an array index, pick it out. The assert checks
 3512 // that the constants for the maximum number of digits for an array index
 3513 // cached in the hash field and the number of bits reserved for it do not
 3514 // conflict.
3515 ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
3516 (1 << String::kArrayIndexValueBits));
3517 // We want the smi-tagged index in key. kArrayIndexValueMask has zeros in
3518 // the low kHashShift bits.
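  // That is, Ext pulls the kArrayIndexValueBits-wide field starting at bit
  // kHashShift out of the hash, and the sll below re-tags the result as a
  // smi (one tag bit, assuming 32-bit smis).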
3519 STATIC_ASSERT(kSmiTag == 0);
3520 Ext(hash, hash, String::kHashShift, String::kArrayIndexValueBits);
3521 sll(index, hash, kSmiTagSize);
3522}
3523
3524
3525void MacroAssembler::ObjectToDoubleFPURegister(Register object,
3526 FPURegister result,
3527 Register scratch1,
3528 Register scratch2,
3529 Register heap_number_map,
3530 Label* not_number,
3531 ObjectToDoubleFlags flags) {
3532 Label done;
3533 if ((flags & OBJECT_NOT_SMI) == 0) {
3534 Label not_smi;
3535 JumpIfNotSmi(object, &not_smi);
3536 // Remove smi tag and convert to double.
3537 sra(scratch1, object, kSmiTagSize);
3538 mtc1(scratch1, result);
3539 cvt_d_w(result, result);
3540 Branch(&done);
3541 bind(&not_smi);
3542 }
3543 // Check for heap number and load double value from it.
3544 lw(scratch1, FieldMemOperand(object, HeapObject::kMapOffset));
3545 Branch(not_number, ne, scratch1, Operand(heap_number_map));
3546
3547 if ((flags & AVOID_NANS_AND_INFINITIES) != 0) {
3548 // If exponent is all ones the number is either a NaN or +/-Infinity.
3549 Register exponent = scratch1;
3550 Register mask_reg = scratch2;
3551 lw(exponent, FieldMemOperand(object, HeapNumber::kExponentOffset));
3552 li(mask_reg, HeapNumber::kExponentMask);
3553
3554 And(exponent, exponent, mask_reg);
3555 Branch(not_number, eq, exponent, Operand(mask_reg));
3556 }
3557 ldc1(result, FieldMemOperand(object, HeapNumber::kValueOffset));
3558 bind(&done);
3559}
3560
3561
Steve Block44f0eee2011-05-26 01:26:41 +01003562void MacroAssembler::SmiToDoubleFPURegister(Register smi,
3563 FPURegister value,
3564 Register scratch1) {
3565 sra(scratch1, smi, kSmiTagSize);
3566 mtc1(scratch1, value);
3567 cvt_d_w(value, value);
3568}
3569
3570
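// Sketch of the signed-overflow rule used below: an addition overflows iff
// both operands have the same sign and the result's sign differs, which is
// exactly when ((dst ^ left) & (dst ^ right)) has its sign bit set. E.g.
// with 8-bit values for brevity: 0x70 + 0x70 = 0xE0, and
// (0xE0 ^ 0x70) & (0xE0 ^ 0x70) = 0x90, so the sign bit reports overflow.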
Ben Murdoch257744e2011-11-30 15:57:28 +00003571void MacroAssembler::AdduAndCheckForOverflow(Register dst,
3572 Register left,
3573 Register right,
3574 Register overflow_dst,
3575 Register scratch) {
3576 ASSERT(!dst.is(overflow_dst));
3577 ASSERT(!dst.is(scratch));
3578 ASSERT(!overflow_dst.is(scratch));
3579 ASSERT(!overflow_dst.is(left));
3580 ASSERT(!overflow_dst.is(right));
Ben Murdoch85b71792012-04-11 18:30:58 +01003581 ASSERT(!left.is(right));
Ben Murdoch257744e2011-11-30 15:57:28 +00003582
Ben Murdoch257744e2011-11-30 15:57:28 +00003583 if (dst.is(left)) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003584 mov(scratch, left); // Preserve left.
3585 addu(dst, left, right); // Left is overwritten.
3586 xor_(scratch, dst, scratch); // Original left.
3587 xor_(overflow_dst, dst, right);
3588 and_(overflow_dst, overflow_dst, scratch);
Ben Murdoch257744e2011-11-30 15:57:28 +00003589 } else if (dst.is(right)) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003590 mov(scratch, right); // Preserve right.
3591 addu(dst, left, right); // Right is overwritten.
3592 xor_(scratch, dst, scratch); // Original right.
3593 xor_(overflow_dst, dst, left);
3594 and_(overflow_dst, overflow_dst, scratch);
Ben Murdoch257744e2011-11-30 15:57:28 +00003595 } else {
3596 addu(dst, left, right);
3597 xor_(overflow_dst, dst, left);
3598 xor_(scratch, dst, right);
3599 and_(overflow_dst, scratch, overflow_dst);
3600 }
3601}
3602
3603
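// For subtraction the rule flips: left - right overflows iff the operands
// have different signs and the result's sign differs from left's, i.e.
// ((dst ^ left) & (left ^ right)) has its sign bit set. The moves below just
// preserve an operand when dst aliases it.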
3604void MacroAssembler::SubuAndCheckForOverflow(Register dst,
3605 Register left,
3606 Register right,
3607 Register overflow_dst,
3608 Register scratch) {
3609 ASSERT(!dst.is(overflow_dst));
3610 ASSERT(!dst.is(scratch));
3611 ASSERT(!overflow_dst.is(scratch));
3612 ASSERT(!overflow_dst.is(left));
3613 ASSERT(!overflow_dst.is(right));
Ben Murdoch85b71792012-04-11 18:30:58 +01003614 ASSERT(!left.is(right));
Ben Murdoch257744e2011-11-30 15:57:28 +00003615 ASSERT(!scratch.is(left));
3616 ASSERT(!scratch.is(right));
3617
Ben Murdoch257744e2011-11-30 15:57:28 +00003618 if (dst.is(left)) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003619 mov(scratch, left); // Preserve left.
3620 subu(dst, left, right); // Left is overwritten.
 3621 xor_(overflow_dst, dst, scratch); // scratch is original left.
 3622 xor_(scratch, scratch, right); // scratch = original left XOR right.
3623 and_(overflow_dst, scratch, overflow_dst);
Ben Murdoch257744e2011-11-30 15:57:28 +00003624 } else if (dst.is(right)) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003625 mov(scratch, right); // Preserve right.
3626 subu(dst, left, right); // Right is overwritten.
3627 xor_(overflow_dst, dst, left);
3628 xor_(scratch, left, scratch); // Original right.
3629 and_(overflow_dst, scratch, overflow_dst);
Ben Murdoch257744e2011-11-30 15:57:28 +00003630 } else {
3631 subu(dst, left, right);
3632 xor_(overflow_dst, dst, left);
3633 xor_(scratch, left, right);
3634 and_(overflow_dst, scratch, overflow_dst);
3635 }
3636}
3637
3638
Steve Block44f0eee2011-05-26 01:26:41 +01003639void MacroAssembler::CallRuntime(const Runtime::Function* f,
3640 int num_arguments) {
Steve Block6ded16b2010-05-10 14:33:55 +01003641 // All parameters are on the stack. v0 has the return value after call.
3642
3643 // If the expected number of arguments of the runtime function is
3644 // constant, we check that the actual number of arguments match the
3645 // expectation.
3646 if (f->nargs >= 0 && f->nargs != num_arguments) {
3647 IllegalOperation(num_arguments);
3648 return;
3649 }
3650
3651 // TODO(1236192): Most runtime routines don't need the number of
3652 // arguments passed in because it is constant. At some point we
3653 // should remove this need and make the runtime routine entry code
3654 // smarter.
Ben Murdoch85b71792012-04-11 18:30:58 +01003655 li(a0, num_arguments);
3656 li(a1, Operand(ExternalReference(f, isolate())));
Steve Block6ded16b2010-05-10 14:33:55 +01003657 CEntryStub stub(1);
3658 CallStub(&stub);
Andrei Popescu31002712010-02-23 13:46:05 +00003659}
3660
3661
Steve Block44f0eee2011-05-26 01:26:41 +01003662void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
3663 const Runtime::Function* function = Runtime::FunctionForId(id);
Ben Murdoch85b71792012-04-11 18:30:58 +01003664 li(a0, Operand(function->nargs));
3665 li(a1, Operand(ExternalReference(function, isolate())));
3666 CEntryStub stub(1);
3667 stub.SaveDoubles();
Steve Block44f0eee2011-05-26 01:26:41 +01003668 CallStub(&stub);
3669}
3670
3671
Andrei Popescu31002712010-02-23 13:46:05 +00003672void MacroAssembler::CallRuntime(Runtime::FunctionId fid, int num_arguments) {
Steve Block6ded16b2010-05-10 14:33:55 +01003673 CallRuntime(Runtime::FunctionForId(fid), num_arguments);
3674}
3675
3676
Steve Block44f0eee2011-05-26 01:26:41 +01003677void MacroAssembler::CallExternalReference(const ExternalReference& ext,
Ben Murdoch85b71792012-04-11 18:30:58 +01003678 int num_arguments) {
3679 li(a0, Operand(num_arguments));
3680 li(a1, Operand(ext));
Steve Block44f0eee2011-05-26 01:26:41 +01003681
3682 CEntryStub stub(1);
Ben Murdoch85b71792012-04-11 18:30:58 +01003683 CallStub(&stub);
Steve Block44f0eee2011-05-26 01:26:41 +01003684}
3685
3686
Steve Block6ded16b2010-05-10 14:33:55 +01003687void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
3688 int num_arguments,
3689 int result_size) {
Steve Block44f0eee2011-05-26 01:26:41 +01003690 // TODO(1236192): Most runtime routines don't need the number of
3691 // arguments passed in because it is constant. At some point we
3692 // should remove this need and make the runtime routine entry code
3693 // smarter.
Ben Murdoch85b71792012-04-11 18:30:58 +01003694 li(a0, Operand(num_arguments));
Steve Block44f0eee2011-05-26 01:26:41 +01003695 JumpToExternalReference(ext);
Andrei Popescu31002712010-02-23 13:46:05 +00003696}
3697
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003698
Ben Murdoch85b71792012-04-11 18:30:58 +01003699MaybeObject* MacroAssembler::TryTailCallExternalReference(
3700 const ExternalReference& ext, int num_arguments, int result_size) {
3701 // TODO(1236192): Most runtime routines don't need the number of
3702 // arguments passed in because it is constant. At some point we
3703 // should remove this need and make the runtime routine entry code
3704 // smarter.
3705 li(a0, num_arguments);
3706 return TryJumpToExternalReference(ext);
3707}
3708
3709
Steve Block6ded16b2010-05-10 14:33:55 +01003710void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
Andrei Popescu31002712010-02-23 13:46:05 +00003711 int num_arguments,
3712 int result_size) {
Steve Block44f0eee2011-05-26 01:26:41 +01003713 TailCallExternalReference(ExternalReference(fid, isolate()),
3714 num_arguments,
3715 result_size);
Andrei Popescu31002712010-02-23 13:46:05 +00003716}
3717
3718
Ben Murdoch85b71792012-04-11 18:30:58 +01003719void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin) {
3720 li(a1, Operand(builtin));
Steve Block44f0eee2011-05-26 01:26:41 +01003721 CEntryStub stub(1);
Ben Murdoch85b71792012-04-11 18:30:58 +01003722 Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
3723}
3724
3725
3726MaybeObject* MacroAssembler::TryJumpToExternalReference(
3727 const ExternalReference& builtin) {
3728 li(a1, Operand(builtin));
3729 CEntryStub stub(1);
3730 return TryTailCallStub(&stub);
Andrei Popescu31002712010-02-23 13:46:05 +00003731}
3732
3733
3734void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
Ben Murdoch257744e2011-11-30 15:57:28 +00003735 InvokeFlag flag,
3736 const CallWrapper& call_wrapper) {
Steve Block44f0eee2011-05-26 01:26:41 +01003737 GetBuiltinEntry(t9, id);
Ben Murdoch257744e2011-11-30 15:57:28 +00003738 if (flag == CALL_FUNCTION) {
3739 call_wrapper.BeforeCall(CallSize(t9));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003740 SetCallKind(t1, CALL_AS_METHOD);
Steve Block44f0eee2011-05-26 01:26:41 +01003741 Call(t9);
Ben Murdoch257744e2011-11-30 15:57:28 +00003742 call_wrapper.AfterCall();
Steve Block44f0eee2011-05-26 01:26:41 +01003743 } else {
Ben Murdoch257744e2011-11-30 15:57:28 +00003744 ASSERT(flag == JUMP_FUNCTION);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003745 SetCallKind(t1, CALL_AS_METHOD);
Steve Block44f0eee2011-05-26 01:26:41 +01003746 Jump(t9);
3747 }
3748}
3749
3750
3751void MacroAssembler::GetBuiltinFunction(Register target,
3752 Builtins::JavaScript id) {
3753 // Load the builtins object into target register.
3754 lw(target, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
3755 lw(target, FieldMemOperand(target, GlobalObject::kBuiltinsOffset));
3756 // Load the JavaScript builtin function from the builtins object.
3757 lw(target, FieldMemOperand(target,
3758 JSBuiltinsObject::OffsetOfFunctionWithId(id)));
Andrei Popescu31002712010-02-23 13:46:05 +00003759}
3760
3761
3762void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
Steve Block44f0eee2011-05-26 01:26:41 +01003763 ASSERT(!target.is(a1));
3764 GetBuiltinFunction(a1, id);
3765 // Load the code entry point from the builtins object.
3766 lw(target, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
Andrei Popescu31002712010-02-23 13:46:05 +00003767}
3768
3769
3770void MacroAssembler::SetCounter(StatsCounter* counter, int value,
3771 Register scratch1, Register scratch2) {
Steve Block44f0eee2011-05-26 01:26:41 +01003772 if (FLAG_native_code_counters && counter->Enabled()) {
3773 li(scratch1, Operand(value));
3774 li(scratch2, Operand(ExternalReference(counter)));
3775 sw(scratch1, MemOperand(scratch2));
3776 }
Andrei Popescu31002712010-02-23 13:46:05 +00003777}
3778
3779
3780void MacroAssembler::IncrementCounter(StatsCounter* counter, int value,
3781 Register scratch1, Register scratch2) {
Steve Block44f0eee2011-05-26 01:26:41 +01003782 ASSERT(value > 0);
3783 if (FLAG_native_code_counters && counter->Enabled()) {
3784 li(scratch2, Operand(ExternalReference(counter)));
3785 lw(scratch1, MemOperand(scratch2));
3786 Addu(scratch1, scratch1, Operand(value));
3787 sw(scratch1, MemOperand(scratch2));
3788 }
Andrei Popescu31002712010-02-23 13:46:05 +00003789}
3790
3791
3792void MacroAssembler::DecrementCounter(StatsCounter* counter, int value,
3793 Register scratch1, Register scratch2) {
Steve Block44f0eee2011-05-26 01:26:41 +01003794 ASSERT(value > 0);
3795 if (FLAG_native_code_counters && counter->Enabled()) {
3796 li(scratch2, Operand(ExternalReference(counter)));
3797 lw(scratch1, MemOperand(scratch2));
3798 Subu(scratch1, scratch1, Operand(value));
3799 sw(scratch1, MemOperand(scratch2));
3800 }
Andrei Popescu31002712010-02-23 13:46:05 +00003801}
3802
3803
Steve Block6ded16b2010-05-10 14:33:55 +01003804// -----------------------------------------------------------------------------
Ben Murdoch257744e2011-11-30 15:57:28 +00003805// Debugging.
Andrei Popescu31002712010-02-23 13:46:05 +00003806
3807void MacroAssembler::Assert(Condition cc, const char* msg,
3808 Register rs, Operand rt) {
Ben Murdoch257744e2011-11-30 15:57:28 +00003809 if (emit_debug_code())
Steve Block44f0eee2011-05-26 01:26:41 +01003810 Check(cc, msg, rs, rt);
3811}
3812
3813
3814void MacroAssembler::AssertRegisterIsRoot(Register reg,
3815 Heap::RootListIndex index) {
Ben Murdoch257744e2011-11-30 15:57:28 +00003816 if (emit_debug_code()) {
Steve Block44f0eee2011-05-26 01:26:41 +01003817 LoadRoot(at, index);
3818 Check(eq, "Register did not match expected root", reg, Operand(at));
3819 }
3820}
3821
3822
3823void MacroAssembler::AssertFastElements(Register elements) {
Ben Murdoch257744e2011-11-30 15:57:28 +00003824 if (emit_debug_code()) {
Steve Block44f0eee2011-05-26 01:26:41 +01003825 ASSERT(!elements.is(at));
3826 Label ok;
Ben Murdoch257744e2011-11-30 15:57:28 +00003827 push(elements);
Steve Block44f0eee2011-05-26 01:26:41 +01003828 lw(elements, FieldMemOperand(elements, HeapObject::kMapOffset));
3829 LoadRoot(at, Heap::kFixedArrayMapRootIndex);
3830 Branch(&ok, eq, elements, Operand(at));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003831 LoadRoot(at, Heap::kFixedDoubleArrayMapRootIndex);
3832 Branch(&ok, eq, elements, Operand(at));
Steve Block44f0eee2011-05-26 01:26:41 +01003833 LoadRoot(at, Heap::kFixedCOWArrayMapRootIndex);
3834 Branch(&ok, eq, elements, Operand(at));
3835 Abort("JSObject with fast elements map has slow elements");
3836 bind(&ok);
Ben Murdoch257744e2011-11-30 15:57:28 +00003837 pop(elements);
Steve Block44f0eee2011-05-26 01:26:41 +01003838 }
Andrei Popescu31002712010-02-23 13:46:05 +00003839}
3840
3841
3842void MacroAssembler::Check(Condition cc, const char* msg,
3843 Register rs, Operand rt) {
Steve Block44f0eee2011-05-26 01:26:41 +01003844 Label L;
3845 Branch(&L, cc, rs, rt);
3846 Abort(msg);
Ben Murdoch257744e2011-11-30 15:57:28 +00003847 // Will not return here.
Steve Block44f0eee2011-05-26 01:26:41 +01003848 bind(&L);
Andrei Popescu31002712010-02-23 13:46:05 +00003849}
3850
3851
3852void MacroAssembler::Abort(const char* msg) {
Steve Block44f0eee2011-05-26 01:26:41 +01003853 Label abort_start;
3854 bind(&abort_start);
3855 // We want to pass the msg string like a smi to avoid GC
 3856 // problems; however, msg is not guaranteed to be aligned
3857 // properly. Instead, we pass an aligned pointer that is
3858 // a proper v8 smi, but also pass the alignment difference
3859 // from the real pointer as a smi.
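  // E.g. (hypothetical address): msg == 0x2001 yields p0 == 0x2000, a valid
  // smi since kSmiTag == 0 and kSmiTagMask == 1, and p1 - p0 == 1; the
  // runtime can re-add that difference to recover the real char pointer.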
3860 intptr_t p1 = reinterpret_cast<intptr_t>(msg);
3861 intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
3862 ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
3863#ifdef DEBUG
3864 if (msg != NULL) {
3865 RecordComment("Abort message: ");
3866 RecordComment(msg);
3867 }
3868#endif
Ben Murdoch85b71792012-04-11 18:30:58 +01003869 // Disable stub call restrictions to always allow calls to abort.
3870 AllowStubCallsScope allow_scope(this, true);
Steve Block44f0eee2011-05-26 01:26:41 +01003871
3872 li(a0, Operand(p0));
Ben Murdoch257744e2011-11-30 15:57:28 +00003873 push(a0);
Steve Block44f0eee2011-05-26 01:26:41 +01003874 li(a0, Operand(Smi::FromInt(p1 - p0)));
Ben Murdoch257744e2011-11-30 15:57:28 +00003875 push(a0);
Ben Murdoch85b71792012-04-11 18:30:58 +01003876 CallRuntime(Runtime::kAbort, 2);
Ben Murdoch257744e2011-11-30 15:57:28 +00003877 // Will not return here.
Steve Block44f0eee2011-05-26 01:26:41 +01003878 if (is_trampoline_pool_blocked()) {
3879 // If the calling code cares about the exact number of
3880 // instructions generated, we insert padding here to keep the size
3881 // of the Abort macro constant.
3882 // Currently in debug mode with debug_code enabled the number of
3883 // generated instructions is 14, so we use this as a maximum value.
3884 static const int kExpectedAbortInstructions = 14;
3885 int abort_instructions = InstructionsGeneratedSince(&abort_start);
3886 ASSERT(abort_instructions <= kExpectedAbortInstructions);
3887 while (abort_instructions++ < kExpectedAbortInstructions) {
3888 nop();
3889 }
3890 }
3891}
3892
3893
3894void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
3895 if (context_chain_length > 0) {
3896 // Move up the chain of contexts to the context containing the slot.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003897 lw(dst, MemOperand(cp, Context::SlotOffset(Context::PREVIOUS_INDEX)));
Steve Block44f0eee2011-05-26 01:26:41 +01003898 for (int i = 1; i < context_chain_length; i++) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003899 lw(dst, MemOperand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
Steve Block44f0eee2011-05-26 01:26:41 +01003900 }
Ben Murdoch257744e2011-11-30 15:57:28 +00003901 } else {
3902 // Slot is in the current function context. Move it into the
3903 // destination register in case we store into it (the write barrier
 3904 // cannot be allowed to destroy the context in cp).
3905 Move(dst, cp);
3906 }
Steve Block44f0eee2011-05-26 01:26:41 +01003907}
3908
3909
3910void MacroAssembler::LoadGlobalFunction(int index, Register function) {
3911 // Load the global or builtins object from the current context.
3912 lw(function, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
3913 // Load the global context from the global or builtins object.
3914 lw(function, FieldMemOperand(function,
3915 GlobalObject::kGlobalContextOffset));
3916 // Load the function from the global context.
3917 lw(function, MemOperand(function, Context::SlotOffset(index)));
3918}
3919
3920
3921void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
3922 Register map,
3923 Register scratch) {
3924 // Load the initial map. The global functions all have initial maps.
3925 lw(map, FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00003926 if (emit_debug_code()) {
Steve Block44f0eee2011-05-26 01:26:41 +01003927 Label ok, fail;
Ben Murdoch257744e2011-11-30 15:57:28 +00003928 CheckMap(map, scratch, Heap::kMetaMapRootIndex, &fail, DO_SMI_CHECK);
Steve Block44f0eee2011-05-26 01:26:41 +01003929 Branch(&ok);
3930 bind(&fail);
3931 Abort("Global functions must have initial map");
3932 bind(&ok);
3933 }
Andrei Popescu31002712010-02-23 13:46:05 +00003934}
3935
Steve Block6ded16b2010-05-10 14:33:55 +01003936
3937void MacroAssembler::EnterFrame(StackFrame::Type type) {
3938 addiu(sp, sp, -5 * kPointerSize);
Steve Block44f0eee2011-05-26 01:26:41 +01003939 li(t8, Operand(Smi::FromInt(type)));
Ben Murdoch85b71792012-04-11 18:30:58 +01003940 li(t9, Operand(CodeObject()));
Steve Block6ded16b2010-05-10 14:33:55 +01003941 sw(ra, MemOperand(sp, 4 * kPointerSize));
3942 sw(fp, MemOperand(sp, 3 * kPointerSize));
3943 sw(cp, MemOperand(sp, 2 * kPointerSize));
Steve Block44f0eee2011-05-26 01:26:41 +01003944 sw(t8, MemOperand(sp, 1 * kPointerSize));
3945 sw(t9, MemOperand(sp, 0 * kPointerSize));
Steve Block6ded16b2010-05-10 14:33:55 +01003946 addiu(fp, sp, 3 * kPointerSize);
3947}
3948
3949
3950void MacroAssembler::LeaveFrame(StackFrame::Type type) {
3951 mov(sp, fp);
3952 lw(fp, MemOperand(sp, 0 * kPointerSize));
3953 lw(ra, MemOperand(sp, 1 * kPointerSize));
3954 addiu(sp, sp, 2 * kPointerSize);
3955}
3956
3957
Ben Murdoch257744e2011-11-30 15:57:28 +00003958void MacroAssembler::EnterExitFrame(bool save_doubles,
3959 int stack_space) {
Ben Murdoch85b71792012-04-11 18:30:58 +01003960 // Set up the frame structure on the stack.
Ben Murdoch257744e2011-11-30 15:57:28 +00003961 STATIC_ASSERT(2 * kPointerSize == ExitFrameConstants::kCallerSPDisplacement);
3962 STATIC_ASSERT(1 * kPointerSize == ExitFrameConstants::kCallerPCOffset);
3963 STATIC_ASSERT(0 * kPointerSize == ExitFrameConstants::kCallerFPOffset);
Steve Block6ded16b2010-05-10 14:33:55 +01003964
Ben Murdoch257744e2011-11-30 15:57:28 +00003965 // This is how the stack will look:
3966 // fp + 2 (==kCallerSPDisplacement) - old stack's end
3967 // [fp + 1 (==kCallerPCOffset)] - saved old ra
3968 // [fp + 0 (==kCallerFPOffset)] - saved old fp
3969 // [fp - 1 (==kSPOffset)] - sp of the called function
3970 // [fp - 2 (==kCodeOffset)] - CodeObject
3971 // fp - (2 + stack_space + alignment) == sp == [fp - kSPOffset] - top of the
3972 // new stack (will contain saved ra)
Steve Block6ded16b2010-05-10 14:33:55 +01003973
3974 // Save registers.
Ben Murdoch257744e2011-11-30 15:57:28 +00003975 addiu(sp, sp, -4 * kPointerSize);
3976 sw(ra, MemOperand(sp, 3 * kPointerSize));
3977 sw(fp, MemOperand(sp, 2 * kPointerSize));
Ben Murdoch85b71792012-04-11 18:30:58 +01003978 addiu(fp, sp, 2 * kPointerSize); // Set up new frame pointer.
Steve Block6ded16b2010-05-10 14:33:55 +01003979
Ben Murdoch257744e2011-11-30 15:57:28 +00003980 if (emit_debug_code()) {
3981 sw(zero_reg, MemOperand(fp, ExitFrameConstants::kSPOffset));
3982 }
3983
Ben Murdoch85b71792012-04-11 18:30:58 +01003984 li(t8, Operand(CodeObject())); // Accessed from ExitFrame::code_slot.
Ben Murdoch257744e2011-11-30 15:57:28 +00003985 sw(t8, MemOperand(fp, ExitFrameConstants::kCodeOffset));
Steve Block6ded16b2010-05-10 14:33:55 +01003986
3987 // Save the frame pointer and the context in top.
Ben Murdoch589d6972011-11-30 16:04:58 +00003988 li(t8, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
Steve Block44f0eee2011-05-26 01:26:41 +01003989 sw(fp, MemOperand(t8));
Ben Murdoch589d6972011-11-30 16:04:58 +00003990 li(t8, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
Steve Block44f0eee2011-05-26 01:26:41 +01003991 sw(cp, MemOperand(t8));
Steve Block6ded16b2010-05-10 14:33:55 +01003992
Ben Murdoch257744e2011-11-30 15:57:28 +00003993 const int frame_alignment = MacroAssembler::ActivationFrameAlignment();
Steve Block44f0eee2011-05-26 01:26:41 +01003994 if (save_doubles) {
Ben Murdoch257744e2011-11-30 15:57:28 +00003995 // The stack must be aligned to 0 modulo 8 for stores with sdc1.
Steve Block44f0eee2011-05-26 01:26:41 +01003996 ASSERT(kDoubleSize == frame_alignment);
Ben Murdoch257744e2011-11-30 15:57:28 +00003997 if (frame_alignment > 0) {
3998 ASSERT(IsPowerOf2(frame_alignment));
3999 And(sp, sp, Operand(-frame_alignment)); // Align stack.
4000 }
4001 int space = FPURegister::kNumRegisters * kDoubleSize;
Steve Block44f0eee2011-05-26 01:26:41 +01004002 Subu(sp, sp, Operand(space));
4003 // Remember: we only need to save every 2nd double FPU value.
4004 for (int i = 0; i < FPURegister::kNumRegisters; i+=2) {
4005 FPURegister reg = FPURegister::from_code(i);
Ben Murdoch257744e2011-11-30 15:57:28 +00004006 sdc1(reg, MemOperand(sp, i * kDoubleSize));
Steve Block44f0eee2011-05-26 01:26:41 +01004007 }
Steve Block44f0eee2011-05-26 01:26:41 +01004008 }
Ben Murdoch257744e2011-11-30 15:57:28 +00004009
 4010 // Reserve space for the return address, the stack space and an optional slot
 4011 // (used by the DirectCEntryStub to hold the return value if a struct is
 4012 // returned), and align the frame before calling the runtime function.
4013 ASSERT(stack_space >= 0);
4014 Subu(sp, sp, Operand((stack_space + 2) * kPointerSize));
4015 if (frame_alignment > 0) {
4016 ASSERT(IsPowerOf2(frame_alignment));
4017 And(sp, sp, Operand(-frame_alignment)); // Align stack.
4018 }
4019
4020 // Set the exit frame sp value to point just before the return address
4021 // location.
4022 addiu(at, sp, kPointerSize);
4023 sw(at, MemOperand(fp, ExitFrameConstants::kSPOffset));
Steve Block6ded16b2010-05-10 14:33:55 +01004024}
4025
4026
Ben Murdoch257744e2011-11-30 15:57:28 +00004027void MacroAssembler::LeaveExitFrame(bool save_doubles,
Ben Murdoch85b71792012-04-11 18:30:58 +01004028 Register argument_count) {
Steve Block44f0eee2011-05-26 01:26:41 +01004029 // Optionally restore all double registers.
4030 if (save_doubles) {
Steve Block44f0eee2011-05-26 01:26:41 +01004031 // Remember: we only need to restore every 2nd double FPU value.
Ben Murdoch257744e2011-11-30 15:57:28 +00004032 lw(t8, MemOperand(fp, ExitFrameConstants::kSPOffset));
Steve Block44f0eee2011-05-26 01:26:41 +01004033 for (int i = 0; i < FPURegister::kNumRegisters; i+=2) {
4034 FPURegister reg = FPURegister::from_code(i);
Ben Murdoch257744e2011-11-30 15:57:28 +00004035 ldc1(reg, MemOperand(t8, i * kDoubleSize + kPointerSize));
Steve Block44f0eee2011-05-26 01:26:41 +01004036 }
4037 }
4038
Steve Block6ded16b2010-05-10 14:33:55 +01004039 // Clear top frame.
Ben Murdoch589d6972011-11-30 16:04:58 +00004040 li(t8, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
Steve Block44f0eee2011-05-26 01:26:41 +01004041 sw(zero_reg, MemOperand(t8));
Steve Block6ded16b2010-05-10 14:33:55 +01004042
4043 // Restore current context from top and clear it in debug mode.
Ben Murdoch589d6972011-11-30 16:04:58 +00004044 li(t8, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
Steve Block44f0eee2011-05-26 01:26:41 +01004045 lw(cp, MemOperand(t8));
Steve Block6ded16b2010-05-10 14:33:55 +01004046#ifdef DEBUG
Steve Block44f0eee2011-05-26 01:26:41 +01004047 sw(a3, MemOperand(t8));
Steve Block6ded16b2010-05-10 14:33:55 +01004048#endif
4049
4050 // Pop the arguments, restore registers, and return.
4051 mov(sp, fp); // Respect ABI stack constraint.
Ben Murdoch257744e2011-11-30 15:57:28 +00004052 lw(fp, MemOperand(sp, ExitFrameConstants::kCallerFPOffset));
4053 lw(ra, MemOperand(sp, ExitFrameConstants::kCallerPCOffset));
Ben Murdoch85b71792012-04-11 18:30:58 +01004054 addiu(sp, sp, 8);
Ben Murdoch257744e2011-11-30 15:57:28 +00004055 if (argument_count.is_valid()) {
4056 sll(t8, argument_count, kPointerSizeLog2);
4057 addu(sp, sp, t8);
4058 }
Steve Block6ded16b2010-05-10 14:33:55 +01004059}
4060
4061
Steve Block44f0eee2011-05-26 01:26:41 +01004062void MacroAssembler::InitializeNewString(Register string,
4063 Register length,
4064 Heap::RootListIndex map_index,
4065 Register scratch1,
4066 Register scratch2) {
4067 sll(scratch1, length, kSmiTagSize);
4068 LoadRoot(scratch2, map_index);
4069 sw(scratch1, FieldMemOperand(string, String::kLengthOffset));
4070 li(scratch1, Operand(String::kEmptyHashField));
4071 sw(scratch2, FieldMemOperand(string, HeapObject::kMapOffset));
4072 sw(scratch1, FieldMemOperand(string, String::kHashFieldOffset));
4073}
4074
4075
4076int MacroAssembler::ActivationFrameAlignment() {
4077#if defined(V8_HOST_ARCH_MIPS)
4078 // Running on the real platform. Use the alignment as mandated by the local
4079 // environment.
4080 // Note: This will break if we ever start generating snapshots on one Mips
4081 // platform for another Mips platform with a different alignment.
4082 return OS::ActivationFrameAlignment();
4083#else // defined(V8_HOST_ARCH_MIPS)
4084 // If we are using the simulator then we should always align to the expected
4085 // alignment. As the simulator is used to generate snapshots we do not know
4086 // if the target platform will need alignment, so this is controlled from a
4087 // flag.
4088 return FLAG_sim_stack_alignment;
4089#endif // defined(V8_HOST_ARCH_MIPS)
4090}
4091
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004092
Ben Murdoch257744e2011-11-30 15:57:28 +00004093void MacroAssembler::AssertStackIsAligned() {
4094 if (emit_debug_code()) {
4095 const int frame_alignment = ActivationFrameAlignment();
4096 const int frame_alignment_mask = frame_alignment - 1;
Steve Block44f0eee2011-05-26 01:26:41 +01004097
Ben Murdoch257744e2011-11-30 15:57:28 +00004098 if (frame_alignment > kPointerSize) {
4099 Label alignment_as_expected;
4100 ASSERT(IsPowerOf2(frame_alignment));
4101 andi(at, sp, frame_alignment_mask);
4102 Branch(&alignment_as_expected, eq, at, Operand(zero_reg));
4103 // Don't use Check here, as it will call Runtime_Abort re-entering here.
4104 stop("Unexpected stack alignment");
4105 bind(&alignment_as_expected);
4106 }
Steve Block6ded16b2010-05-10 14:33:55 +01004107 }
Steve Block6ded16b2010-05-10 14:33:55 +01004108}
4109
Steve Block44f0eee2011-05-26 01:26:41 +01004110
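// The routine below uses the classic bit trick: reg is a nonzero power of
// two iff (reg & (reg - 1)) == 0, e.g. 8 & 7 == 0 while 6 & 5 == 4. The lt
// branch on reg - 1 also catches reg == 0, which wraps to -1.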
Steve Block44f0eee2011-05-26 01:26:41 +01004111void MacroAssembler::JumpIfNotPowerOfTwoOrZero(
4112 Register reg,
4113 Register scratch,
4114 Label* not_power_of_two_or_zero) {
4115 Subu(scratch, reg, Operand(1));
4116 Branch(USE_DELAY_SLOT, not_power_of_two_or_zero, lt,
4117 scratch, Operand(zero_reg));
4118 and_(at, scratch, reg); // In the delay slot.
4119 Branch(not_power_of_two_or_zero, ne, at, Operand(zero_reg));
4120}
4121
4122
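// Reminder for the smi tests below: a 32-bit smi is the value shifted left
// by one, so its low (tag) bit is 0; 5 is encoded as 0xA, for instance,
// while tagged heap object pointers carry a low bit of 1.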
4123void MacroAssembler::JumpIfNotBothSmi(Register reg1,
4124 Register reg2,
4125 Label* on_not_both_smi) {
4126 STATIC_ASSERT(kSmiTag == 0);
4127 ASSERT_EQ(1, kSmiTagMask);
4128 or_(at, reg1, reg2);
Ben Murdoch85b71792012-04-11 18:30:58 +01004129 andi(at, at, kSmiTagMask);
4130 Branch(on_not_both_smi, ne, at, Operand(zero_reg));
Steve Block44f0eee2011-05-26 01:26:41 +01004131}
4132
4133
4134void MacroAssembler::JumpIfEitherSmi(Register reg1,
4135 Register reg2,
4136 Label* on_either_smi) {
4137 STATIC_ASSERT(kSmiTag == 0);
4138 ASSERT_EQ(1, kSmiTagMask);
 4139 // Both smi tags must be 1 (neither a smi) for the AND's tag bit to stay set.
4140 and_(at, reg1, reg2);
Ben Murdoch85b71792012-04-11 18:30:58 +01004141 andi(at, at, kSmiTagMask);
4142 Branch(on_either_smi, eq, at, Operand(zero_reg));
Steve Block44f0eee2011-05-26 01:26:41 +01004143}
4144
4145
4146void MacroAssembler::AbortIfSmi(Register object) {
4147 STATIC_ASSERT(kSmiTag == 0);
4148 andi(at, object, kSmiTagMask);
4149 Assert(ne, "Operand is a smi", at, Operand(zero_reg));
4150}
4151
4152
4153void MacroAssembler::AbortIfNotSmi(Register object) {
4154 STATIC_ASSERT(kSmiTag == 0);
4155 andi(at, object, kSmiTagMask);
4156 Assert(eq, "Operand is a smi", at, Operand(zero_reg));
4157}
4158
4159
Ben Murdoch257744e2011-11-30 15:57:28 +00004160void MacroAssembler::AbortIfNotString(Register object) {
4161 STATIC_ASSERT(kSmiTag == 0);
4162 And(t0, object, Operand(kSmiTagMask));
4163 Assert(ne, "Operand is not a string", t0, Operand(zero_reg));
4164 push(object);
4165 lw(object, FieldMemOperand(object, HeapObject::kMapOffset));
4166 lbu(object, FieldMemOperand(object, Map::kInstanceTypeOffset));
4167 Assert(lo, "Operand is not a string", object, Operand(FIRST_NONSTRING_TYPE));
4168 pop(object);
4169}
4170
4171
Steve Block44f0eee2011-05-26 01:26:41 +01004172void MacroAssembler::AbortIfNotRootValue(Register src,
4173 Heap::RootListIndex root_value_index,
4174 const char* message) {
4175 ASSERT(!src.is(at));
4176 LoadRoot(at, root_value_index);
4177 Assert(eq, message, src, Operand(at));
4178}
4179
4180
4181void MacroAssembler::JumpIfNotHeapNumber(Register object,
4182 Register heap_number_map,
4183 Register scratch,
4184 Label* on_not_heap_number) {
4185 lw(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
4186 AssertRegisterIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
4187 Branch(on_not_heap_number, ne, scratch, Operand(heap_number_map));
4188}
4189
4190
4191void MacroAssembler::JumpIfNonSmisNotBothSequentialAsciiStrings(
4192 Register first,
4193 Register second,
4194 Register scratch1,
4195 Register scratch2,
4196 Label* failure) {
4197 // Test that both first and second are sequential ASCII strings.
4198 // Assume that they are non-smis.
4199 lw(scratch1, FieldMemOperand(first, HeapObject::kMapOffset));
4200 lw(scratch2, FieldMemOperand(second, HeapObject::kMapOffset));
4201 lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4202 lbu(scratch2, FieldMemOperand(scratch2, Map::kInstanceTypeOffset));
4203
4204 JumpIfBothInstanceTypesAreNotSequentialAscii(scratch1,
4205 scratch2,
4206 scratch1,
4207 scratch2,
4208 failure);
4209}
4210
4211
4212void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register first,
4213 Register second,
4214 Register scratch1,
4215 Register scratch2,
4216 Label* failure) {
4217 // Check that neither is a smi.
4218 STATIC_ASSERT(kSmiTag == 0);
4219 And(scratch1, first, Operand(second));
Ben Murdoch85b71792012-04-11 18:30:58 +01004220 And(scratch1, scratch1, Operand(kSmiTagMask));
4221 Branch(failure, eq, scratch1, Operand(zero_reg));
Steve Block44f0eee2011-05-26 01:26:41 +01004222 JumpIfNonSmisNotBothSequentialAsciiStrings(first,
4223 second,
4224 scratch1,
4225 scratch2,
4226 failure);
4227}
4228
4229
4230void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialAscii(
4231 Register first,
4232 Register second,
4233 Register scratch1,
4234 Register scratch2,
4235 Label* failure) {
4236 int kFlatAsciiStringMask =
4237 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask;
4238 int kFlatAsciiStringTag = ASCII_STRING_TYPE;
4239 ASSERT(kFlatAsciiStringTag <= 0xffff); // Ensure this fits 16-bit immed.
4240 andi(scratch1, first, kFlatAsciiStringMask);
4241 Branch(failure, ne, scratch1, Operand(kFlatAsciiStringTag));
4242 andi(scratch2, second, kFlatAsciiStringMask);
4243 Branch(failure, ne, scratch2, Operand(kFlatAsciiStringTag));
4244}
4245
4246
4247void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(Register type,
4248 Register scratch,
4249 Label* failure) {
4250 int kFlatAsciiStringMask =
4251 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask;
4252 int kFlatAsciiStringTag = ASCII_STRING_TYPE;
4253 And(scratch, type, Operand(kFlatAsciiStringMask));
4254 Branch(failure, ne, scratch, Operand(kFlatAsciiStringTag));
4255}
4256
4257
4258static const int kRegisterPassedArguments = 4;
4259
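// Worked example (assuming the o32 value kCArgSlotCount == 4): a call with
// num_arguments == 6 reserves (6 - 4) + 4 == 6 words below sp, namely the
// four always-present register argument slots plus two stacked arguments.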
Ben Murdoch85b71792012-04-11 18:30:58 +01004260void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
Steve Block44f0eee2011-05-26 01:26:41 +01004261 int frame_alignment = ActivationFrameAlignment();
4262
Steve Block44f0eee2011-05-26 01:26:41 +01004263 // Up to four simple arguments are passed in registers a0..a3.
4264 // Those four arguments must have reserved argument slots on the stack for
4265 // mips, even though those argument slots are not normally used.
4266 // Remaining arguments are pushed on the stack, above (higher address than)
4267 // the argument slots.
Ben Murdoch85b71792012-04-11 18:30:58 +01004268 int stack_passed_arguments = ((num_arguments <= kRegisterPassedArguments) ?
4269 0 : num_arguments - kRegisterPassedArguments) +
4270 kCArgSlotCount;
Steve Block44f0eee2011-05-26 01:26:41 +01004271 if (frame_alignment > kPointerSize) {
4272 // Make stack end at alignment and make room for num_arguments - 4 words
4273 // and the original value of sp.
4274 mov(scratch, sp);
4275 Subu(sp, sp, Operand((stack_passed_arguments + 1) * kPointerSize));
4276 ASSERT(IsPowerOf2(frame_alignment));
4277 And(sp, sp, Operand(-frame_alignment));
4278 sw(scratch, MemOperand(sp, stack_passed_arguments * kPointerSize));
4279 } else {
4280 Subu(sp, sp, Operand(stack_passed_arguments * kPointerSize));
4281 }
4282}
4283
4284
4285void MacroAssembler::CallCFunction(ExternalReference function,
Ben Murdoch85b71792012-04-11 18:30:58 +01004286 int num_arguments) {
4287 CallCFunctionHelper(no_reg, function, t8, num_arguments);
Steve Block44f0eee2011-05-26 01:26:41 +01004288}
4289
4290
4291void MacroAssembler::CallCFunction(Register function,
Ben Murdoch85b71792012-04-11 18:30:58 +01004292 Register scratch,
Steve Block44f0eee2011-05-26 01:26:41 +01004293 int num_arguments) {
Ben Murdoch85b71792012-04-11 18:30:58 +01004294 CallCFunctionHelper(function,
4295 ExternalReference::the_hole_value_location(isolate()),
4296 scratch,
4297 num_arguments);
Steve Block44f0eee2011-05-26 01:26:41 +01004298}
4299
4300
4301void MacroAssembler::CallCFunctionHelper(Register function,
Ben Murdoch85b71792012-04-11 18:30:58 +01004302 ExternalReference function_reference,
4303 Register scratch,
4304 int num_arguments) {
Steve Block44f0eee2011-05-26 01:26:41 +01004305 // Make sure that the stack is aligned before calling a C function unless
4306 // running in the simulator. The simulator has its own alignment check which
4307 // provides more information.
 4308 // The argument slots are presumed to have been set up by
 4309 // PrepareCallCFunction. The C function must be called via t9, per the MIPS ABI.
4310
4311#if defined(V8_HOST_ARCH_MIPS)
4312 if (emit_debug_code()) {
4313 int frame_alignment = OS::ActivationFrameAlignment();
4314 int frame_alignment_mask = frame_alignment - 1;
4315 if (frame_alignment > kPointerSize) {
4316 ASSERT(IsPowerOf2(frame_alignment));
4317 Label alignment_as_expected;
4318 And(at, sp, Operand(frame_alignment_mask));
4319 Branch(&alignment_as_expected, eq, at, Operand(zero_reg));
4320 // Don't use Check here, as it will call Runtime_Abort possibly
4321 // re-entering here.
4322 stop("Unexpected alignment in CallCFunction");
4323 bind(&alignment_as_expected);
4324 }
4325 }
4326#endif // V8_HOST_ARCH_MIPS
4327
4328 // Just call directly. The function called cannot cause a GC, or
4329 // allow preemption, so the return address in the link register
4330 // stays correct.
Steve Block44f0eee2011-05-26 01:26:41 +01004331
Ben Murdoch85b71792012-04-11 18:30:58 +01004332 if (function.is(no_reg)) {
4333 function = t9;
4334 li(function, Operand(function_reference));
4335 } else if (!function.is(t9)) {
Ben Murdoch257744e2011-11-30 15:57:28 +00004336 mov(t9, function);
Steve Block44f0eee2011-05-26 01:26:41 +01004337 function = t9;
4338 }
4339
4340 Call(function);
4341
Ben Murdoch85b71792012-04-11 18:30:58 +01004342 int stack_passed_arguments = ((num_arguments <= kRegisterPassedArguments) ?
4343 0 : num_arguments - kRegisterPassedArguments) +
4344 kCArgSlotCount;
Steve Block44f0eee2011-05-26 01:26:41 +01004345
4346 if (OS::ActivationFrameAlignment() > kPointerSize) {
4347 lw(sp, MemOperand(sp, stack_passed_arguments * kPointerSize));
4348 } else {
 4349 Addu(sp, sp, Operand(stack_passed_arguments * kPointerSize));
4350 }
4351}
4352
4353
4354#undef BRANCH_ARGS_CHECK
4355
4356
Ben Murdoch257744e2011-11-30 15:57:28 +00004357void MacroAssembler::LoadInstanceDescriptors(Register map,
4358 Register descriptors) {
4359 lw(descriptors,
4360 FieldMemOperand(map, Map::kInstanceDescriptorsOrBitField3Offset));
4361 Label not_smi;
4362 JumpIfNotSmi(descriptors, &not_smi);
Ben Murdoch85b71792012-04-11 18:30:58 +01004363 li(descriptors, Operand(FACTORY->empty_descriptor_array()));
Ben Murdoch257744e2011-11-30 15:57:28 +00004364 bind(&not_smi);
4365}
4366
4367
Steve Block44f0eee2011-05-26 01:26:41 +01004368CodePatcher::CodePatcher(byte* address, int instructions)
4369 : address_(address),
4370 instructions_(instructions),
4371 size_(instructions * Assembler::kInstrSize),
Ben Murdoch257744e2011-11-30 15:57:28 +00004372 masm_(Isolate::Current(), address, size_ + Assembler::kGap) {
Steve Block44f0eee2011-05-26 01:26:41 +01004373 // Create a new macro assembler pointing to the address of the code to patch.
 4374 // The size is adjusted with kGap in order for the assembler to generate size
4375 // bytes of instructions without failing with buffer size constraints.
4376 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
4377}
4378
4379
4380CodePatcher::~CodePatcher() {
4381 // Indicate that code has changed.
4382 CPU::FlushICache(address_, size_);
4383
4384 // Check that the code was patched as expected.
4385 ASSERT(masm_.pc_ == address_ + size_);
4386 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
4387}
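// Minimal usage sketch (illustrative only; branch_address is hypothetical):
//   CodePatcher patcher(branch_address, 1);
//   patcher.ChangeBranchCondition(ne);  // Turns a beq at that pc into bne.
// When patcher goes out of scope, the destructor flushes the instruction
// cache for the patched range.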
4388
4389
Ben Murdoch257744e2011-11-30 15:57:28 +00004390void CodePatcher::Emit(Instr instr) {
4391 masm()->emit(instr);
Steve Block44f0eee2011-05-26 01:26:41 +01004392}
4393
4394
4395void CodePatcher::Emit(Address addr) {
4396 masm()->emit(reinterpret_cast<Instr>(addr));
4397}
4398
4399
Ben Murdoch257744e2011-11-30 15:57:28 +00004400void CodePatcher::ChangeBranchCondition(Condition cond) {
4401 Instr instr = Assembler::instr_at(masm_.pc_);
4402 ASSERT(Assembler::IsBranch(instr));
4403 uint32_t opcode = Assembler::GetOpcodeField(instr);
4404 // Currently only the 'eq' and 'ne' cond values are supported and the simple
4405 // branch instructions (with opcode being the branch type).
4406 // There are some special cases (see Assembler::IsBranch()) so extending this
4407 // would be tricky.
4408 ASSERT(opcode == BEQ ||
4409 opcode == BNE ||
4410 opcode == BLEZ ||
4411 opcode == BGTZ ||
4412 opcode == BEQL ||
4413 opcode == BNEL ||
4414 opcode == BLEZL ||
4415 opcode == BGTZL);
4416 opcode = (cond == eq) ? BEQ : BNE;
4417 instr = (instr & ~kOpcodeMask) | opcode;
4418 masm_.emit(instr);
4419}
Steve Block44f0eee2011-05-26 01:26:41 +01004420
4421
Andrei Popescu31002712010-02-23 13:46:05 +00004422} } // namespace v8::internal
4423
Leon Clarkef7060e22010-06-03 12:02:55 +01004424#endif // V8_TARGET_ARCH_MIPS