// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_MIPS_MACRO_ASSEMBLER_MIPS_H_
#define V8_MIPS_MACRO_ASSEMBLER_MIPS_H_

#include "assembler.h"
#include "mips/assembler-mips.h"
#include "v8globals.h"

namespace v8 {
namespace internal {

// Forward declaration.
class JumpTarget;

// Reserved Register Usage Summary.
//
// Registers t8, t9, and at are reserved for use by the MacroAssembler.
//
// The programmer should know that the MacroAssembler may clobber these three,
// but won't touch other registers except in special cases.
//
// Per the MIPS ABI, register t9 must be used for indirect function calls
// via the 'jalr t9' or 'jr t9' instructions. This is relied upon by gcc when
// trying to update the gp register for position-independent code. Whenever
// MIPS generated code calls C code, it must be via the t9 register.

// Register aliases.
// cp is assumed to be a callee-saved register.
const Register roots = s6;  // Roots array pointer.
const Register cp = s7;     // JavaScript context pointer.
const Register fp = s8_fp;  // Alias for fp.
// Registers used for condition evaluation.
const Register condReg1 = s4;
const Register condReg2 = s5;


// Flags used for the AllocateInNewSpace functions.
enum AllocationFlags {
  // No special flags.
  NO_ALLOCATION_FLAGS = 0,
  // Return the pointer to the allocated object already tagged as a heap
  // object.
  TAG_OBJECT = 1 << 0,
  // The content of the result register already contains the allocation top in
  // new space.
  RESULT_CONTAINS_TOP = 1 << 1,
  // Specify that the requested size of the space to allocate is specified in
  // words instead of bytes.
  SIZE_IN_WORDS = 1 << 2
};
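
// Illustrative use (not part of the original header): these flags are
// combined bitwise and passed to the AllocateInNewSpace() members declared
// further below; the cast is needed because operator| yields an int.
//   __ AllocateInNewSpace(size_in_words, v0, t0, t1, &gc_required,
//                         static_cast<AllocationFlags>(TAG_OBJECT |
//                                                      SIZE_IN_WORDS));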

// Flags used for the ObjectToDoubleFPURegister function.
enum ObjectToDoubleFlags {
  // No special flags.
  NO_OBJECT_TO_DOUBLE_FLAGS = 0,
  // Object is known to be a non-smi.
  OBJECT_NOT_SMI = 1 << 0,
  // Don't load NaNs or infinities; branch to the non-number case instead.
  AVOID_NANS_AND_INFINITIES = 1 << 1
};

// Allows the programmer to use the branch delay slot of branches, jumps,
// and calls.
enum BranchDelaySlot {
  USE_DELAY_SLOT,
  PROTECT
};
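
// Illustrative use (not in the original header): with USE_DELAY_SLOT the
// caller supplies the instruction that fills the delay slot of the emitted
// branch or jump; with PROTECT the assembler fills it with a nop. A common
// pattern, using the ACCESS_MASM '__' shorthand defined at the end of this
// file, is:
//   __ Ret(USE_DELAY_SLOT);
//   __ mov(v0, zero_reg);  // Executed in the delay slot of the return jump.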

// MacroAssembler implements a collection of frequently used macros.
class MacroAssembler: public Assembler {
 public:
  // The isolate parameter can be NULL if the macro assembler should
  // not use isolate-dependent functionality. In this case, it's the
  // responsibility of the caller never to invoke such functions on the
  // macro assembler.
  MacroAssembler(Isolate* isolate, void* buffer, int size);

// Arguments macros.
#define COND_TYPED_ARGS Condition cond, Register r1, const Operand& r2
#define COND_ARGS cond, r1, r2

// Prototypes.

// Prototypes for functions with no target (e.g. Ret()).
#define DECLARE_NOTARGET_PROTOTYPE(Name) \
  void Name(BranchDelaySlot bd = PROTECT); \
  void Name(COND_TYPED_ARGS, BranchDelaySlot bd = PROTECT); \
  inline void Name(BranchDelaySlot bd, COND_TYPED_ARGS) { \
    Name(COND_ARGS, bd); \
  }

// Prototypes for functions with a target.

// Cases when relocation may be needed.
#define DECLARE_RELOC_PROTOTYPE(Name, target_type) \
  void Name(target_type target, \
            RelocInfo::Mode rmode, \
            BranchDelaySlot bd = PROTECT); \
  inline void Name(BranchDelaySlot bd, \
                   target_type target, \
                   RelocInfo::Mode rmode) { \
    Name(target, rmode, bd); \
  } \
  void Name(target_type target, \
            RelocInfo::Mode rmode, \
            COND_TYPED_ARGS, \
            BranchDelaySlot bd = PROTECT); \
  inline void Name(BranchDelaySlot bd, \
                   target_type target, \
                   RelocInfo::Mode rmode, \
                   COND_TYPED_ARGS) { \
    Name(target, rmode, COND_ARGS, bd); \
  }

// Cases when relocation is not needed.
#define DECLARE_NORELOC_PROTOTYPE(Name, target_type) \
  void Name(target_type target, BranchDelaySlot bd = PROTECT); \
  inline void Name(BranchDelaySlot bd, target_type target) { \
    Name(target, bd); \
  } \
  void Name(target_type target, \
            COND_TYPED_ARGS, \
            BranchDelaySlot bd = PROTECT); \
  inline void Name(BranchDelaySlot bd, \
                   target_type target, \
                   COND_TYPED_ARGS) { \
    Name(target, COND_ARGS, bd); \
  }

// Target prototypes.

#define DECLARE_JUMP_CALL_PROTOTYPES(Name) \
  DECLARE_NORELOC_PROTOTYPE(Name, Register) \
  DECLARE_NORELOC_PROTOTYPE(Name, const Operand&) \
  DECLARE_RELOC_PROTOTYPE(Name, byte*) \
  DECLARE_RELOC_PROTOTYPE(Name, Handle<Code>)

#define DECLARE_BRANCH_PROTOTYPES(Name) \
  DECLARE_NORELOC_PROTOTYPE(Name, Label*) \
  DECLARE_NORELOC_PROTOTYPE(Name, int16_t)


DECLARE_JUMP_CALL_PROTOTYPES(Jump)
DECLARE_JUMP_CALL_PROTOTYPES(Call)

DECLARE_BRANCH_PROTOTYPES(Branch)
DECLARE_BRANCH_PROTOTYPES(BranchAndLink)

DECLARE_NOTARGET_PROTOTYPE(Ret)

#undef COND_TYPED_ARGS
#undef COND_ARGS
#undef DECLARE_NOTARGET_PROTOTYPE
#undef DECLARE_NORELOC_PROTOTYPE
#undef DECLARE_RELOC_PROTOTYPE
#undef DECLARE_JUMP_CALL_PROTOTYPES
#undef DECLARE_BRANCH_PROTOTYPES
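
// Illustrative (not in the original header): within this class the macros
// above expand into overload sets such as
//   void Branch(Label* target, BranchDelaySlot bd = PROTECT);
//   void Branch(Label* target, Condition cond, Register r1, const Operand& r2,
//               BranchDelaySlot bd = PROTECT);
// so typical call sites, using the ACCESS_MASM '__' shorthand, look like:
//   __ Branch(&done);
//   __ Branch(&miss, ne, scratch, Operand(zero_reg));
//   __ Ret(eq, v0, Operand(zero_reg));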

  void CallWithAstId(Handle<Code> code,
                     RelocInfo::Mode rmode,
                     unsigned ast_id,
                     Condition cond = al,
                     Register r1 = zero_reg,
                     const Operand& r2 = Operand(zero_reg));

  int CallSize(Register reg);
  int CallSize(Handle<Code> code, RelocInfo::Mode rmode);

  // Emit code to discard a non-negative number of pointer-sized elements
  // from the stack, clobbering only the sp register.
  void Drop(int count,
            Condition cond = cc_always,
            Register reg = no_reg,
            const Operand& op = Operand(no_reg));

  void DropAndRet(int drop = 0,
                  Condition cond = cc_always,
                  Register reg = no_reg,
                  const Operand& op = Operand(no_reg));

  // Swap two registers. If the scratch register is omitted then a slightly
  // less efficient form using xor instead of mov is emitted.
  void Swap(Register reg1, Register reg2, Register scratch = no_reg);

  void Call(Label* target);

  inline void Move(Register dst, Register src) {
    if (!dst.is(src)) {
      mov(dst, src);
    }
  }

  inline void Move(FPURegister dst, FPURegister src) {
    if (!dst.is(src)) {
      mov_d(dst, src);
    }
  }

  inline void Move(Register dst_low, Register dst_high, FPURegister src) {
    mfc1(dst_low, src);
    mfc1(dst_high, FPURegister::from_code(src.code() + 1));
  }

  inline void Move(FPURegister dst, Register src_low, Register src_high) {
    mtc1(src_low, dst);
    mtc1(src_high, FPURegister::from_code(dst.code() + 1));
  }

  // Jump unconditionally to the given label.
  // We NEED a nop in the branch delay slot, as it is used by v8, for example
  // in CodeGenerator::ProcessDeferred().
  // Currently the branch delay slot is filled by the MacroAssembler.
  // Prefer b(Label) for code generation.
  void jmp(Label* L) {
    Branch(L);
  }

  // Load an object from the root table.
  void LoadRoot(Register destination,
                Heap::RootListIndex index);
  void LoadRoot(Register destination,
                Heap::RootListIndex index,
                Condition cond, Register src1, const Operand& src2);

  // Store an object to the root table.
  void StoreRoot(Register source,
                 Heap::RootListIndex index);
  void StoreRoot(Register source,
                 Heap::RootListIndex index,
                 Condition cond, Register src1, const Operand& src2);


  // Check if object is in new space.
  // scratch can be object itself, but it will be clobbered.
  void InNewSpace(Register object,
                  Register scratch,
                  Condition cc,  // eq for new space, ne otherwise.
                  Label* branch);


  // For the page containing |object| mark the region covering [address]
  // dirty. The object address must be in the first 8K of an allocated page.
  void RecordWriteHelper(Register object,
                         Register address,
                         Register scratch);

  // For the page containing |object| mark the region covering
  // [object+offset] dirty. The object address must be in the first 8K
  // of an allocated page. The 'scratch' registers are used in the
  // implementation and all 3 registers are clobbered by the
  // operation, as well as the 'at' register. RecordWrite updates the
  // write barrier even when storing smis.
  void RecordWrite(Register object,
                   Operand offset,
                   Register scratch0,
                   Register scratch1);

  // For the page containing |object| mark the region covering
  // [address] dirty. The object address must be in the first 8K of an
  // allocated page. All 3 registers are clobbered by the operation,
  // as well as the 'at' register. RecordWrite updates the write barrier
  // even when storing smis.
  void RecordWrite(Register object,
                   Register address,
                   Register scratch);
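
  // Illustrative use (not in the original header), assuming 'value' was just
  // stored into a field of 'object'; the scratch registers are arbitrary:
  //   __ sw(value, FieldMemOperand(object, JSObject::kPropertiesOffset));
  //   __ RecordWrite(object, Operand(JSObject::kPropertiesOffset), t0, t1);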


  // -------------------------------------------------------------------------
  // Inline caching support.

  // Generate code for checking access rights - used for security checks
  // on access to global objects across environments. The holder register
  // is left untouched, whereas both scratch registers are clobbered.
  void CheckAccessGlobalProxy(Register holder_reg,
                              Register scratch,
                              Label* miss);

  inline void MarkCode(NopMarkerTypes type) {
    nop(type);
  }

  // Check if the given instruction is a 'type' marker.
  // i.e. check if it is a sll zero_reg, zero_reg, <type> (referenced as
  // nop(type)). These instructions are generated to mark special locations
  // in the code, like some special IC code.
  static inline bool IsMarkedCode(Instr instr, int type) {
    ASSERT((FIRST_IC_MARKER <= type) && (type < LAST_CODE_MARKER));
    return IsNop(instr, type);
  }


  static inline int GetCodeMarker(Instr instr) {
    uint32_t opcode = ((instr & kOpcodeMask));
    uint32_t rt = ((instr & kRtFieldMask) >> kRtShift);
    uint32_t rs = ((instr & kRsFieldMask) >> kRsShift);
    uint32_t sa = ((instr & kSaFieldMask) >> kSaShift);

    // Return <n> if we have a sll zero_reg, zero_reg, n;
    // otherwise return -1.
    bool sllzz = (opcode == SLL &&
                  rt == static_cast<uint32_t>(ToNumber(zero_reg)) &&
                  rs == static_cast<uint32_t>(ToNumber(zero_reg)));
    int type =
        (sllzz && FIRST_IC_MARKER <= sa && sa < LAST_CODE_MARKER) ? sa : -1;
    ASSERT((type == -1) ||
           ((FIRST_IC_MARKER <= type) && (type < LAST_CODE_MARKER)));
    return type;
  }



  // -------------------------------------------------------------------------
  // Allocation support.

  // Allocate an object in new space. The object_size is specified
  // either in bytes or in words if the allocation flag SIZE_IN_WORDS
  // is passed. If the new space is exhausted control continues at the
  // gc_required label. The allocated object is returned in result. If
  // the TAG_OBJECT flag is passed the result is tagged as a heap
  // object. All registers are clobbered also when control
  // continues at the gc_required label.
  void AllocateInNewSpace(int object_size,
                          Register result,
                          Register scratch1,
                          Register scratch2,
                          Label* gc_required,
                          AllocationFlags flags);
  void AllocateInNewSpace(Register object_size,
                          Register result,
                          Register scratch1,
                          Register scratch2,
                          Label* gc_required,
                          AllocationFlags flags);

  // Undo allocation in new space. The object passed and objects allocated
  // after it will no longer be allocated. The caller must make sure that no
  // pointers are left to the object(s) no longer allocated as they would be
  // invalid when allocation is undone.
  void UndoAllocationInNewSpace(Register object, Register scratch);


  void AllocateTwoByteString(Register result,
                             Register length,
                             Register scratch1,
                             Register scratch2,
                             Register scratch3,
                             Label* gc_required);
  void AllocateAsciiString(Register result,
                           Register length,
                           Register scratch1,
                           Register scratch2,
                           Register scratch3,
                           Label* gc_required);
  void AllocateTwoByteConsString(Register result,
                                 Register length,
                                 Register scratch1,
                                 Register scratch2,
                                 Label* gc_required);
  void AllocateAsciiConsString(Register result,
                               Register length,
                               Register scratch1,
                               Register scratch2,
                               Label* gc_required);

  // Allocates a heap number or jumps to the gc_required label if the young
  // space is full and a scavenge is needed. All registers are clobbered also
  // when control continues at the gc_required label.
  void AllocateHeapNumber(Register result,
                          Register scratch1,
                          Register scratch2,
                          Register heap_number_map,
                          Label* gc_required);
  void AllocateHeapNumberWithValue(Register result,
                                   FPURegister value,
                                   Register scratch1,
                                   Register scratch2,
                                   Label* gc_required);


  // -------------------------------------------------------------------------
  // Instruction macros.

#define DEFINE_INSTRUCTION(instr) \
  void instr(Register rd, Register rs, const Operand& rt); \
  void instr(Register rd, Register rs, Register rt) { \
    instr(rd, rs, Operand(rt)); \
  } \
  void instr(Register rs, Register rt, int32_t j) { \
    instr(rs, rt, Operand(j)); \
  }

#define DEFINE_INSTRUCTION2(instr) \
  void instr(Register rs, const Operand& rt); \
  void instr(Register rs, Register rt) { \
    instr(rs, Operand(rt)); \
  } \
  void instr(Register rs, int32_t j) { \
    instr(rs, Operand(j)); \
  }

  DEFINE_INSTRUCTION(Addu);
  DEFINE_INSTRUCTION(Subu);
  DEFINE_INSTRUCTION(Mul);
  DEFINE_INSTRUCTION2(Mult);
  DEFINE_INSTRUCTION2(Multu);
  DEFINE_INSTRUCTION2(Div);
  DEFINE_INSTRUCTION2(Divu);

  DEFINE_INSTRUCTION(And);
  DEFINE_INSTRUCTION(Or);
  DEFINE_INSTRUCTION(Xor);
  DEFINE_INSTRUCTION(Nor);
  DEFINE_INSTRUCTION2(Neg);

  DEFINE_INSTRUCTION(Slt);
  DEFINE_INSTRUCTION(Sltu);

  // MIPS32 R2 instruction macro.
  DEFINE_INSTRUCTION(Ror);

#undef DEFINE_INSTRUCTION
#undef DEFINE_INSTRUCTION2


  // -------------------------------------------------------------------------
  // Pseudo-instructions.

  void mov(Register rd, Register rt) { or_(rd, rt, zero_reg); }

  // Load int32 in the rd register.
  void li(Register rd, Operand j, bool gen2instr = false);
  inline void li(Register rd, int32_t j, bool gen2instr = false) {
    li(rd, Operand(j), gen2instr);
  }
  inline void li(Register dst, Handle<Object> value, bool gen2instr = false) {
    li(dst, Operand(value), gen2instr);
  }

  // Exception-generating instructions and debugging support.
  void stop(const char* msg);

  // Push multiple registers on the stack.
  // Registers are saved in numerical order, with higher numbered registers
  // saved in higher memory addresses.
  void MultiPush(RegList regs);
  void MultiPushReversed(RegList regs);
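
  // Illustrative use (not in the original header): RegList is a bit mask of
  // register codes, so saving a0-a2 together with ra looks like:
  //   __ MultiPush(a0.bit() | a1.bit() | a2.bit() | ra.bit());
  //   ...
  //   __ MultiPop(a0.bit() | a1.bit() | a2.bit() | ra.bit());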

  // Lower case push() for compatibility with arch-independent code.
  void push(Register src) {
    Addu(sp, sp, Operand(-kPointerSize));
    sw(src, MemOperand(sp, 0));
  }

  // Push two registers. Pushes leftmost register first (to highest address).
  void Push(Register src1, Register src2) {
    Subu(sp, sp, Operand(2 * kPointerSize));
    sw(src1, MemOperand(sp, 1 * kPointerSize));
    sw(src2, MemOperand(sp, 0 * kPointerSize));
  }

  // Push three registers. Pushes leftmost register first (to highest address).
  void Push(Register src1, Register src2, Register src3) {
    Subu(sp, sp, Operand(3 * kPointerSize));
    sw(src1, MemOperand(sp, 2 * kPointerSize));
    sw(src2, MemOperand(sp, 1 * kPointerSize));
    sw(src3, MemOperand(sp, 0 * kPointerSize));
  }

  // Push four registers. Pushes leftmost register first (to highest address).
  void Push(Register src1, Register src2, Register src3, Register src4) {
    Subu(sp, sp, Operand(4 * kPointerSize));
    sw(src1, MemOperand(sp, 3 * kPointerSize));
    sw(src2, MemOperand(sp, 2 * kPointerSize));
    sw(src3, MemOperand(sp, 1 * kPointerSize));
    sw(src4, MemOperand(sp, 0 * kPointerSize));
  }
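
  // Illustrative (not in the original header): after
  //   __ Push(a0, a1, a2);
  // the stack holds a0 at sp + 8, a1 at sp + 4, and a2 at sp + 0, matching a
  // push(a0); push(a1); push(a2) sequence but with a single sp adjustment.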

  void Push(Register src, Condition cond, Register tst1, Register tst2) {
    // Since we don't have conditional execution we use a Branch.
    Branch(3, cond, tst1, Operand(tst2));
    Subu(sp, sp, Operand(kPointerSize));
    sw(src, MemOperand(sp, 0));
  }

  // Pops multiple values from the stack and loads them into the
  // registers specified in regs. Pop order is the opposite of MultiPush.
  void MultiPop(RegList regs);
  void MultiPopReversed(RegList regs);

  // Lower case pop() for compatibility with arch-independent code.
  void pop(Register dst) {
    lw(dst, MemOperand(sp, 0));
    Addu(sp, sp, Operand(kPointerSize));
  }

  // Pop two registers. Pops rightmost register first (from lower address).
  void Pop(Register src1, Register src2) {
    ASSERT(!src1.is(src2));
    lw(src2, MemOperand(sp, 0 * kPointerSize));
    lw(src1, MemOperand(sp, 1 * kPointerSize));
    Addu(sp, sp, 2 * kPointerSize);
  }

  void Pop(uint32_t count = 1) {
    Addu(sp, sp, Operand(count * kPointerSize));
  }

  // Push and pop the registers that can hold pointers, as defined by the
  // RegList constant kSafepointSavedRegisters.
  void PushSafepointRegisters();
  void PopSafepointRegisters();
  void PushSafepointRegistersAndDoubles();
  void PopSafepointRegistersAndDoubles();
  // Store value in register src in the safepoint stack slot for
  // register dst.
  void StoreToSafepointRegisterSlot(Register src, Register dst);
  void StoreToSafepointRegistersAndDoublesSlot(Register src, Register dst);
  // Load the value of the src register from its safepoint stack slot
  // into register dst.
  void LoadFromSafepointRegisterSlot(Register dst, Register src);

  // MIPS32 R2 instruction macro.
  void Ins(Register rt, Register rs, uint16_t pos, uint16_t size);
  void Ext(Register rt, Register rs, uint16_t pos, uint16_t size);

  // Convert unsigned word to double.
  void Cvt_d_uw(FPURegister fd, FPURegister fs);
  void Cvt_d_uw(FPURegister fd, Register rs);

  // Convert double to unsigned word.
  void Trunc_uw_d(FPURegister fd, FPURegister fs);
  void Trunc_uw_d(FPURegister fd, Register rs);

  // Convert the HeapNumber pointed to by source to a 32-bit signed integer
  // dest. If the HeapNumber does not fit into a 32-bit signed integer branch
  // to the not_int32 label. If FPU is available double_scratch is used but
  // not scratch2.
  void ConvertToInt32(Register source,
                      Register dest,
                      Register scratch,
                      Register scratch2,
                      FPURegister double_scratch,
                      Label* not_int32);

  // Helper for EmitECMATruncate.
  // This will truncate a floating-point value outside of the signed 32-bit
  // integer range to a 32-bit signed integer.
  // Expects the double value loaded in input_high and input_low.
  // Exits with the answer in 'result'.
  // Note that this code does not work for values in the 32-bit range!
  void EmitOutOfInt32RangeTruncate(Register result,
                                   Register input_high,
                                   Register input_low,
                                   Register scratch);

  // -------------------------------------------------------------------------
  // Activation frames.

  void EnterInternalFrame() { EnterFrame(StackFrame::INTERNAL); }
  void LeaveInternalFrame() { LeaveFrame(StackFrame::INTERNAL); }

  void EnterConstructFrame() { EnterFrame(StackFrame::CONSTRUCT); }
  void LeaveConstructFrame() { LeaveFrame(StackFrame::CONSTRUCT); }

  // Enter exit frame.
  // argc - argument count to be dropped by LeaveExitFrame.
  // save_doubles - saves FPU registers on stack, currently disabled.
  // stack_space - extra stack space.
  void EnterExitFrame(bool save_doubles,
                      int stack_space = 0);

  // Leave the current exit frame.
  void LeaveExitFrame(bool save_doubles, Register arg_count);

  // Get the actual activation frame alignment for target environment.
  static int ActivationFrameAlignment();

  // Make sure the stack is aligned. Only emits code in debug mode.
  void AssertStackIsAligned();

  void LoadContext(Register dst, int context_chain_length);

  void LoadGlobalFunction(int index, Register function);

  // Load the initial map from the global function. The registers
  // function and map can be the same, function is then overwritten.
  void LoadGlobalFunctionInitialMap(Register function,
                                    Register map,
                                    Register scratch);

  // -------------------------------------------------------------------------
  // JavaScript invokes.

  // Set up call kind marking in t1. The method takes t1 as an
  // explicit first parameter to make the code more readable at the
  // call sites.
  void SetCallKind(Register dst, CallKind kind);

  // Invoke the JavaScript function code by either calling or jumping.
  void InvokeCode(Register code,
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  InvokeFlag flag,
                  const CallWrapper& call_wrapper = NullCallWrapper(),
                  CallKind call_kind = CALL_AS_METHOD);

  void InvokeCode(Handle<Code> code,
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  RelocInfo::Mode rmode,
                  InvokeFlag flag,
                  CallKind call_kind = CALL_AS_METHOD);

  // Invoke the JavaScript function in the given register. Changes the
  // current context to the context in the function before invoking.
  void InvokeFunction(Register function,
                      const ParameterCount& actual,
                      InvokeFlag flag,
                      const CallWrapper& call_wrapper = NullCallWrapper(),
                      CallKind call_kind = CALL_AS_METHOD);

  void InvokeFunction(JSFunction* function,
                      const ParameterCount& actual,
                      InvokeFlag flag);


  void IsObjectJSObjectType(Register heap_object,
                            Register map,
                            Register scratch,
                            Label* fail);

  void IsInstanceJSObjectType(Register map,
                              Register scratch,
                              Label* fail);

  void IsObjectJSStringType(Register object,
                            Register scratch,
                            Label* fail);

#ifdef ENABLE_DEBUGGER_SUPPORT
  // -------------------------------------------------------------------------
  // Debugger Support.

  void DebugBreak();
#endif


  // -------------------------------------------------------------------------
  // Exception handling.

  // Push a new try handler and link it into the try handler chain.
  // The return address must be passed in register ra.
  // Clobbers t0, t1, and t2.
  void PushTryHandler(CodeLocation try_location, HandlerType type);

  // Unlink the stack handler on top of the stack from the try handler chain.
  // Must preserve the result register.
  void PopTryHandler();

  // Passes the thrown value (in v0) to the handler at the top of the try
  // handler chain.
  void Throw(Register value);

  // Propagates an uncatchable exception to the top of the current JS stack's
  // handler chain.
  void ThrowUncatchable(UncatchableExceptionType type, Register value);

  // Copies a fixed number of fields of heap objects from src to dst.
  void CopyFields(Register dst, Register src, RegList temps, int field_count);

  // Copies a number of bytes from src to dst. All registers are clobbered. On
  // exit src and dst will point to the place just after where the last byte
  // was read or written and length will be zero.
  void CopyBytes(Register src,
                 Register dst,
                 Register length,
                 Register scratch);

  // -------------------------------------------------------------------------
  // Support functions.

  // Tries to get the function prototype of a function and puts the value in
  // the result register. Checks that the function really is a
  // function and jumps to the miss label if the fast checks fail. The
  // function register will be untouched; the other registers may be
  // clobbered.
  void TryGetFunctionPrototype(Register function,
                               Register result,
                               Register scratch,
                               Label* miss);

  void GetObjectType(Register function,
                     Register map,
                     Register type_reg);

  // Check if the map of an object is equal to a specified map (either
  // given directly or as an index into the root list) and branch to
  // label if not. Skip the smi check if not required (object is known
  // to be a heap object).
  void CheckMap(Register obj,
                Register scratch,
                Handle<Map> map,
                Label* fail,
                SmiCheckType smi_check_type);

  void CheckMap(Register obj,
                Register scratch,
                Heap::RootListIndex index,
                Label* fail,
                SmiCheckType smi_check_type);

  // Check if the map of an object is equal to a specified map and branch to a
  // specified target if equal. Skip the smi check if not required (object is
  // known to be a heap object).
  void DispatchMap(Register obj,
                   Register scratch,
                   Handle<Map> map,
                   Handle<Code> success,
                   SmiCheckType smi_check_type);

  // Generates code for reporting that an illegal operation has
  // occurred.
  void IllegalOperation(int num_arguments);

  // Picks out an array index from the hash field.
  // Register use:
  //   hash - holds the index's hash. Clobbered.
  //   index - holds the overwritten index on exit.
  void IndexFromHash(Register hash, Register index);

  // Get the number of least significant bits from a register.
  void GetLeastBitsFromSmi(Register dst, Register src, int num_least_bits);
  void GetLeastBitsFromInt32(Register dst, Register src, int num_least_bits);

  // Load the value of a number object into an FPU double register. If the
  // object is not a number, a jump to the label not_number is performed
  // and the FPU double register is unchanged.
  void ObjectToDoubleFPURegister(
      Register object,
      FPURegister value,
      Register scratch1,
      Register scratch2,
      Register heap_number_map,
      Label* not_number,
      ObjectToDoubleFlags flags = NO_OBJECT_TO_DOUBLE_FLAGS);

  // Load the value of a smi object into an FPU double register. The register
  // scratch1 can be the same register as smi in which case smi will hold the
  // untagged value afterwards.
  void SmiToDoubleFPURegister(Register smi,
                              FPURegister value,
                              Register scratch1);

  // -------------------------------------------------------------------------
  // Overflow handling functions.
  // Usage: first call the appropriate arithmetic function, then call one of
  // the jump functions with the overflow_dst register as the second parameter.

  void AdduAndCheckForOverflow(Register dst,
                               Register left,
                               Register right,
                               Register overflow_dst,
                               Register scratch = at);

  void SubuAndCheckForOverflow(Register dst,
                               Register left,
                               Register right,
                               Register overflow_dst,
                               Register scratch = at);

  void BranchOnOverflow(Label* label,
                        Register overflow_check,
                        BranchDelaySlot bd = PROTECT) {
    Branch(label, lt, overflow_check, Operand(zero_reg), bd);
  }

  void BranchOnNoOverflow(Label* label,
                          Register overflow_check,
                          BranchDelaySlot bd = PROTECT) {
    Branch(label, ge, overflow_check, Operand(zero_reg), bd);
  }

  void RetOnOverflow(Register overflow_check, BranchDelaySlot bd = PROTECT) {
    Ret(lt, overflow_check, Operand(zero_reg), bd);
  }

  void RetOnNoOverflow(Register overflow_check, BranchDelaySlot bd = PROTECT) {
    Ret(ge, overflow_check, Operand(zero_reg), bd);
  }
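
  // Illustrative use (not in the original header), following the usage note
  // above; t0 receives the overflow indicator, which is negative on overflow:
  //   __ AdduAndCheckForOverflow(v0, a0, a1, t0);
  //   __ BranchOnOverflow(&overflow, t0);
  //   // v0 now holds a0 + a1, and no overflow occurred.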

  // -------------------------------------------------------------------------
  // Runtime calls.

  // Call a code stub.
  void CallStub(CodeStub* stub, Condition cond = cc_always,
                Register r1 = zero_reg, const Operand& r2 = Operand(zero_reg));

  // Call a code stub and return the code object called. Try to generate
  // the code if necessary. Do not perform a GC but instead return a retry
  // after GC failure.
  MUST_USE_RESULT MaybeObject* TryCallStub(
      CodeStub* stub,
      Condition cond = cc_always,
      Register r1 = zero_reg,
      const Operand& r2 = Operand(zero_reg));

  // Tail call a code stub (jump).
  void TailCallStub(CodeStub* stub);

  // Tail call a code stub (jump) and return the code object called. Try to
  // generate the code if necessary. Do not perform a GC but instead return
  // a retry after GC failure.
  MUST_USE_RESULT MaybeObject* TryTailCallStub(
      CodeStub* stub,
      Condition cond = cc_always,
      Register r1 = zero_reg,
      const Operand& r2 = Operand(zero_reg));

  void CallJSExitStub(CodeStub* stub);

  // Call a runtime routine.
  void CallRuntime(const Runtime::Function* f, int num_arguments);
  void CallRuntimeSaveDoubles(Runtime::FunctionId id);

  // Convenience function: Same as above, but takes the fid instead.
  void CallRuntime(Runtime::FunctionId fid, int num_arguments);

  // Convenience function: call an external reference.
  void CallExternalReference(const ExternalReference& ext,
                             int num_arguments);

  // Tail call of a runtime routine (jump).
  // Like JumpToExternalReference, but also takes care of passing the number
  // of parameters.
  void TailCallExternalReference(const ExternalReference& ext,
                                 int num_arguments,
                                 int result_size);

  // Tail call of a runtime routine (jump). Try to generate the code if
  // necessary. Do not perform a GC but instead return a retry after GC
  // failure.
  MUST_USE_RESULT MaybeObject* TryTailCallExternalReference(
      const ExternalReference& ext, int num_arguments, int result_size);

  // Convenience function: tail call a runtime routine (jump).
  void TailCallRuntime(Runtime::FunctionId fid,
                       int num_arguments,
                       int result_size);

  // Before calling a C-function from generated code, align arguments on stack
  // and add space for the four mips argument slots.
  // After aligning the frame, non-register arguments must be stored on the
  // stack, after the argument slots, using the helper
  // CFunctionArgumentOperand().
  // The argument count assumes all arguments are word sized.
  // Some compilers/platforms require the stack to be aligned when calling
  // C++ code.
  // Needs a scratch register to do some arithmetic. This register will be
  // trashed.
  void PrepareCallCFunction(int num_arguments, Register scratch);

  // Arguments 1-4 are placed in registers a0 through a3 respectively.
  // Arguments 5..n are stored on the stack using the following:
  //   sw(t0, CFunctionArgumentOperand(5));

  // Calls a C function and cleans up the space for arguments allocated
  // by PrepareCallCFunction. The called function is not allowed to trigger a
  // garbage collection, since that might move the code and invalidate the
  // return address (unless this is somehow accounted for by the called
  // function).
  void CallCFunction(ExternalReference function, int num_arguments);
  void CallCFunction(Register function, Register scratch, int num_arguments);
  void GetCFunctionDoubleResult(const DoubleRegister dst);

  // There are two ways of passing double arguments on MIPS, depending on
  // whether soft or hard floating point ABI is used. These functions
  // abstract parameter passing for the three different ways we call
  // C functions from generated code.
  void SetCallCDoubleArguments(DoubleRegister dreg);
  void SetCallCDoubleArguments(DoubleRegister dreg1, DoubleRegister dreg2);
  void SetCallCDoubleArguments(DoubleRegister dreg, Register reg);
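
  // Illustrative call sequence (not in the original header); the external
  // reference and argument values are made-up placeholders:
  //   __ PrepareCallCFunction(5, t1);            // Five word-sized arguments.
  //   __ li(a0, Operand(first_arg));             // Arguments 1-4 go in a0-a3.
  //   ...
  //   __ sw(t0, CFunctionArgumentOperand(5));    // Fifth argument on stack.
  //   __ CallCFunction(
  //       ExternalReference::hypothetical_helper(isolate()), 5);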

  // Calls an API function. Allocates HandleScope, extracts returned value
  // from handle and propagates exceptions. Restores context.
  MaybeObject* TryCallApiFunctionAndReturn(ExternalReference function,
                                           int stack_space);

  // Jump to the builtin routine.
  void JumpToExternalReference(const ExternalReference& builtin);

  MaybeObject* TryJumpToExternalReference(const ExternalReference& ext);

  // Invoke specified builtin JavaScript function. Adds an entry to
  // the unresolved list if the name does not resolve.
  void InvokeBuiltin(Builtins::JavaScript id,
                     InvokeFlag flag,
                     const CallWrapper& call_wrapper = NullCallWrapper());

  // Store the code object for the given builtin in the target register and
  // set up the function in a1.
  void GetBuiltinEntry(Register target, Builtins::JavaScript id);

  // Store the function for the given builtin in the target register.
  void GetBuiltinFunction(Register target, Builtins::JavaScript id);

  struct Unresolved {
    int pc;
    uint32_t flags;  // See Bootstrapper::FixupFlags decoders/encoders.
    const char* name;
  };

  Handle<Object> CodeObject() {
    ASSERT(!code_object_.is_null());
    return code_object_;
  }

  // -------------------------------------------------------------------------
  // StatsCounter support.

  void SetCounter(StatsCounter* counter, int value,
                  Register scratch1, Register scratch2);
  void IncrementCounter(StatsCounter* counter, int value,
                        Register scratch1, Register scratch2);
  void DecrementCounter(StatsCounter* counter, int value,
                        Register scratch1, Register scratch2);


  // -------------------------------------------------------------------------
  // Debugging.

  // Calls Abort(msg) if the condition cc is not satisfied.
  // Use --debug_code to enable.
  void Assert(Condition cc, const char* msg, Register rs, Operand rt);
  void AssertRegisterIsRoot(Register reg, Heap::RootListIndex index);
  void AssertFastElements(Register elements);

  // Like Assert(), but always enabled.
  void Check(Condition cc, const char* msg, Register rs, Operand rt);

  // Print a message to stdout and abort execution.
  void Abort(const char* msg);

  // Verify restrictions about code generated in stubs.
  void set_generating_stub(bool value) { generating_stub_ = value; }
  bool generating_stub() { return generating_stub_; }
  void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
  bool allow_stub_calls() { return allow_stub_calls_; }

  // -------------------------------------------------------------------------
  // Number utilities.

  // Check whether the value of reg is a power of two and not zero. If not
  // control continues at the label not_power_of_two. If reg is a power of two
  // the register scratch contains the value of (reg - 1) when control falls
  // through.
  void JumpIfNotPowerOfTwoOrZero(Register reg,
                                 Register scratch,
                                 Label* not_power_of_two_or_zero);

  // -------------------------------------------------------------------------
  // Smi utilities.

  // Try to convert int32 to smi. If the value is too large, preserve
  // the original value and jump to not_a_smi. Destroys scratch and
  // sets flags.
  // This is only used by Crankshaft at the moment, so it is unimplemented on
  // MIPS.
  void TrySmiTag(Register reg, Label* not_a_smi, Register scratch) {
    UNIMPLEMENTED_MIPS();
  }

  void SmiTag(Register reg) {
    Addu(reg, reg, reg);
  }

  void SmiTag(Register dst, Register src) {
    Addu(dst, src, src);
  }

  void SmiUntag(Register reg) {
    sra(reg, reg, kSmiTagSize);
  }

  void SmiUntag(Register dst, Register src) {
    sra(dst, src, kSmiTagSize);
  }
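
  // With the 32-bit smi layout assumed here (kSmiTag == 0, kSmiTagSize == 1),
  // tagging is a left shift by one: SmiTag() turns 5 into 10 via reg + reg,
  // and SmiUntag() shifts the tag back out with an arithmetic shift right.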

  // Jump if the register contains a smi.
  inline void JumpIfSmi(Register value, Label* smi_label,
                        Register scratch = at) {
    ASSERT_EQ(0, kSmiTag);
    andi(scratch, value, kSmiTagMask);
    Branch(smi_label, eq, scratch, Operand(zero_reg));
  }

  // Jump if the register contains a non-smi.
  inline void JumpIfNotSmi(Register value, Label* not_smi_label,
                           Register scratch = at) {
    ASSERT_EQ(0, kSmiTag);
    andi(scratch, value, kSmiTagMask);
    Branch(not_smi_label, ne, scratch, Operand(zero_reg));
  }

  // Jump if either of the registers contains a non-smi.
  void JumpIfNotBothSmi(Register reg1, Register reg2, Label* on_not_both_smi);
  // Jump if either of the registers contains a smi.
  void JumpIfEitherSmi(Register reg1, Register reg2, Label* on_either_smi);

  // Abort execution if argument is a smi. Used in debug code.
  void AbortIfSmi(Register object);
  void AbortIfNotSmi(Register object);

  // Abort execution if argument is not a string. Used in debug code.
  void AbortIfNotString(Register object);

  // Abort execution if argument is not the root value with the given index.
  void AbortIfNotRootValue(Register src,
                           Heap::RootListIndex root_value_index,
                           const char* message);

  // -------------------------------------------------------------------------
  // HeapNumber utilities.

  void JumpIfNotHeapNumber(Register object,
                           Register heap_number_map,
                           Register scratch,
                           Label* on_not_heap_number);

  // -------------------------------------------------------------------------
  // String utilities.

  // Checks if both instance types are sequential ASCII strings and jumps to
  // label if either is not.
  void JumpIfBothInstanceTypesAreNotSequentialAscii(
      Register first_object_instance_type,
      Register second_object_instance_type,
      Register scratch1,
      Register scratch2,
      Label* failure);

  // Check if instance type is sequential ASCII string and jump to label if
  // it is not.
  void JumpIfInstanceTypeIsNotSequentialAscii(Register type,
                                              Register scratch,
                                              Label* failure);

  // Test that both first and second are sequential ASCII strings.
  // Assume that they are non-smis.
  void JumpIfNonSmisNotBothSequentialAsciiStrings(Register first,
                                                  Register second,
                                                  Register scratch1,
                                                  Register scratch2,
                                                  Label* failure);

  // Test that both first and second are sequential ASCII strings.
  // Check that they are non-smis.
  void JumpIfNotBothSequentialAsciiStrings(Register first,
                                           Register second,
                                           Register scratch1,
                                           Register scratch2,
                                           Label* failure);

  void LoadInstanceDescriptors(Register map, Register descriptors);

 private:
  void CallCFunctionHelper(Register function,
                           ExternalReference function_reference,
                           Register scratch,
                           int num_arguments);

  void Jump(intptr_t target, RelocInfo::Mode rmode,
            BranchDelaySlot bd = PROTECT);
  void Jump(intptr_t target, RelocInfo::Mode rmode, Condition cond = cc_always,
            Register r1 = zero_reg, const Operand& r2 = Operand(zero_reg),
            BranchDelaySlot bd = PROTECT);
  void Call(intptr_t target, RelocInfo::Mode rmode,
            BranchDelaySlot bd = PROTECT);
  void Call(intptr_t target, RelocInfo::Mode rmode, Condition cond = cc_always,
            Register r1 = zero_reg, const Operand& r2 = Operand(zero_reg),
            BranchDelaySlot bd = PROTECT);

  // Helper functions for generating invokes.
  void InvokePrologue(const ParameterCount& expected,
                      const ParameterCount& actual,
                      Handle<Code> code_constant,
                      Register code_reg,
                      Label* done,
                      InvokeFlag flag,
                      const CallWrapper& call_wrapper = NullCallWrapper(),
                      CallKind call_kind = CALL_AS_METHOD);

  // Get the code for the given builtin. Indicates in the 'resolved' flag
  // whether it was able to resolve the function.
  Handle<Code> ResolveBuiltin(Builtins::JavaScript id, bool* resolved);

  // Activation support.
  void EnterFrame(StackFrame::Type type);
  void LeaveFrame(StackFrame::Type type);

  void InitializeNewString(Register string,
                           Register length,
                           Heap::RootListIndex map_index,
                           Register scratch1,
                           Register scratch2);

  // Compute memory operands for safepoint stack slots.
  static int SafepointRegisterStackIndex(int reg_code);
  MemOperand SafepointRegisterSlot(Register reg);
  MemOperand SafepointRegistersAndDoublesSlot(Register reg);

  bool generating_stub_;
  bool allow_stub_calls_;
  // This handle will be patched with the code object on installation.
  Handle<Object> code_object_;

  // Needs access to SafepointRegisterStackIndex for optimized frame
  // traversal.
  friend class OptimizedFrame;
};


// The code patcher is used to patch (typically) small parts of code e.g. for
// debugging and other types of instrumentation. When using the code patcher
// the exact number of bytes specified must be emitted. It is not legal to emit
// relocation information. If any of these constraints are violated it causes
// an assertion to fail.
class CodePatcher {
 public:
  CodePatcher(byte* address, int instructions);
  virtual ~CodePatcher();

  // Macro assembler to emit code.
  MacroAssembler* masm() { return &masm_; }

  // Emit an instruction directly.
  void Emit(Instr instr);

  // Emit an address directly.
  void Emit(Address addr);

  // Change the condition part of an instruction, leaving the rest of the
  // instruction unchanged.
  void ChangeBranchCondition(Condition cond);

 private:
  byte* address_;  // The address of the code being patched.
  int instructions_;  // Number of instructions of the expected patch size.
  int size_;  // Number of bytes of the expected patch size.
  MacroAssembler masm_;  // Macro assembler used to generate the code.
};


// -----------------------------------------------------------------------------
// Static helper functions.

static MemOperand ContextOperand(Register context, int index) {
  return MemOperand(context, Context::SlotOffset(index));
}


static inline MemOperand GlobalObjectOperand() {
  return ContextOperand(cp, Context::GLOBAL_INDEX);
}


// Generate a MemOperand for loading a field from an object.
static inline MemOperand FieldMemOperand(Register object, int offset) {
  return MemOperand(object, offset - kHeapObjectTag);
}


// Generate a MemOperand for storing arguments 5..N on the stack
// when calling CallCFunction().
static inline MemOperand CFunctionArgumentOperand(int index) {
  ASSERT(index > StandardFrameConstants::kCArgSlotCount);
  // Argument 5 takes the slot just past the four Arg-slots.
  int offset =
      (index - 5) * kPointerSize + StandardFrameConstants::kCArgsSlotsSize;
  return MemOperand(sp, offset);
}
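
// Illustrative (not in the original header): with 4-byte pointers and four
// reserved argument slots (StandardFrameConstants::kCArgsSlotsSize == 16),
// CFunctionArgumentOperand(5) is MemOperand(sp, 16) and
// CFunctionArgumentOperand(6) is MemOperand(sp, 20).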


#ifdef GENERATED_CODE_COVERAGE
#define CODE_COVERAGE_STRINGIFY(x) #x
#define CODE_COVERAGE_TOSTRING(x) CODE_COVERAGE_STRINGIFY(x)
#define __FILE_LINE__ __FILE__ ":" CODE_COVERAGE_TOSTRING(__LINE__)
#define ACCESS_MASM(masm) masm->stop(__FILE_LINE__); masm->
#else
#define ACCESS_MASM(masm) masm->
#endif

} }  // namespace v8::internal

#endif  // V8_MIPS_MACRO_ASSEMBLER_MIPS_H_