// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_MIPS_MACRO_ASSEMBLER_MIPS_H_
#define V8_MIPS_MACRO_ASSEMBLER_MIPS_H_

#include "assembler.h"
#include "mips/assembler-mips.h"
#include "v8globals.h"

namespace v8 {
namespace internal {

// Forward declaration.
class JumpTarget;

// Reserved Register Usage Summary.
//
// Registers t8, t9, and at are reserved for use by the MacroAssembler.
//
// The programmer should know that the MacroAssembler may clobber these three,
// but won't touch other registers except in special cases.
//
// Per the MIPS ABI, register t9 must be used for indirect function calls
// via the 'jalr t9' or 'jr t9' instructions. This is relied upon by gcc when
// trying to update the gp register for position-independent code. Whenever
// MIPS generated code calls C code, it must do so via the t9 register.

// Register aliases.
// cp is assumed to be a callee-saved register.
const Register roots = s6;  // Roots array pointer.
const Register cp = s7;     // JavaScript context pointer.
const Register fp = s8_fp;  // Alias for fp.
// Registers used for condition evaluation.
const Register condReg1 = s4;
const Register condReg2 = s5;


// Flags used for the AllocateInNewSpace functions.
enum AllocationFlags {
  // No special flags.
  NO_ALLOCATION_FLAGS = 0,
  // Return the pointer to the allocated object already tagged as a heap
  // object.
  TAG_OBJECT = 1 << 0,
  // The content of the result register already contains the allocation top in
  // new space.
  RESULT_CONTAINS_TOP = 1 << 1,
  // Specify that the requested size of the space to allocate is specified in
  // words instead of bytes.
  SIZE_IN_WORDS = 1 << 2
};
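
// Illustrative note (not from the original header): the flags above form a
// bitmask and can be combined when calling the allocation helpers declared
// further below, e.g. (names such as size_in_words and gc_required are
// placeholders):
//
//   AllocationFlags flags =
//       static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS);
//   __ AllocateInNewSpace(size_in_words, result, scratch1, scratch2,
//                         &gc_required, flags);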

// Flags used for the ObjectToDoubleFPURegister function.
enum ObjectToDoubleFlags {
  // No special flags.
  NO_OBJECT_TO_DOUBLE_FLAGS = 0,
  // Object is known to be a non-smi.
  OBJECT_NOT_SMI = 1 << 0,
  // Don't load NaNs or infinities; branch to the non-number case instead.
  AVOID_NANS_AND_INFINITIES = 1 << 1
};

// Allow the programmer to use the branch delay slot of branches, jumps, and
// calls.
enum BranchDelaySlot {
  USE_DELAY_SLOT,
  PROTECT
};

// MacroAssembler implements a collection of frequently used macros.
class MacroAssembler: public Assembler {
 public:
  // The isolate parameter can be NULL if the macro assembler should
  // not use isolate-dependent functionality. In this case, it's the
  // responsibility of the caller to never invoke such a function on the
  // macro assembler.
  MacroAssembler(Isolate* isolate, void* buffer, int size);

  // Arguments macros.
#define COND_TYPED_ARGS Condition cond, Register r1, const Operand& r2
#define COND_ARGS cond, r1, r2

  // Cases when relocation is not needed.
#define DECLARE_NORELOC_PROTOTYPE(Name, target_type) \
  void Name(target_type target, BranchDelaySlot bd = PROTECT); \
  inline void Name(BranchDelaySlot bd, target_type target) { \
    Name(target, bd); \
  } \
  void Name(target_type target, \
            COND_TYPED_ARGS, \
            BranchDelaySlot bd = PROTECT); \
  inline void Name(BranchDelaySlot bd, \
                   target_type target, \
                   COND_TYPED_ARGS) { \
    Name(target, COND_ARGS, bd); \
  }

#define DECLARE_BRANCH_PROTOTYPES(Name) \
  DECLARE_NORELOC_PROTOTYPE(Name, Label*) \
  DECLARE_NORELOC_PROTOTYPE(Name, int16_t)

  DECLARE_BRANCH_PROTOTYPES(Branch)
  DECLARE_BRANCH_PROTOTYPES(BranchAndLink)

#undef DECLARE_BRANCH_PROTOTYPES
#undef COND_TYPED_ARGS
#undef COND_ARGS
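
  // Illustrative usage (not from the original header): the Branch and
  // BranchAndLink pseudo-instructions declared above take an optional
  // condition triple and an optional BranchDelaySlot policy, e.g.:
  //
  //   __ Branch(&done);                                  // Unconditional.
  //   __ Branch(&miss, ne, scratch, Operand(zero_reg));  // Conditional.
  //   __ Branch(USE_DELAY_SLOT, &done, eq, t0, Operand(t1));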


  // Jump, Call, and Ret pseudo instructions implementing inter-working.
#define COND_ARGS Condition cond = al, Register rs = zero_reg, \
    const Operand& rt = Operand(zero_reg), BranchDelaySlot bd = PROTECT

  void Jump(Register target, COND_ARGS);
  void Jump(intptr_t target, RelocInfo::Mode rmode, COND_ARGS);
  void Jump(Address target, RelocInfo::Mode rmode, COND_ARGS);
  void Jump(Handle<Code> code, RelocInfo::Mode rmode, COND_ARGS);
  int CallSize(Register target, COND_ARGS);
  void Call(Register target, COND_ARGS);
  int CallSize(Address target, RelocInfo::Mode rmode, COND_ARGS);
  void Call(Address target, RelocInfo::Mode rmode, COND_ARGS);
  int CallSize(Handle<Code> code,
               RelocInfo::Mode rmode = RelocInfo::CODE_TARGET,
               unsigned ast_id = kNoASTId,
               COND_ARGS);
  void Call(Handle<Code> code,
            RelocInfo::Mode rmode = RelocInfo::CODE_TARGET,
            unsigned ast_id = kNoASTId,
            COND_ARGS);
  void Ret(COND_ARGS);
  inline void Ret(BranchDelaySlot bd) {
    Ret(al, zero_reg, Operand(zero_reg), bd);
  }

#undef COND_ARGS
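
  // Illustrative sketch (not from the original header) of a typical epilogue
  // built from the pseudo-instructions above; with USE_DELAY_SLOT the next
  // emitted instruction lands in the branch delay slot of the return:
  //
  //   __ Ret(USE_DELAY_SLOT);
  //   __ mov(v0, a0);  // Executed in the delay slot, before returning.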

  // Emit code to discard a non-negative number of pointer-sized elements
  // from the stack, clobbering only the sp register.
  void Drop(int count,
            Condition cond = cc_always,
            Register reg = no_reg,
            const Operand& op = Operand(no_reg));

  void DropAndRet(int drop = 0,
                  Condition cond = cc_always,
                  Register reg = no_reg,
                  const Operand& op = Operand(no_reg));

  // Swap two registers. If the scratch register is omitted then a slightly
  // less efficient form using xor instead of mov is emitted.
  void Swap(Register reg1, Register reg2, Register scratch = no_reg);

  void Call(Label* target);

  inline void Move(Register dst, Register src) {
    if (!dst.is(src)) {
      mov(dst, src);
    }
  }

  inline void Move(FPURegister dst, FPURegister src) {
    if (!dst.is(src)) {
      mov_d(dst, src);
    }
  }

  inline void Move(Register dst_low, Register dst_high, FPURegister src) {
    mfc1(dst_low, src);
    mfc1(dst_high, FPURegister::from_code(src.code() + 1));
  }

  inline void Move(FPURegister dst, Register src_low, Register src_high) {
    mtc1(src_low, dst);
    mtc1(src_high, FPURegister::from_code(dst.code() + 1));
  }

  // Jump unconditionally to the given label.
  // We NEED a nop in the branch delay slot, as it is used by v8, for example
  // in CodeGenerator::ProcessDeferred().
  // Currently the branch delay slot is filled by the MacroAssembler.
  // Rather, use b(Label) for code generation.
  void jmp(Label* L) {
    Branch(L);
  }

  // Load an object from the root table.
  void LoadRoot(Register destination,
                Heap::RootListIndex index);
  void LoadRoot(Register destination,
                Heap::RootListIndex index,
                Condition cond, Register src1, const Operand& src2);

  // Store an object to the root table.
  void StoreRoot(Register source,
                 Heap::RootListIndex index);
  void StoreRoot(Register source,
                 Heap::RootListIndex index,
                 Condition cond, Register src1, const Operand& src2);


  // Check if the object is in new space.
  // scratch can be the object itself, but it will be clobbered.
  void InNewSpace(Register object,
                  Register scratch,
                  Condition cc,  // eq for new space, ne otherwise.
                  Label* branch);


  // For the page containing |object| mark the region covering [address]
  // dirty. The object address must be in the first 8K of an allocated page.
  void RecordWriteHelper(Register object,
                         Register address,
                         Register scratch);

  // For the page containing |object| mark the region covering
  // [object+offset] dirty. The object address must be in the first 8K
  // of an allocated page. The 'scratch' registers are used in the
  // implementation and all 3 registers are clobbered by the
  // operation, as well as the 'at' register. RecordWrite updates the
  // write barrier even when storing smis.
  void RecordWrite(Register object,
                   Operand offset,
                   Register scratch0,
                   Register scratch1);

  // For the page containing |object| mark the region covering
  // [address] dirty. The object address must be in the first 8K of an
  // allocated page. All 3 registers are clobbered by the operation,
  // as well as the ip register. RecordWrite updates the write barrier
  // even when storing smis.
  void RecordWrite(Register object,
                   Register address,
                   Register scratch);


  // ---------------------------------------------------------------------------
  // Inline caching support.

  // Generate code for checking access rights - used for security checks
  // on access to global objects across environments. The holder register
  // is left untouched, whereas both scratch registers are clobbered.
  void CheckAccessGlobalProxy(Register holder_reg,
                              Register scratch,
                              Label* miss);


  void LoadFromNumberDictionary(Label* miss,
                                Register elements,
                                Register key,
                                Register result,
                                Register reg0,
                                Register reg1,
                                Register reg2);


  inline void MarkCode(NopMarkerTypes type) {
    nop(type);
  }

  // Check if the given instruction is a 'type' marker.
  // I.e., check if it is a sll zero_reg, zero_reg, <type> (referenced as
  // nop(type)). These instructions are generated to mark special locations in
  // the code, like some special IC code.
  static inline bool IsMarkedCode(Instr instr, int type) {
    ASSERT((FIRST_IC_MARKER <= type) && (type < LAST_CODE_MARKER));
    return IsNop(instr, type);
  }


  static inline int GetCodeMarker(Instr instr) {
    uint32_t opcode = ((instr & kOpcodeMask));
    uint32_t rt = ((instr & kRtFieldMask) >> kRtShift);
    uint32_t rs = ((instr & kRsFieldMask) >> kRsShift);
    uint32_t sa = ((instr & kSaFieldMask) >> kSaShift);

    // Return <n> if we have a sll zero_reg, zero_reg, n,
    // else return -1.
    bool sllzz = (opcode == SLL &&
                  rt == static_cast<uint32_t>(ToNumber(zero_reg)) &&
                  rs == static_cast<uint32_t>(ToNumber(zero_reg)));
    int type =
        (sllzz && FIRST_IC_MARKER <= sa && sa < LAST_CODE_MARKER) ? sa : -1;
    ASSERT((type == -1) ||
           ((FIRST_IC_MARKER <= type) && (type < LAST_CODE_MARKER)));
    return type;
  }



  // ---------------------------------------------------------------------------
  // Allocation support.

  // Allocate an object in new space. The object_size is specified
  // either in bytes or in words if the allocation flag SIZE_IN_WORDS
  // is passed. If the new space is exhausted control continues at the
  // gc_required label. The allocated object is returned in result. If
  // the flag tag_allocated_object is true the result is tagged as
  // a heap object. All registers are clobbered also when control
  // continues at the gc_required label.
  void AllocateInNewSpace(int object_size,
                          Register result,
                          Register scratch1,
                          Register scratch2,
                          Label* gc_required,
                          AllocationFlags flags);
  void AllocateInNewSpace(Register object_size,
                          Register result,
                          Register scratch1,
                          Register scratch2,
                          Label* gc_required,
                          AllocationFlags flags);

  // Undo allocation in new space. The object passed and objects allocated after
  // it will no longer be allocated. The caller must make sure that no pointers
  // are left to the object(s) no longer allocated as they would be invalid when
  // allocation is undone.
  void UndoAllocationInNewSpace(Register object, Register scratch);


  void AllocateTwoByteString(Register result,
                             Register length,
                             Register scratch1,
                             Register scratch2,
                             Register scratch3,
                             Label* gc_required);
  void AllocateAsciiString(Register result,
                           Register length,
                           Register scratch1,
                           Register scratch2,
                           Register scratch3,
                           Label* gc_required);
  void AllocateTwoByteConsString(Register result,
                                 Register length,
                                 Register scratch1,
                                 Register scratch2,
                                 Label* gc_required);
  void AllocateAsciiConsString(Register result,
                               Register length,
                               Register scratch1,
                               Register scratch2,
                               Label* gc_required);
  void AllocateTwoByteSlicedString(Register result,
                                   Register length,
                                   Register scratch1,
                                   Register scratch2,
                                   Label* gc_required);
  void AllocateAsciiSlicedString(Register result,
                                 Register length,
                                 Register scratch1,
                                 Register scratch2,
                                 Label* gc_required);

  // Allocates a heap number or jumps to the gc_required label if the young
  // space is full and a scavenge is needed. All registers are clobbered also
  // when control continues at the gc_required label.
  void AllocateHeapNumber(Register result,
                          Register scratch1,
                          Register scratch2,
                          Register heap_number_map,
                          Label* gc_required);
  void AllocateHeapNumberWithValue(Register result,
                                   FPURegister value,
                                   Register scratch1,
                                   Register scratch2,
                                   Label* gc_required);

  // ---------------------------------------------------------------------------
  // Instruction macros.

#define DEFINE_INSTRUCTION(instr) \
  void instr(Register rd, Register rs, const Operand& rt); \
  void instr(Register rd, Register rs, Register rt) { \
    instr(rd, rs, Operand(rt)); \
  } \
  void instr(Register rs, Register rt, int32_t j) { \
    instr(rs, rt, Operand(j)); \
  }

#define DEFINE_INSTRUCTION2(instr) \
  void instr(Register rs, const Operand& rt); \
  void instr(Register rs, Register rt) { \
    instr(rs, Operand(rt)); \
  } \
  void instr(Register rs, int32_t j) { \
    instr(rs, Operand(j)); \
  }

  DEFINE_INSTRUCTION(Addu);
  DEFINE_INSTRUCTION(Subu);
  DEFINE_INSTRUCTION(Mul);
  DEFINE_INSTRUCTION2(Mult);
  DEFINE_INSTRUCTION2(Multu);
  DEFINE_INSTRUCTION2(Div);
  DEFINE_INSTRUCTION2(Divu);

  DEFINE_INSTRUCTION(And);
  DEFINE_INSTRUCTION(Or);
  DEFINE_INSTRUCTION(Xor);
  DEFINE_INSTRUCTION(Nor);
  DEFINE_INSTRUCTION2(Neg);

  DEFINE_INSTRUCTION(Slt);
  DEFINE_INSTRUCTION(Sltu);

  // MIPS32 R2 instruction macro.
  DEFINE_INSTRUCTION(Ror);

#undef DEFINE_INSTRUCTION
#undef DEFINE_INSTRUCTION2
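
  // Illustrative usage (not from the original header): each macro above
  // expands to overloads taking a register, an Operand, or a plain immediate
  // as the final argument, so all of the following are valid:
  //
  //   __ Addu(t0, t1, t2);               // Register operand.
  //   __ Addu(t0, t1, Operand(0x7fff));  // Immediate wrapped in an Operand.
  //   __ And(v0, a0, 0x1);               // Plain int32_t immediate.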


  // ---------------------------------------------------------------------------
  // Pseudo-instructions.

  void mov(Register rd, Register rt) { or_(rd, rt, zero_reg); }

  // Load int32 in the rd register.
  void li(Register rd, Operand j, bool gen2instr = false);
  inline void li(Register rd, int32_t j, bool gen2instr = false) {
    li(rd, Operand(j), gen2instr);
  }
  inline void li(Register dst, Handle<Object> value, bool gen2instr = false) {
    li(dst, Operand(value), gen2instr);
  }
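
  // Illustrative note (an assumption about intent, not from the original
  // header): li materializes a 32-bit constant in one or two instructions;
  // passing gen2instr == true requests the two-instruction form even for
  // small values, which keeps the emitted size fixed, e.g.:
  //
  //   __ li(t0, Operand(0x12345678));  // Size depends on the value.
  //   __ li(t1, Operand(42), true);    // Always the two-instruction form.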

  // Push multiple registers on the stack.
  // Registers are saved in numerical order, with higher numbered registers
  // saved in higher memory addresses.
  void MultiPush(RegList regs);
  void MultiPushReversed(RegList regs);

  void MultiPushFPU(RegList regs);
  void MultiPushReversedFPU(RegList regs);

  // Lower case push() for compatibility with arch-independent code.
  void push(Register src) {
    Addu(sp, sp, Operand(-kPointerSize));
    sw(src, MemOperand(sp, 0));
  }

  // Push a handle.
  void Push(Handle<Object> handle);

  // Push two registers. Pushes leftmost register first (to highest address).
  void Push(Register src1, Register src2) {
    Subu(sp, sp, Operand(2 * kPointerSize));
    sw(src1, MemOperand(sp, 1 * kPointerSize));
    sw(src2, MemOperand(sp, 0 * kPointerSize));
  }

  // Push three registers. Pushes leftmost register first (to highest address).
  void Push(Register src1, Register src2, Register src3) {
    Subu(sp, sp, Operand(3 * kPointerSize));
    sw(src1, MemOperand(sp, 2 * kPointerSize));
    sw(src2, MemOperand(sp, 1 * kPointerSize));
    sw(src3, MemOperand(sp, 0 * kPointerSize));
  }

  // Push four registers. Pushes leftmost register first (to highest address).
  void Push(Register src1, Register src2, Register src3, Register src4) {
    Subu(sp, sp, Operand(4 * kPointerSize));
    sw(src1, MemOperand(sp, 3 * kPointerSize));
    sw(src2, MemOperand(sp, 2 * kPointerSize));
    sw(src3, MemOperand(sp, 1 * kPointerSize));
    sw(src4, MemOperand(sp, 0 * kPointerSize));
  }

  void Push(Register src, Condition cond, Register tst1, Register tst2) {
    // Since we don't have conditional execution we use a Branch.
    Branch(3, cond, tst1, Operand(tst2));
    Subu(sp, sp, Operand(kPointerSize));
    sw(src, MemOperand(sp, 0));
  }

  // Pops multiple values from the stack and loads them into the
  // registers specified in regs. Pop order is the opposite of MultiPush.
  void MultiPop(RegList regs);
  void MultiPopReversed(RegList regs);

  void MultiPopFPU(RegList regs);
  void MultiPopReversedFPU(RegList regs);

  // Lower case pop() for compatibility with arch-independent code.
  void pop(Register dst) {
    lw(dst, MemOperand(sp, 0));
    Addu(sp, sp, Operand(kPointerSize));
  }

  // Pop two registers. Pops rightmost register first (from lower address).
  void Pop(Register src1, Register src2) {
    ASSERT(!src1.is(src2));
    lw(src2, MemOperand(sp, 0 * kPointerSize));
    lw(src1, MemOperand(sp, 1 * kPointerSize));
    Addu(sp, sp, 2 * kPointerSize);
  }

  void Pop(uint32_t count = 1) {
    Addu(sp, sp, Operand(count * kPointerSize));
  }
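
  // Illustrative usage (not from the original header): the multi-register
  // Push/Pop helpers keep the leftmost register at the highest address, so a
  // matching Pop restores the values that were pushed together:
  //
  //   __ Push(a1, a2);  // a1 ends up above a2 on the stack.
  //   __ Pop(a1, a2);   // Restores both and drops 2 * kPointerSize.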

  // Push and pop the registers that can hold pointers, as defined by the
  // RegList constant kSafepointSavedRegisters.
  void PushSafepointRegisters();
  void PopSafepointRegisters();
  void PushSafepointRegistersAndDoubles();
  void PopSafepointRegistersAndDoubles();
  // Store the value in register src in the safepoint stack slot for
  // register dst.
  void StoreToSafepointRegisterSlot(Register src, Register dst);
  void StoreToSafepointRegistersAndDoublesSlot(Register src, Register dst);
  // Load the value of the src register from its safepoint stack slot
  // into register dst.
  void LoadFromSafepointRegisterSlot(Register dst, Register src);

  // MIPS32 R2 instruction macro.
  void Ins(Register rt, Register rs, uint16_t pos, uint16_t size);
  void Ext(Register rt, Register rs, uint16_t pos, uint16_t size);

  // Convert unsigned word to double.
  void Cvt_d_uw(FPURegister fd, FPURegister fs, FPURegister scratch);
  void Cvt_d_uw(FPURegister fd, Register rs, FPURegister scratch);

  // Convert double to unsigned word.
  void Trunc_uw_d(FPURegister fd, FPURegister fs, FPURegister scratch);
  void Trunc_uw_d(FPURegister fd, Register rs, FPURegister scratch);

  // Convert the HeapNumber pointed to by source to a 32-bit signed integer
  // in dest. If the HeapNumber does not fit into a 32-bit signed integer,
  // branch to the not_int32 label. If FPU is available double_scratch is
  // used but not scratch2.
  void ConvertToInt32(Register source,
                      Register dest,
                      Register scratch,
                      Register scratch2,
                      FPURegister double_scratch,
                      Label *not_int32);

  // Helper for EmitECMATruncate.
  // This will truncate a floating-point value outside of the signed 32-bit
  // integer range to a 32-bit signed integer.
  // Expects the double value loaded in input_high and input_low.
  // Exits with the answer in 'result'.
  // Note that this code does not work for values in the 32-bit range!
  void EmitOutOfInt32RangeTruncate(Register result,
                                   Register input_high,
                                   Register input_low,
                                   Register scratch);

  // Performs a truncating conversion of a floating point number as used by
  // the JS bitwise operations. See ECMA-262 9.5: ToInt32.
  // Exits with 'result' holding the answer and all other registers clobbered.
  void EmitECMATruncate(Register result,
                        FPURegister double_input,
                        FPURegister single_scratch,
                        Register scratch,
                        Register scratch2,
                        Register scratch3);

  // -------------------------------------------------------------------------
  // Activation frames.

  void EnterInternalFrame() { EnterFrame(StackFrame::INTERNAL); }
  void LeaveInternalFrame() { LeaveFrame(StackFrame::INTERNAL); }

  void EnterConstructFrame() { EnterFrame(StackFrame::CONSTRUCT); }
  void LeaveConstructFrame() { LeaveFrame(StackFrame::CONSTRUCT); }

  // Enter exit frame.
  // argc - argument count to be dropped by LeaveExitFrame.
  // save_doubles - saves FPU registers on stack, currently disabled.
  // stack_space - extra stack space.
  void EnterExitFrame(bool save_doubles,
                      int stack_space = 0);

  // Leave the current exit frame.
  void LeaveExitFrame(bool save_doubles, Register arg_count);

  // Get the actual activation frame alignment for the target environment.
  static int ActivationFrameAlignment();

  // Make sure the stack is aligned. Only emits code in debug mode.
  void AssertStackIsAligned();

  void LoadContext(Register dst, int context_chain_length);

  void LoadGlobalFunction(int index, Register function);

  // Load the initial map from the global function. The registers
  // function and map can be the same, function is then overwritten.
  void LoadGlobalFunctionInitialMap(Register function,
                                    Register map,
                                    Register scratch);

  // -------------------------------------------------------------------------
  // JavaScript invokes.

  // Set up call kind marking in t1. The method takes t1 as an
  // explicit first parameter to make the code more readable at the
  // call sites.
  void SetCallKind(Register dst, CallKind kind);

  // Invoke the JavaScript function code by either calling or jumping.
  void InvokeCode(Register code,
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  InvokeFlag flag,
                  const CallWrapper& call_wrapper,
                  CallKind call_kind);

  void InvokeCode(Handle<Code> code,
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  RelocInfo::Mode rmode,
                  InvokeFlag flag,
                  CallKind call_kind);

  // Invoke the JavaScript function in the given register. Changes the
  // current context to the context in the function before invoking.
  void InvokeFunction(Register function,
                      const ParameterCount& actual,
                      InvokeFlag flag,
                      const CallWrapper& call_wrapper,
                      CallKind call_kind);

  void InvokeFunction(JSFunction* function,
                      const ParameterCount& actual,
                      InvokeFlag flag,
                      CallKind call_kind);


  void IsObjectJSObjectType(Register heap_object,
                            Register map,
                            Register scratch,
                            Label* fail);

  void IsInstanceJSObjectType(Register map,
                              Register scratch,
                              Label* fail);

  void IsObjectJSStringType(Register object,
                            Register scratch,
                            Label* fail);

#ifdef ENABLE_DEBUGGER_SUPPORT
  // -------------------------------------------------------------------------
  // Debugger Support.

  void DebugBreak();
#endif


  // -------------------------------------------------------------------------
  // Exception handling.

  // Push a new try handler and link it into the try handler chain.
  // The return address must be passed in register ra.
  // Clobbers t0, t1, t2.
  void PushTryHandler(CodeLocation try_location, HandlerType type);

  // Unlink the stack handler on top of the stack from the try handler chain.
  // Must preserve the result register.
  void PopTryHandler();

  // Passes the thrown value (in v0) to the handler at the top of the try
  // handler chain.
  void Throw(Register value);

  // Propagates an uncatchable exception to the top of the current JS stack's
  // handler chain.
  void ThrowUncatchable(UncatchableExceptionType type, Register value);

  // Copies a fixed number of fields of heap objects from src to dst.
  void CopyFields(Register dst, Register src, RegList temps, int field_count);

  // Copies a number of bytes from src to dst. All registers are clobbered. On
  // exit src and dst will point to the place just after where the last byte was
  // read or written and length will be zero.
  void CopyBytes(Register src,
                 Register dst,
                 Register length,
                 Register scratch);

  // -------------------------------------------------------------------------
  // Support functions.

  // Try to get the function prototype of a function and put the value in
  // the result register. Checks that the function really is a
  // function and jumps to the miss label if the fast checks fail. The
  // function register will be untouched; the other registers may be
  // clobbered.
  void TryGetFunctionPrototype(Register function,
                               Register result,
                               Register scratch,
                               Label* miss);

  void GetObjectType(Register function,
                     Register map,
                     Register type_reg);

  // Check if a map for a JSObject indicates that the object has fast elements.
  // Jump to the specified label if it does not.
  void CheckFastElements(Register map,
                         Register scratch,
                         Label* fail);

  // Check if the map of an object is equal to a specified map (either
  // given directly or as an index into the root list) and branch to
  // label if not. Skip the smi check if not required (object is known
  // to be a heap object).
  void CheckMap(Register obj,
                Register scratch,
                Handle<Map> map,
                Label* fail,
                SmiCheckType smi_check_type);

  void CheckMap(Register obj,
                Register scratch,
                Heap::RootListIndex index,
                Label* fail,
                SmiCheckType smi_check_type);

  // Check if the map of an object is equal to a specified map and branch to a
  // specified target if equal. Skip the smi check if not required (object is
  // known to be a heap object).
  void DispatchMap(Register obj,
                   Register scratch,
                   Handle<Map> map,
                   Handle<Code> success,
                   SmiCheckType smi_check_type);

  // Generates code for reporting that an illegal operation has
  // occurred.
  void IllegalOperation(int num_arguments);

  // Picks out an array index from the hash field.
  // Register use:
  //   hash - holds the index's hash. Clobbered.
  //   index - holds the overwritten index on exit.
  void IndexFromHash(Register hash, Register index);

  // Get the number of least significant bits from a register.
  void GetLeastBitsFromSmi(Register dst, Register src, int num_least_bits);
  void GetLeastBitsFromInt32(Register dst, Register src, int num_least_bits);

  // Load the value of a number object into a FPU double register. If the
  // object is not a number a jump to the label not_number is performed
  // and the FPU double register is unchanged.
  void ObjectToDoubleFPURegister(
      Register object,
      FPURegister value,
      Register scratch1,
      Register scratch2,
      Register heap_number_map,
      Label* not_number,
      ObjectToDoubleFlags flags = NO_OBJECT_TO_DOUBLE_FLAGS);

  // Load the value of a smi object into a FPU double register. The register
  // scratch1 can be the same register as smi in which case smi will hold the
  // untagged value afterwards.
  void SmiToDoubleFPURegister(Register smi,
                              FPURegister value,
                              Register scratch1);

  // -------------------------------------------------------------------------
  // Overflow handling functions.
  // Usage: first call the appropriate arithmetic function, then call one of the
  // jump functions with the overflow_dst register as the second parameter.

  void AdduAndCheckForOverflow(Register dst,
                               Register left,
                               Register right,
                               Register overflow_dst,
                               Register scratch = at);

  void SubuAndCheckForOverflow(Register dst,
                               Register left,
                               Register right,
                               Register overflow_dst,
                               Register scratch = at);

  void BranchOnOverflow(Label* label,
                        Register overflow_check,
                        BranchDelaySlot bd = PROTECT) {
    Branch(label, lt, overflow_check, Operand(zero_reg), bd);
  }

  void BranchOnNoOverflow(Label* label,
                          Register overflow_check,
                          BranchDelaySlot bd = PROTECT) {
    Branch(label, ge, overflow_check, Operand(zero_reg), bd);
  }

  void RetOnOverflow(Register overflow_check, BranchDelaySlot bd = PROTECT) {
    Ret(lt, overflow_check, Operand(zero_reg), bd);
  }

  void RetOnNoOverflow(Register overflow_check, BranchDelaySlot bd = PROTECT) {
    Ret(ge, overflow_check, Operand(zero_reg), bd);
  }
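
  // Illustrative usage (not from the original header) of the pattern the
  // usage comment at the top of this section describes: do the arithmetic
  // first, then branch on the sign of the overflow register.
  //
  //   __ AdduAndCheckForOverflow(v0, a0, a1, t3);  // t3 holds overflow state.
  //   __ BranchOnOverflow(&overflow_bailout, t3);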

  // -------------------------------------------------------------------------
  // Runtime calls.

  // Call a code stub.
  void CallStub(CodeStub* stub, Condition cond = cc_always,
                Register r1 = zero_reg, const Operand& r2 = Operand(zero_reg));

  // Call a code stub and return the code object called. Try to generate
  // the code if necessary. Do not perform a GC but instead return a retry
  // after GC failure.
  MUST_USE_RESULT MaybeObject* TryCallStub(CodeStub* stub,
                                           Condition cond = cc_always,
                                           Register r1 = zero_reg,
                                           const Operand& r2 =
                                               Operand(zero_reg));

  // Tail call a code stub (jump).
  void TailCallStub(CodeStub* stub);

  // Tail call a code stub (jump) and return the code object called. Try to
  // generate the code if necessary. Do not perform a GC but instead return
  // a retry after GC failure.
  MUST_USE_RESULT MaybeObject* TryTailCallStub(CodeStub* stub,
                                               Condition cond = cc_always,
                                               Register r1 = zero_reg,
                                               const Operand& r2 =
                                                   Operand(zero_reg));

  void CallJSExitStub(CodeStub* stub);

  // Call a runtime routine.
  void CallRuntime(const Runtime::Function* f, int num_arguments);
  void CallRuntimeSaveDoubles(Runtime::FunctionId id);

  // Convenience function: Same as above, but takes the fid instead.
  void CallRuntime(Runtime::FunctionId fid, int num_arguments);

  // Convenience function: call an external reference.
  void CallExternalReference(const ExternalReference& ext,
                             int num_arguments);

  // Tail call of a runtime routine (jump).
  // Like JumpToExternalReference, but also takes care of passing the number
  // of parameters.
  void TailCallExternalReference(const ExternalReference& ext,
                                 int num_arguments,
                                 int result_size);

  // Tail call of a runtime routine (jump). Try to generate the code if
  // necessary. Do not perform a GC but instead return a retry after GC
  // failure.
  MUST_USE_RESULT MaybeObject* TryTailCallExternalReference(
      const ExternalReference& ext, int num_arguments, int result_size);

  // Convenience function: tail call a runtime routine (jump).
  void TailCallRuntime(Runtime::FunctionId fid,
                       int num_arguments,
                       int result_size);

  // Before calling a C-function from generated code, align arguments on the
  // stack and add space for the four mips argument slots.
  // After aligning the frame, non-register arguments must be stored on the
  // stack, after the argument slots, using the helper CFunctionArgumentOperand().
  // The argument count assumes all arguments are word sized.
  // Some compilers/platforms require the stack to be aligned when calling
  // C++ code.
  // Needs a scratch register to do some arithmetic. This register will be
  // trashed.
  void PrepareCallCFunction(int num_arguments, Register scratch);

  // Arguments 1-4 are placed in registers a0 thru a3 respectively.
  // Arguments 5..n are stored on the stack using the following:
  //  sw(t0, CFunctionArgumentOperand(5));

  // Calls a C function and cleans up the space for arguments allocated
  // by PrepareCallCFunction. The called function is not allowed to trigger a
  // garbage collection, since that might move the code and invalidate the
  // return address (unless this is somehow accounted for by the called
  // function).
  void CallCFunction(ExternalReference function, int num_arguments);
  void CallCFunction(Register function, Register scratch, int num_arguments);
  void GetCFunctionDoubleResult(const DoubleRegister dst);
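
  // Illustrative sketch (not from the original header) tying the pieces above
  // together for a call with five word-sized arguments; the external
  // reference and argument registers are placeholders:
  //
  //   __ PrepareCallCFunction(5, scratch);
  //   // ... put arguments 1-4 in a0..a3 ...
  //   __ sw(t0, CFunctionArgumentOperand(5));  // Argument 5 goes on the stack.
  //   __ CallCFunction(some_external_reference, 5);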

  // There are two ways of passing double arguments on MIPS, depending on
  // whether soft or hard floating point ABI is used. These functions
  // abstract parameter passing for the three different ways we call
  // C functions from generated code.
  void SetCallCDoubleArguments(DoubleRegister dreg);
  void SetCallCDoubleArguments(DoubleRegister dreg1, DoubleRegister dreg2);
  void SetCallCDoubleArguments(DoubleRegister dreg, Register reg);

  // Calls an API function. Allocates HandleScope, extracts returned value
  // from handle and propagates exceptions. Restores context.
  MaybeObject* TryCallApiFunctionAndReturn(ExternalReference function,
                                           int stack_space);

  // Jump to the builtin routine.
  void JumpToExternalReference(const ExternalReference& builtin);

  MaybeObject* TryJumpToExternalReference(const ExternalReference& ext);

  // Invoke specified builtin JavaScript function. Adds an entry to
  // the unresolved list if the name does not resolve.
  void InvokeBuiltin(Builtins::JavaScript id,
                     InvokeFlag flag,
                     const CallWrapper& call_wrapper = NullCallWrapper());

  // Store the code object for the given builtin in the target register and
  // set up the function in a1.
  void GetBuiltinEntry(Register target, Builtins::JavaScript id);

  // Store the function for the given builtin in the target register.
  void GetBuiltinFunction(Register target, Builtins::JavaScript id);

  struct Unresolved {
    int pc;
    uint32_t flags;  // See Bootstrapper::FixupFlags decoders/encoders.
    const char* name;
  };

  Handle<Object> CodeObject() {
    ASSERT(!code_object_.is_null());
    return code_object_;
  }

  // -------------------------------------------------------------------------
  // StatsCounter support.

  void SetCounter(StatsCounter* counter, int value,
                  Register scratch1, Register scratch2);
  void IncrementCounter(StatsCounter* counter, int value,
                        Register scratch1, Register scratch2);
  void DecrementCounter(StatsCounter* counter, int value,
                        Register scratch1, Register scratch2);


  // -------------------------------------------------------------------------
  // Debugging.

  // Calls Abort(msg) if the condition cc is not satisfied.
  // Use --debug_code to enable.
  void Assert(Condition cc, const char* msg, Register rs, Operand rt);
  void AssertRegisterIsRoot(Register reg, Heap::RootListIndex index);
  void AssertFastElements(Register elements);

  // Like Assert(), but always enabled.
  void Check(Condition cc, const char* msg, Register rs, Operand rt);

  // Print a message to stdout and abort execution.
  void Abort(const char* msg);

  // Verify restrictions about code generated in stubs.
  void set_generating_stub(bool value) { generating_stub_ = value; }
  bool generating_stub() { return generating_stub_; }
  void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
  bool allow_stub_calls() { return allow_stub_calls_; }

  // ---------------------------------------------------------------------------
  // Number utilities.

  // Check whether the value of reg is a power of two and not zero. If not
  // control continues at the label not_power_of_two. If reg is a power of two
  // the register scratch contains the value of (reg - 1) when control falls
  // through.
  void JumpIfNotPowerOfTwoOrZero(Register reg,
                                 Register scratch,
                                 Label* not_power_of_two_or_zero);

  // -------------------------------------------------------------------------
  // Smi utilities.

  // Try to convert int32 to smi. If the value is too large, preserve
  // the original value and jump to not_a_smi. Destroys scratch and
  // sets flags.
  // This is only used by crankshaft atm so it is unimplemented on MIPS.
  void TrySmiTag(Register reg, Label* not_a_smi, Register scratch) {
    UNIMPLEMENTED_MIPS();
  }

  void SmiTag(Register reg) {
    Addu(reg, reg, reg);
  }

  void SmiTag(Register dst, Register src) {
    Addu(dst, src, src);
  }

  void SmiUntag(Register reg) {
    sra(reg, reg, kSmiTagSize);
  }

  void SmiUntag(Register dst, Register src) {
    sra(dst, src, kSmiTagSize);
  }
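
  // Illustrative note (not from the original header): with kSmiTag == 0 and
  // kSmiTagSize == 1 on 32-bit targets, tagging is just a doubling (hence the
  // Addu of a value with itself above) and untagging is an arithmetic shift
  // right by one:
  //
  //   value 0x00000005  --SmiTag-->  0x0000000a  --SmiUntag-->  0x00000005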

  // Jump if the register contains a smi.
  inline void JumpIfSmi(Register value, Label* smi_label,
                        Register scratch = at) {
    ASSERT_EQ(0, kSmiTag);
    andi(scratch, value, kSmiTagMask);
    Branch(smi_label, eq, scratch, Operand(zero_reg));
  }

  // Jump if the register contains a non-smi.
  inline void JumpIfNotSmi(Register value, Label* not_smi_label,
                           Register scratch = at) {
    ASSERT_EQ(0, kSmiTag);
    andi(scratch, value, kSmiTagMask);
    Branch(not_smi_label, ne, scratch, Operand(zero_reg));
  }

  // Jump if either of the registers contain a non-smi.
  void JumpIfNotBothSmi(Register reg1, Register reg2, Label* on_not_both_smi);
  // Jump if either of the registers contain a smi.
  void JumpIfEitherSmi(Register reg1, Register reg2, Label* on_either_smi);

  // Abort execution if argument is a smi. Used in debug code.
  void AbortIfSmi(Register object);
  void AbortIfNotSmi(Register object);

  // Abort execution if argument is not a string. Used in debug code.
  void AbortIfNotString(Register object);

  // Abort execution if argument is not the root value with the given index.
  void AbortIfNotRootValue(Register src,
                           Heap::RootListIndex root_value_index,
                           const char* message);

  // ---------------------------------------------------------------------------
  // HeapNumber utilities.

  void JumpIfNotHeapNumber(Register object,
                           Register heap_number_map,
                           Register scratch,
                           Label* on_not_heap_number);

  // -------------------------------------------------------------------------
  // String utilities.

  // Checks if both instance types are sequential ASCII strings and jumps to
  // label if either is not.
  void JumpIfBothInstanceTypesAreNotSequentialAscii(
      Register first_object_instance_type,
      Register second_object_instance_type,
      Register scratch1,
      Register scratch2,
      Label* failure);

  // Check if instance type is sequential ASCII string and jump to label if
  // it is not.
  void JumpIfInstanceTypeIsNotSequentialAscii(Register type,
                                              Register scratch,
                                              Label* failure);

  // Test that both first and second are sequential ASCII strings.
  // Assume that they are non-smis.
  void JumpIfNonSmisNotBothSequentialAsciiStrings(Register first,
                                                  Register second,
                                                  Register scratch1,
                                                  Register scratch2,
                                                  Label* failure);

  // Test that both first and second are sequential ASCII strings.
  // Check that they are non-smis.
  void JumpIfNotBothSequentialAsciiStrings(Register first,
                                           Register second,
                                           Register scratch1,
                                           Register scratch2,
                                           Label* failure);

  void LoadInstanceDescriptors(Register map, Register descriptors);

 private:
  void CallCFunctionHelper(Register function,
                           ExternalReference function_reference,
                           Register scratch,
                           int num_arguments);

  void BranchShort(int16_t offset, BranchDelaySlot bdslot = PROTECT);
  void BranchShort(int16_t offset, Condition cond, Register rs,
                   const Operand& rt,
                   BranchDelaySlot bdslot = PROTECT);
  void BranchShort(Label* L, BranchDelaySlot bdslot = PROTECT);
  void BranchShort(Label* L, Condition cond, Register rs,
                   const Operand& rt,
                   BranchDelaySlot bdslot = PROTECT);
  void BranchAndLinkShort(int16_t offset, BranchDelaySlot bdslot = PROTECT);
  void BranchAndLinkShort(int16_t offset, Condition cond, Register rs,
                          const Operand& rt,
                          BranchDelaySlot bdslot = PROTECT);
  void BranchAndLinkShort(Label* L, BranchDelaySlot bdslot = PROTECT);
  void BranchAndLinkShort(Label* L, Condition cond, Register rs,
                          const Operand& rt,
                          BranchDelaySlot bdslot = PROTECT);
  void J(Label* L, BranchDelaySlot bdslot);
  void Jr(Label* L, BranchDelaySlot bdslot);
  void Jalr(Label* L, BranchDelaySlot bdslot);

  // Helper functions for generating invokes.
  void InvokePrologue(const ParameterCount& expected,
                      const ParameterCount& actual,
                      Handle<Code> code_constant,
                      Register code_reg,
                      Label* done,
                      InvokeFlag flag,
                      const CallWrapper& call_wrapper,
                      CallKind call_kind);

  // Get the code for the given builtin. Returns if able to resolve
  // the function in the 'resolved' flag.
  Handle<Code> ResolveBuiltin(Builtins::JavaScript id, bool* resolved);

  // Activation support.
  void EnterFrame(StackFrame::Type type);
  void LeaveFrame(StackFrame::Type type);

  void InitializeNewString(Register string,
                           Register length,
                           Heap::RootListIndex map_index,
                           Register scratch1,
                           Register scratch2);

  // Compute memory operands for safepoint stack slots.
  static int SafepointRegisterStackIndex(int reg_code);
  MemOperand SafepointRegisterSlot(Register reg);
  MemOperand SafepointRegistersAndDoublesSlot(Register reg);

  bool UseAbsoluteCodePointers();

  bool generating_stub_;
  bool allow_stub_calls_;
  // This handle will be patched with the code object on installation.
  Handle<Object> code_object_;

  // Needs access to SafepointRegisterStackIndex for optimized frame
  // traversal.
  friend class OptimizedFrame;
};


// The code patcher is used to patch (typically) small parts of code, e.g. for
// debugging and other types of instrumentation. When using the code patcher
// the exact number of bytes specified must be emitted. It is not legal to emit
// relocation information. If any of these constraints are violated it causes
// an assertion to fail.
class CodePatcher {
 public:
  CodePatcher(byte* address, int instructions);
  virtual ~CodePatcher();

  // Macro assembler to emit code.
  MacroAssembler* masm() { return &masm_; }

  // Emit an instruction directly.
  void Emit(Instr instr);

  // Emit an address directly.
  void Emit(Address addr);

  // Change the condition part of an instruction, leaving the rest of the
  // current instruction unchanged.
  void ChangeBranchCondition(Condition cond);

 private:
  byte* address_;  // The address of the code being patched.
  int instructions_;  // Number of instructions of the expected patch size.
  int size_;  // Number of bytes of the expected patch size.
  MacroAssembler masm_;  // Macro assembler used to generate the code.
};


// -----------------------------------------------------------------------------
// Static helper functions.

static MemOperand ContextOperand(Register context, int index) {
  return MemOperand(context, Context::SlotOffset(index));
}


static inline MemOperand GlobalObjectOperand() {
  return ContextOperand(cp, Context::GLOBAL_INDEX);
}


// Generate a MemOperand for loading a field from an object.
static inline MemOperand FieldMemOperand(Register object, int offset) {
  return MemOperand(object, offset - kHeapObjectTag);
}
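
// Illustrative usage (not from the original header): FieldMemOperand hides
// the heap-object tag adjustment (offset - kHeapObjectTag) when accessing
// object fields, e.g.
//
//   __ lw(scratch, FieldMemOperand(object, HeapObject::kMapOffset));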


// Generate a MemOperand for storing arguments 5..N on the stack
// when calling CallCFunction().
static inline MemOperand CFunctionArgumentOperand(int index) {
  ASSERT(index > kCArgSlotCount);
  // Argument 5 takes the slot just past the four Arg-slots.
  int offset = (index - 5) * kPointerSize + kCArgsSlotsSize;
  return MemOperand(sp, offset);
}


#ifdef GENERATED_CODE_COVERAGE
#define CODE_COVERAGE_STRINGIFY(x) #x
#define CODE_COVERAGE_TOSTRING(x) CODE_COVERAGE_STRINGIFY(x)
#define __FILE_LINE__ __FILE__ ":" CODE_COVERAGE_TOSTRING(__LINE__)
#define ACCESS_MASM(masm) masm->stop(__FILE_LINE__); masm->
#else
#define ACCESS_MASM(masm) masm->
#endif

} }  // namespace v8::internal

#endif  // V8_MIPS_MACRO_ASSEMBLER_MIPS_H_