// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_ARM_MACRO_ASSEMBLER_ARM_H_
#define V8_ARM_MACRO_ASSEMBLER_ARM_H_

#include "assembler.h"

namespace v8 {
namespace internal {

// Forward declaration.
class PostCallGenerator;

// ----------------------------------------------------------------------------
// Static helper functions

// Generate a MemOperand for loading a field from an object.
static inline MemOperand FieldMemOperand(Register object, int offset) {
  return MemOperand(object, offset - kHeapObjectTag);
}


static inline Operand SmiUntagOperand(Register object) {
  return Operand(object, ASR, kSmiTagSize);
}
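
// Illustrative note (not part of the original header): heap object pointers
// are tagged, so FieldMemOperand subtracts kHeapObjectTag to address the raw
// field. A typical use inside MacroAssembler code is loading an object's map:
//   ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));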



// Give alias names to registers
const Register cp = { 8 };  // JavaScript context pointer
const Register roots = { 10 };  // Roots array pointer.

enum InvokeJSFlags {
  CALL_JS,
  JUMP_JS
};


// Flags used for the AllocateInNewSpace functions.
enum AllocationFlags {
  // No special flags.
  NO_ALLOCATION_FLAGS = 0,
  // Return the pointer to the allocated object already tagged as a heap
  // object.
  TAG_OBJECT = 1 << 0,
  // The content of the result register already contains the allocation top in
  // new space.
  RESULT_CONTAINS_TOP = 1 << 1,
  // Specify that the requested size of the space to allocate is specified in
  // words instead of bytes.
  SIZE_IN_WORDS = 1 << 2
};
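
// Illustrative note (not part of the original header): the allocation flags
// are independent bit values, so callers can combine them when invoking the
// AllocateInNewSpace functions declared below. Because operator| on enum
// values yields an int in C++, a cast back to the enum type is needed, e.g.:
//   static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS)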


// Flags used for the ObjectToDoubleVFPRegister function.
enum ObjectToDoubleFlags {
  // No special flags.
  NO_OBJECT_TO_DOUBLE_FLAGS = 0,
  // Object is known to be a non smi.
  OBJECT_NOT_SMI = 1 << 0,
  // Don't load NaNs or infinities, branch to the non number case instead.
  AVOID_NANS_AND_INFINITIES = 1 << 1
};


// MacroAssembler implements a collection of frequently used macros.
class MacroAssembler: public Assembler {
 public:
  MacroAssembler(void* buffer, int size);

  // Jump, Call, and Ret pseudo instructions implementing inter-working.
  void Jump(Register target, Condition cond = al);
  void Jump(byte* target, RelocInfo::Mode rmode, Condition cond = al);
  void Jump(Handle<Code> code, RelocInfo::Mode rmode, Condition cond = al);
  void Call(Register target, Condition cond = al);
  void Call(byte* target, RelocInfo::Mode rmode, Condition cond = al);
  void Call(Handle<Code> code, RelocInfo::Mode rmode, Condition cond = al);
  void Ret(Condition cond = al);

  // Emit code to discard a non-negative number of pointer-sized elements
  // from the stack, clobbering only the sp register.
  void Drop(int count, Condition cond = al);

  void Ret(int drop, Condition cond = al);

  // Swap two registers. If the scratch register is omitted then a slightly
  // less efficient form using xor instead of mov is emitted.
  void Swap(Register reg1,
            Register reg2,
            Register scratch = no_reg,
            Condition cond = al);


  void And(Register dst, Register src1, const Operand& src2,
           Condition cond = al);
  void Ubfx(Register dst, Register src, int lsb, int width,
            Condition cond = al);
  void Sbfx(Register dst, Register src, int lsb, int width,
            Condition cond = al);
  // The scratch register is not used for ARMv7.
  // scratch can be the same register as src (in which case it is trashed), but
  // not the same as dst.
  void Bfi(Register dst,
           Register src,
           Register scratch,
           int lsb,
           int width,
           Condition cond = al);
  void Bfc(Register dst, int lsb, int width, Condition cond = al);
  void Usat(Register dst, int satpos, const Operand& src,
            Condition cond = al);

  void Call(Label* target);
  void Move(Register dst, Handle<Object> value);
  // May do nothing if the registers are identical.
  void Move(Register dst, Register src);
  // Jumps to the label at the index given by the Smi in "index".
  void SmiJumpTable(Register index, Vector<Label*> targets);
  // Load an object from the root table.
  void LoadRoot(Register destination,
                Heap::RootListIndex index,
                Condition cond = al);
  // Store an object to the root table.
  void StoreRoot(Register source,
                 Heap::RootListIndex index,
                 Condition cond = al);


  // Check if object is in new space.
  // scratch can be object itself, but it will be clobbered.
  void InNewSpace(Register object,
                  Register scratch,
                  Condition cond,  // eq for new space, ne otherwise
                  Label* branch);


  // For the page containing |object| mark the region covering [address]
  // dirty. The object address must be in the first 8K of an allocated page.
  void RecordWriteHelper(Register object,
                         Register address,
                         Register scratch);

  // For the page containing |object| mark the region covering
  // [object+offset] dirty. The object address must be in the first 8K
  // of an allocated page. The 'scratch' registers are used in the
  // implementation and all 3 registers are clobbered by the
  // operation, as well as the ip register. RecordWrite updates the
  // write barrier even when storing smis.
  void RecordWrite(Register object,
                   Operand offset,
                   Register scratch0,
                   Register scratch1);

  // For the page containing |object| mark the region covering
  // [address] dirty. The object address must be in the first 8K of an
  // allocated page. All 3 registers are clobbered by the operation,
  // as well as the ip register. RecordWrite updates the write barrier
  // even when storing smis.
  void RecordWrite(Register object,
                   Register address,
                   Register scratch);

  // Push two registers. Pushes leftmost register first (to highest address).
  void Push(Register src1, Register src2, Condition cond = al) {
    ASSERT(!src1.is(src2));
    if (src1.code() > src2.code()) {
      stm(db_w, sp, src1.bit() | src2.bit(), cond);
    } else {
      str(src1, MemOperand(sp, 4, NegPreIndex), cond);
      str(src2, MemOperand(sp, 4, NegPreIndex), cond);
    }
  }

  // Push three registers. Pushes leftmost register first (to highest address).
  void Push(Register src1, Register src2, Register src3, Condition cond = al) {
    ASSERT(!src1.is(src2));
    ASSERT(!src2.is(src3));
    ASSERT(!src1.is(src3));
    if (src1.code() > src2.code()) {
      if (src2.code() > src3.code()) {
        stm(db_w, sp, src1.bit() | src2.bit() | src3.bit(), cond);
      } else {
        stm(db_w, sp, src1.bit() | src2.bit(), cond);
        str(src3, MemOperand(sp, 4, NegPreIndex), cond);
      }
    } else {
      str(src1, MemOperand(sp, 4, NegPreIndex), cond);
      Push(src2, src3, cond);
    }
  }

  // Push four registers. Pushes leftmost register first (to highest address).
  void Push(Register src1, Register src2,
            Register src3, Register src4, Condition cond = al) {
    ASSERT(!src1.is(src2));
    ASSERT(!src2.is(src3));
    ASSERT(!src1.is(src3));
    ASSERT(!src1.is(src4));
    ASSERT(!src2.is(src4));
    ASSERT(!src3.is(src4));
    if (src1.code() > src2.code()) {
      if (src2.code() > src3.code()) {
        if (src3.code() > src4.code()) {
          stm(db_w,
              sp,
              src1.bit() | src2.bit() | src3.bit() | src4.bit(),
              cond);
        } else {
          stm(db_w, sp, src1.bit() | src2.bit() | src3.bit(), cond);
          str(src4, MemOperand(sp, 4, NegPreIndex), cond);
        }
      } else {
        stm(db_w, sp, src1.bit() | src2.bit(), cond);
        Push(src3, src4, cond);
      }
    } else {
      str(src1, MemOperand(sp, 4, NegPreIndex), cond);
      Push(src2, src3, src4, cond);
    }
  }

  // Pop two registers. Pops rightmost register first (from lower address).
  void Pop(Register src1, Register src2, Condition cond = al) {
    ASSERT(!src1.is(src2));
    if (src1.code() > src2.code()) {
      ldm(ia_w, sp, src1.bit() | src2.bit(), cond);
    } else {
      ldr(src2, MemOperand(sp, 4, PostIndex), cond);
      ldr(src1, MemOperand(sp, 4, PostIndex), cond);
    }
  }
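
  // Illustrative note (not part of the original header): "leftmost register
  // first (to highest address)" means that after Push(r1, r2, r3) the stack
  // holds, with the stack growing towards lower addresses:
  //   sp + 8 : value of r1
  //   sp + 4 : value of r2
  //   sp + 0 : value of r3
  // Pop(r1, r2) reverses a two-register push, restoring both registers.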

  // Push and pop the registers that can hold pointers, as defined by the
  // RegList constant kSafepointSavedRegisters.
  void PushSafepointRegisters();
  void PopSafepointRegisters();
  void PushSafepointRegistersAndDoubles();
  void PopSafepointRegistersAndDoubles();
  // Store value in register src in the safepoint stack slot for
  // register dst.
  void StoreToSafepointRegisterSlot(Register src, Register dst);
  void StoreToSafepointRegistersAndDoublesSlot(Register src, Register dst);
  // Load the value of the src register from its safepoint stack slot
  // into register dst.
  void LoadFromSafepointRegisterSlot(Register dst, Register src);

  // Load two consecutive registers with two consecutive memory locations.
  void Ldrd(Register dst1,
            Register dst2,
            const MemOperand& src,
            Condition cond = al);

  // Store two consecutive registers to two consecutive memory locations.
  void Strd(Register src1,
            Register src2,
            const MemOperand& dst,
            Condition cond = al);

  // Clear specified FPSCR bits.
  void ClearFPSCRBits(const uint32_t bits_to_clear,
                      const Register scratch,
                      const Condition cond = al);

  // Compare double values and move the result to the normal condition flags.
  void VFPCompareAndSetFlags(const DwVfpRegister src1,
                             const DwVfpRegister src2,
                             const Condition cond = al);
  void VFPCompareAndSetFlags(const DwVfpRegister src1,
                             const double src2,
                             const Condition cond = al);

  // Compare double values and then load the fpscr flags to a register.
  void VFPCompareAndLoadFlags(const DwVfpRegister src1,
                              const DwVfpRegister src2,
                              const Register fpscr_flags,
                              const Condition cond = al);
  void VFPCompareAndLoadFlags(const DwVfpRegister src1,
                              const double src2,
                              const Register fpscr_flags,
                              const Condition cond = al);


  // ---------------------------------------------------------------------------
  // Activation frames

  void EnterInternalFrame() { EnterFrame(StackFrame::INTERNAL); }
  void LeaveInternalFrame() { LeaveFrame(StackFrame::INTERNAL); }

  void EnterConstructFrame() { EnterFrame(StackFrame::CONSTRUCT); }
  void LeaveConstructFrame() { LeaveFrame(StackFrame::CONSTRUCT); }

  // Enter exit frame.
  // stack_space - extra stack space, used for alignment before call to C.
  void EnterExitFrame(bool save_doubles, int stack_space = 0);
  // Leave the current exit frame. Expects the return value in r0.
  // The argument_count register holds the number of values pushed prior to
  // the exit frame that should be removed (or no_reg if there is nothing to
  // remove).
  void LeaveExitFrame(bool save_doubles, Register argument_count);

  // Get the actual activation frame alignment for target environment.
  static int ActivationFrameAlignment();

  void LoadContext(Register dst, int context_chain_length);

  void LoadGlobalFunction(int index, Register function);

  // Load the initial map from the global function. The registers
  // function and map can be the same, function is then overwritten.
  void LoadGlobalFunctionInitialMap(Register function,
                                    Register map,
                                    Register scratch);

  // ---------------------------------------------------------------------------
  // JavaScript invokes

  // Invoke the JavaScript function code by either calling or jumping.
  void InvokeCode(Register code,
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  InvokeFlag flag,
                  PostCallGenerator* post_call_generator = NULL);

  void InvokeCode(Handle<Code> code,
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  RelocInfo::Mode rmode,
                  InvokeFlag flag);

  // Invoke the JavaScript function in the given register. Changes the
  // current context to the context in the function before invoking.
  void InvokeFunction(Register function,
                      const ParameterCount& actual,
                      InvokeFlag flag,
                      PostCallGenerator* post_call_generator = NULL);

  void InvokeFunction(JSFunction* function,
                      const ParameterCount& actual,
                      InvokeFlag flag);

  void IsObjectJSObjectType(Register heap_object,
                            Register map,
                            Register scratch,
                            Label* fail);

  void IsInstanceJSObjectType(Register map,
                              Register scratch,
                              Label* fail);

  void IsObjectJSStringType(Register object,
                            Register scratch,
                            Label* fail);

#ifdef ENABLE_DEBUGGER_SUPPORT
  // ---------------------------------------------------------------------------
  // Debugger Support

  void DebugBreak();
#endif

  // ---------------------------------------------------------------------------
  // Exception handling

  // Push a new try handler and link into try handler chain.
  // The return address must be passed in register lr.
  // On exit, r0 contains TOS (code slot).
  void PushTryHandler(CodeLocation try_location, HandlerType type);

  // Unlink the stack handler on top of the stack from the try handler chain.
  // Must preserve the result register.
  void PopTryHandler();

  // Passes thrown value (in r0) to the handler on top of the try handler
  // chain.
  void Throw(Register value);

  // Propagates an uncatchable exception to the top of the current JS stack's
  // handler chain.
  void ThrowUncatchable(UncatchableExceptionType type, Register value);

  // ---------------------------------------------------------------------------
  // Inline caching support

  // Generate code for checking access rights - used for security checks
  // on access to global objects across environments. The holder register
  // is left untouched, whereas both scratch registers are clobbered.
  void CheckAccessGlobalProxy(Register holder_reg,
                              Register scratch,
                              Label* miss);

  inline void MarkCode(NopMarkerTypes type) {
    nop(type);
  }

  // Check if the given instruction is a 'type' marker,
  // i.e. check if it is a mov r<type>, r<type> (referenced as nop(type)).
  // These instructions are generated to mark special locations in the code,
  // like some special IC code.
  static inline bool IsMarkedCode(Instr instr, int type) {
    ASSERT((FIRST_IC_MARKER <= type) && (type < LAST_CODE_MARKER));
    return IsNop(instr, type);
  }


  static inline int GetCodeMarker(Instr instr) {
    int dst_reg_offset = 12;
    int dst_mask = 0xf << dst_reg_offset;
    int src_mask = 0xf;
    int dst_reg = (instr & dst_mask) >> dst_reg_offset;
    int src_reg = instr & src_mask;
    uint32_t non_register_mask = ~(dst_mask | src_mask);
    uint32_t mov_mask = al | 13 << 21;

    // Return <n> if we have a mov rn, rn, else return -1.
    int type = ((instr & non_register_mask) == mov_mask) &&
               (dst_reg == src_reg) &&
               (FIRST_IC_MARKER <= dst_reg) && (dst_reg < LAST_CODE_MARKER)
                   ? src_reg
                   : -1;
    ASSERT((type == -1) ||
           ((FIRST_IC_MARKER <= type) && (type < LAST_CODE_MARKER)));
    return type;
  }
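
  // Illustrative note (not part of the original header): a code marker is an
  // ARM "mov rN, rN" instruction, which has no effect but encodes N twice.
  // GetCodeMarker() above masks out the destination register (bits 12-15)
  // and the source register (bits 0-3); mov_mask = al | 13 << 21 matches the
  // remaining bits of a MOV with the 'al' condition. So an instruction
  // emitted by MarkCode(type) is later recognized by GetCodeMarker(), which
  // returns that same type value.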


  // ---------------------------------------------------------------------------
  // Allocation support

  // Allocate an object in new space. The object_size is specified
  // either in bytes or in words if the allocation flag SIZE_IN_WORDS
  // is passed. If the new space is exhausted control continues at the
  // gc_required label. The allocated object is returned in result. If
  // the TAG_OBJECT flag is passed, the result is tagged as a heap
  // object. All registers are clobbered also when control continues
  // at the gc_required label.
  void AllocateInNewSpace(int object_size,
                          Register result,
                          Register scratch1,
                          Register scratch2,
                          Label* gc_required,
                          AllocationFlags flags);
  void AllocateInNewSpace(Register object_size,
                          Register result,
                          Register scratch1,
                          Register scratch2,
                          Label* gc_required,
                          AllocationFlags flags);

  // Undo allocation in new space. The object passed and objects allocated after
  // it will no longer be allocated. The caller must make sure that no pointers
  // are left to the object(s) no longer allocated as they would be invalid when
  // allocation is undone.
  void UndoAllocationInNewSpace(Register object, Register scratch);


  void AllocateTwoByteString(Register result,
                             Register length,
                             Register scratch1,
                             Register scratch2,
                             Register scratch3,
                             Label* gc_required);
  void AllocateAsciiString(Register result,
                           Register length,
                           Register scratch1,
                           Register scratch2,
                           Register scratch3,
                           Label* gc_required);
  void AllocateTwoByteConsString(Register result,
                                 Register length,
                                 Register scratch1,
                                 Register scratch2,
                                 Label* gc_required);
  void AllocateAsciiConsString(Register result,
                               Register length,
                               Register scratch1,
                               Register scratch2,
                               Label* gc_required);

  // Allocates a heap number or jumps to the gc_required label if the young
  // space is full and a scavenge is needed. All registers are clobbered also
  // when control continues at the gc_required label.
  void AllocateHeapNumber(Register result,
                          Register scratch1,
                          Register scratch2,
                          Register heap_number_map,
                          Label* gc_required);
  void AllocateHeapNumberWithValue(Register result,
                                   DwVfpRegister value,
                                   Register scratch1,
                                   Register scratch2,
                                   Register heap_number_map,
                                   Label* gc_required);

  // Copies a fixed number of fields of heap objects from src to dst.
  void CopyFields(Register dst, Register src, RegList temps, int field_count);

  // Copies a number of bytes from src to dst. All registers are clobbered. On
  // exit src and dst will point to the place just after where the last byte was
  // read or written and length will be zero.
  void CopyBytes(Register src,
                 Register dst,
                 Register length,
                 Register scratch);

  // ---------------------------------------------------------------------------
  // Support functions.

  // Try to get the function prototype of a function and put the value in
  // the result register. Checks that the function really is a
  // function and jumps to the miss label if the fast checks fail. The
  // function register will be untouched; the other registers may be
  // clobbered.
  void TryGetFunctionPrototype(Register function,
                               Register result,
                               Register scratch,
                               Label* miss);

  // Compare object type for heap object. heap_object contains a non-Smi
  // whose object type should be compared with the given type. This both
  // sets the flags and leaves the object type in the type_reg register.
  // It leaves the map in the map register (unless the type_reg and map register
  // are the same register). It leaves the heap object in the heap_object
  // register unless the heap_object register is the same register as one of the
  // other registers.
  void CompareObjectType(Register heap_object,
                         Register map,
                         Register type_reg,
                         InstanceType type);

  // Compare instance type in a map. map contains a valid map object whose
  // object type should be compared with the given type. This both
  // sets the flags and leaves the object type in the type_reg register. It
  // leaves the heap object in the heap_object register unless the heap_object
  // register is the same register as type_reg.
  void CompareInstanceType(Register map,
                           Register type_reg,
                           InstanceType type);

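  // Illustrative sketch (not part of the original header): a typical type
  // check loads the map and instance type, sets the flags, and branches on
  // them; register and label names below are placeholders:
  //   CompareObjectType(receiver, scratch_map, scratch_type, JS_ARRAY_TYPE);
  //   b(ne, &not_a_js_array);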

  // Check if the map of an object is equal to a specified map (either
  // given directly or as an index into the root list) and branch to
  // label if not. Skip the smi check if not required (object is known
  // to be a heap object).
  void CheckMap(Register obj,
                Register scratch,
                Handle<Map> map,
                Label* fail,
                bool is_heap_object);

  void CheckMap(Register obj,
                Register scratch,
                Heap::RootListIndex index,
                Label* fail,
                bool is_heap_object);


  // Compare the object in a register to a value from the root list.
  // Uses the ip register as scratch.
  void CompareRoot(Register obj, Heap::RootListIndex index);


  // Load and check the instance type of an object for being a string.
  // Loads the type into the second argument register.
  // Returns a condition that will be enabled if the object was a string.
  Condition IsObjectStringType(Register obj,
                               Register type) {
    ldr(type, FieldMemOperand(obj, HeapObject::kMapOffset));
    ldrb(type, FieldMemOperand(type, Map::kInstanceTypeOffset));
    tst(type, Operand(kIsNotStringMask));
    ASSERT_EQ(0, kStringTag);
    return eq;
  }


  // Generates code for reporting that an illegal operation has
  // occurred.
  void IllegalOperation(int num_arguments);

  // Picks out an array index from the hash field.
  // Register use:
  //   hash - holds the index's hash. Clobbered.
  //   index - holds the overwritten index on exit.
  void IndexFromHash(Register hash, Register index);

  // Get the number of least significant bits from a register.
  void GetLeastBitsFromSmi(Register dst, Register src, int num_least_bits);
  void GetLeastBitsFromInt32(Register dst, Register src, int num_least_bits);

  // Uses VFP instructions to convert a Smi to a double.
  void IntegerToDoubleConversionWithVFP3(Register inReg,
                                         Register outHighReg,
                                         Register outLowReg);

  // Load the value of a number object into a VFP double register. If the object
  // is not a number a jump to the label not_number is performed and the VFP
  // double register is unchanged.
  void ObjectToDoubleVFPRegister(
      Register object,
      DwVfpRegister value,
      Register scratch1,
      Register scratch2,
      Register heap_number_map,
      SwVfpRegister scratch3,
      Label* not_number,
      ObjectToDoubleFlags flags = NO_OBJECT_TO_DOUBLE_FLAGS);

  // Load the value of a smi object into a VFP double register. The register
  // scratch1 can be the same register as smi in which case smi will hold the
  // untagged value afterwards.
  void SmiToDoubleVFPRegister(Register smi,
                              DwVfpRegister value,
                              Register scratch1,
                              SwVfpRegister scratch2);

  // Convert the HeapNumber pointed to by source to a 32-bit signed integer in
  // dest. If the HeapNumber does not fit into a 32-bit signed integer, branch
  // to the not_int32 label. If VFP3 is available double_scratch is used but
  // not scratch2.
  void ConvertToInt32(Register source,
                      Register dest,
                      Register scratch,
                      Register scratch2,
                      DwVfpRegister double_scratch,
                      Label* not_int32);

  // Truncates a double using a specific rounding mode.
  // Clears the z flag (ne condition) if an overflow occurs.
  // If checking for an inexact conversion is requested, the z flag is also
  // cleared if the conversion was inexact, i.e. if the double value could not
  // be converted exactly to a 32-bit integer.
  void EmitVFPTruncate(VFPRoundingMode rounding_mode,
                       SwVfpRegister result,
                       DwVfpRegister double_input,
                       Register scratch1,
                       Register scratch2,
                       CheckForInexactConversion check
                           = kDontCheckForInexactConversion);

  // Count leading zeros in a 32 bit word. On ARM5 and later it uses the clz
  // instruction. On pre-ARM5 hardware this routine gives the wrong answer
  // for 0 (31 instead of 32). Source and scratch can be the same in which case
  // the source is clobbered. Source and zeros can also be the same in which
  // case scratch should be a different register.
  void CountLeadingZeros(Register zeros,
                         Register source,
                         Register scratch);

  // ---------------------------------------------------------------------------
  // Runtime calls

  // Call a code stub.
  void CallStub(CodeStub* stub, Condition cond = al);

  // Tail call a code stub (jump).
  void TailCallStub(CodeStub* stub, Condition cond = al);

  // Tail call a code stub (jump) and return the code object called. Try to
  // generate the code if necessary. Do not perform a GC but instead return
  // a retry after GC failure.
  MUST_USE_RESULT MaybeObject* TryTailCallStub(CodeStub* stub,
                                               Condition cond = al);

  // Call a runtime routine.
  void CallRuntime(Runtime::Function* f, int num_arguments);
  void CallRuntimeSaveDoubles(Runtime::FunctionId id);

  // Convenience function: Same as above, but takes the fid instead.
  void CallRuntime(Runtime::FunctionId fid, int num_arguments);

  // Convenience function: call an external reference.
  void CallExternalReference(const ExternalReference& ext,
                             int num_arguments);

  // Tail call of a runtime routine (jump).
  // Like JumpToExternalReference, but also takes care of passing the number
  // of parameters.
  void TailCallExternalReference(const ExternalReference& ext,
                                 int num_arguments,
                                 int result_size);

  // Tail call of a runtime routine (jump). Try to generate the code if
  // necessary. Do not perform a GC but instead return a retry after GC
  // failure.
  MUST_USE_RESULT MaybeObject* TryTailCallExternalReference(
      const ExternalReference& ext, int num_arguments, int result_size);

  // Convenience function: tail call a runtime routine (jump).
  void TailCallRuntime(Runtime::FunctionId fid,
                       int num_arguments,
                       int result_size);

  // Before calling a C-function from generated code, align arguments on stack.
  // After aligning the frame, non-register arguments must be stored in
  // sp[0], sp[4], etc., not pushed. The argument count assumes all arguments
  // are word sized.
  // Some compilers/platforms require the stack to be aligned when calling
  // C++ code.
  // Needs a scratch register to do some arithmetic. This register will be
  // trashed.
  void PrepareCallCFunction(int num_arguments, Register scratch);

  // Calls a C function and cleans up the space for arguments allocated
  // by PrepareCallCFunction. The called function is not allowed to trigger a
  // garbage collection, since that might move the code and invalidate the
  // return address (unless this is somehow accounted for by the called
  // function).
  void CallCFunction(ExternalReference function, int num_arguments);
  void CallCFunction(Register function, int num_arguments);

  void GetCFunctionDoubleResult(const DoubleRegister dst);

  // Calls an API function. Allocates HandleScope, extracts returned value
  // from handle and propagates exceptions. Restores context.
  // stack_space - space to be unwound on exit (includes the call js
  // arguments space and the additional space allocated for the fast call).
  MaybeObject* TryCallApiFunctionAndReturn(ExternalReference function,
                                           int stack_space);

  // Jump to a runtime routine.
  void JumpToExternalReference(const ExternalReference& builtin);

  MaybeObject* TryJumpToExternalReference(const ExternalReference& ext);

  // Invoke specified builtin JavaScript function. Adds an entry to
  // the unresolved list if the name does not resolve.
  void InvokeBuiltin(Builtins::JavaScript id,
                     InvokeJSFlags flags,
                     PostCallGenerator* post_call_generator = NULL);

  // Store the code object for the given builtin in the target register and
  // setup the function in r1.
  void GetBuiltinEntry(Register target, Builtins::JavaScript id);

  // Store the function for the given builtin in the target register.
  void GetBuiltinFunction(Register target, Builtins::JavaScript id);

  Handle<Object> CodeObject() { return code_object_; }


  // ---------------------------------------------------------------------------
  // StatsCounter support

  void SetCounter(StatsCounter* counter, int value,
                  Register scratch1, Register scratch2);
  void IncrementCounter(StatsCounter* counter, int value,
                        Register scratch1, Register scratch2);
  void DecrementCounter(StatsCounter* counter, int value,
                        Register scratch1, Register scratch2);


  // ---------------------------------------------------------------------------
  // Debugging

  // Calls Abort(msg) if the condition cond is not satisfied.
  // Use --debug_code to enable.
  void Assert(Condition cond, const char* msg);
  void AssertRegisterIsRoot(Register reg, Heap::RootListIndex index);
  void AssertFastElements(Register elements);

  // Like Assert(), but always enabled.
  void Check(Condition cond, const char* msg);

  // Print a message to stdout and abort execution.
  void Abort(const char* msg);

  // Verify restrictions about code generated in stubs.
  void set_generating_stub(bool value) { generating_stub_ = value; }
  bool generating_stub() { return generating_stub_; }
  void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
  bool allow_stub_calls() { return allow_stub_calls_; }

  // ---------------------------------------------------------------------------
  // Number utilities

  // Check whether the value of reg is a power of two and not zero. If not
  // control continues at the label not_power_of_two. If reg is a power of two
  // the register scratch contains the value of (reg - 1) when control falls
  // through.
  void JumpIfNotPowerOfTwoOrZero(Register reg,
                                 Register scratch,
                                 Label* not_power_of_two_or_zero);

  // ---------------------------------------------------------------------------
  // Smi utilities

  void SmiTag(Register reg, SBit s = LeaveCC) {
    add(reg, reg, Operand(reg), s);
  }
  void SmiTag(Register dst, Register src, SBit s = LeaveCC) {
    add(dst, src, Operand(src), s);
  }

  // Try to convert int32 to smi. If the value is too large, preserve
  // the original value and jump to not_a_smi. Destroys scratch and
  // sets flags.
  void TrySmiTag(Register reg, Label* not_a_smi, Register scratch) {
    mov(scratch, reg);
    SmiTag(scratch, SetCC);
    b(vs, not_a_smi);
    mov(reg, scratch);
  }

  void SmiUntag(Register reg, SBit s = LeaveCC) {
    mov(reg, Operand(reg, ASR, kSmiTagSize), s);
  }
  void SmiUntag(Register dst, Register src, SBit s = LeaveCC) {
    mov(dst, Operand(src, ASR, kSmiTagSize), s);
  }
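
  // Illustrative note (not part of the original header): with kSmiTag == 0
  // and kSmiTagSize == 1, a smi stores the integer shifted left by one bit,
  // so SmiTag() doubles the value (add reg, reg, reg) and SmiUntag() is an
  // arithmetic shift right by one. For example, the integer 5 is held as the
  // smi bit pattern 10 (binary 1010), and shifting back right yields 5 again.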

  // Jump if the register contains a smi.
  inline void JumpIfSmi(Register value, Label* smi_label) {
    tst(value, Operand(kSmiTagMask));
    b(eq, smi_label);
  }
  // Jump if the register contains a non-smi.
  inline void JumpIfNotSmi(Register value, Label* not_smi_label) {
    tst(value, Operand(kSmiTagMask));
    b(ne, not_smi_label);
  }
  // Jump if either of the registers contain a non-smi.
  void JumpIfNotBothSmi(Register reg1, Register reg2, Label* on_not_both_smi);
  // Jump if either of the registers contain a smi.
  void JumpIfEitherSmi(Register reg1, Register reg2, Label* on_either_smi);

  // Abort execution if argument is a smi. Used in debug code.
  void AbortIfSmi(Register object);
  void AbortIfNotSmi(Register object);

  // Abort execution if argument is not a string. Used in debug code.
  void AbortIfNotString(Register object);

  // Abort execution if argument is not the root value with the given index.
  void AbortIfNotRootValue(Register src,
                           Heap::RootListIndex root_value_index,
                           const char* message);

  // ---------------------------------------------------------------------------
  // HeapNumber utilities

  void JumpIfNotHeapNumber(Register object,
                           Register heap_number_map,
                           Register scratch,
                           Label* on_not_heap_number);

  // ---------------------------------------------------------------------------
  // String utilities

  // Checks if both objects are sequential ASCII strings and jumps to label
  // if either is not. Assumes that neither object is a smi.
  void JumpIfNonSmisNotBothSequentialAsciiStrings(Register object1,
                                                  Register object2,
                                                  Register scratch1,
                                                  Register scratch2,
                                                  Label* failure);

  // Checks if both objects are sequential ASCII strings and jumps to label
  // if either is not.
  void JumpIfNotBothSequentialAsciiStrings(Register first,
                                           Register second,
                                           Register scratch1,
                                           Register scratch2,
                                           Label* not_flat_ascii_strings);

  // Checks if both instance types are sequential ASCII strings and jumps to
  // label if either is not.
  void JumpIfBothInstanceTypesAreNotSequentialAscii(
      Register first_object_instance_type,
      Register second_object_instance_type,
      Register scratch1,
      Register scratch2,
      Label* failure);

  // Check if instance type is sequential ASCII string and jump to label if
  // it is not.
  void JumpIfInstanceTypeIsNotSequentialAscii(Register type,
                                              Register scratch,
                                              Label* failure);


  // ---------------------------------------------------------------------------
  // Patching helpers.

  // Get the location of a relocated constant (its address in the constant pool)
  // from its load site.
  void GetRelocatedValueLocation(Register ldr_location,
                                 Register result);


 private:
  void Jump(intptr_t target, RelocInfo::Mode rmode, Condition cond = al);
  void Call(intptr_t target, RelocInfo::Mode rmode, Condition cond = al);

  // Helper functions for generating invokes.
  void InvokePrologue(const ParameterCount& expected,
                      const ParameterCount& actual,
                      Handle<Code> code_constant,
                      Register code_reg,
                      Label* done,
                      InvokeFlag flag,
                      PostCallGenerator* post_call_generator = NULL);

  // Activation support.
  void EnterFrame(StackFrame::Type type);
  void LeaveFrame(StackFrame::Type type);

  void InitializeNewString(Register string,
                           Register length,
                           Heap::RootListIndex map_index,
                           Register scratch1,
                           Register scratch2);

  // Compute memory operands for safepoint stack slots.
  static int SafepointRegisterStackIndex(int reg_code);
  MemOperand SafepointRegisterSlot(Register reg);
  MemOperand SafepointRegistersAndDoublesSlot(Register reg);

  bool generating_stub_;
  bool allow_stub_calls_;
  // This handle will be patched with the code object on installation.
  Handle<Object> code_object_;

  // Needs access to SafepointRegisterStackIndex for optimized frame
  // traversal.
  friend class OptimizedFrame;
};


#ifdef ENABLE_DEBUGGER_SUPPORT
// The code patcher is used to patch (typically) small parts of code e.g. for
// debugging and other types of instrumentation. When using the code patcher
// the exact number of bytes specified must be emitted. It is not legal to emit
// relocation information. If any of these constraints are violated it causes
// an assertion to fail.
class CodePatcher {
 public:
  CodePatcher(byte* address, int instructions);
  virtual ~CodePatcher();

  // Macro assembler to emit code.
  MacroAssembler* masm() { return &masm_; }

  // Emit an instruction directly.
  void Emit(Instr instr);

  // Emit an address directly.
  void Emit(Address addr);

  // Emit the condition part of an instruction leaving the rest of the current
  // instruction unchanged.
  void EmitCondition(Condition cond);

 private:
  byte* address_;  // The address of the code being patched.
  int instructions_;  // Number of instructions of the expected patch size.
  int size_;  // Number of bytes of the expected patch size.
  MacroAssembler masm_;  // Macro assembler used to generate the code.
};
#endif  // ENABLE_DEBUGGER_SUPPORT
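
// Illustrative sketch (not part of the original header): patching a single
// instruction in already-generated code. The constructor takes the number of
// instructions to be emitted, and the destructor checks that exactly that
// many were produced; the address below is a placeholder.
//   CodePatcher patcher(pc_of_instruction_to_patch, 1);
//   patcher.masm()->mov(r0, Operand(r0));  // emit exactly one instruction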


// Helper class for generating code or data associated with the code
// right after a call instruction. As an example this can be used to
// generate safepoint data after calls for crankshaft.
class PostCallGenerator {
 public:
  PostCallGenerator() { }
  virtual ~PostCallGenerator() { }
  virtual void Generate() = 0;
};


// -----------------------------------------------------------------------------
// Static helper functions.

static MemOperand ContextOperand(Register context, int index) {
  return MemOperand(context, Context::SlotOffset(index));
}


static inline MemOperand GlobalObjectOperand() {
  return ContextOperand(cp, Context::GLOBAL_INDEX);
}


#ifdef GENERATED_CODE_COVERAGE
#define CODE_COVERAGE_STRINGIFY(x) #x
#define CODE_COVERAGE_TOSTRING(x) CODE_COVERAGE_STRINGIFY(x)
#define __FILE_LINE__ __FILE__ ":" CODE_COVERAGE_TOSTRING(__LINE__)
#define ACCESS_MASM(masm) masm->stop(__FILE_LINE__); masm->
#else
#define ACCESS_MASM(masm) masm->
#endif
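
// Illustrative note (not part of the original header): code generators
// typically abbreviate ACCESS_MASM locally so that every emitted instruction
// goes through the macro (and through masm->stop() when
// GENERATED_CODE_COVERAGE is defined):
//   #define __ ACCESS_MASM(masm)
//   __ mov(r0, Operand(1));
//   #undef __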


} }  // namespace v8::internal

#endif  // V8_ARM_MACRO_ASSEMBLER_ARM_H_