blob: 8d817c076412f463cfec36fed9117a300444b65a [file] [log] [blame]
Ben Murdochb0fe1622011-05-05 13:52:32 +01001// Copyright 2010 the V8 project authors. All rights reserved.
Steve Blocka7e24c12009-10-30 11:49:00 +00002// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#ifndef V8_ARM_MACRO_ASSEMBLER_ARM_H_
29#define V8_ARM_MACRO_ASSEMBLER_ARM_H_
30
31#include "assembler.h"
32
33namespace v8 {
34namespace internal {
35
Ben Murdochb8e0da22011-05-16 14:20:40 +010036// Forward declaration.
Steve Block44f0eee2011-05-26 01:26:41 +010037class CallWrapper;
Ben Murdochb8e0da22011-05-16 14:20:40 +010038
Andrei Popescu31002712010-02-23 13:46:05 +000039// ----------------------------------------------------------------------------
40// Static helper functions
41
42// Generate a MemOperand for loading a field from an object.
43static inline MemOperand FieldMemOperand(Register object, int offset) {
44 return MemOperand(object, offset - kHeapObjectTag);
45}
46
Steve Blocka7e24c12009-10-30 11:49:00 +000047
Steve Block1e0659c2011-05-24 12:43:12 +010048static inline Operand SmiUntagOperand(Register object) {
49 return Operand(object, ASR, kSmiTagSize);
50}
51
52
53
Steve Blocka7e24c12009-10-30 11:49:00 +000054// Give alias names to registers
55const Register cp = { 8 }; // JavaScript context pointer
Andrei Popescu31002712010-02-23 13:46:05 +000056const Register roots = { 10 }; // Roots array pointer.
Steve Blocka7e24c12009-10-30 11:49:00 +000057
// Selects how a JavaScript invocation is emitted: as a call or as a jump.
enum InvokeJSFlags {
  CALL_JS,  // Invoke via a call instruction.
  JUMP_JS   // Invoke via a jump (tail-call style).
};
62
63
// Bit flags controlling the behavior of the AllocateInNewSpace functions.
enum AllocationFlags {
  // Plain allocation: untagged result, top not preloaded, size in bytes.
  NO_ALLOCATION_FLAGS = 0,
  // Tag the returned pointer as a heap object.
  TAG_OBJECT = 1 << 0,
  // The result register already holds the new-space allocation top.
  RESULT_CONTAINS_TOP = 1 << 1,
  // Interpret the requested allocation size as a word count, not bytes.
  SIZE_IN_WORDS = 1 << 2
};
77
78
// Bit flags controlling the behavior of the ObjectToDoubleVFPRegister
// function.
enum ObjectToDoubleFlags {
  // Default behavior: perform the smi check, load NaNs/infinities normally.
  NO_OBJECT_TO_DOUBLE_FLAGS = 0,
  // The object is known not to be a smi; the smi check may be skipped.
  OBJECT_NOT_SMI = 1 << 0,
  // Branch to the non-number label instead of loading NaNs or infinities.
  AVOID_NANS_AND_INFINITIES = 1 << 1
};
88
89
Steve Blocka7e24c12009-10-30 11:49:00 +000090// MacroAssembler implements a collection of frequently used macros.
91class MacroAssembler: public Assembler {
92 public:
Ben Murdoch8b112d22011-06-08 16:22:53 +010093 // The isolate parameter can be NULL if the macro assembler should
94 // not use isolate-dependent functionality. In this case, it's the
95 // responsibility of the caller to never invoke such function on the
96 // macro assembler.
97 MacroAssembler(Isolate* isolate, void* buffer, int size);
Steve Blocka7e24c12009-10-30 11:49:00 +000098
Andrei Popescu31002712010-02-23 13:46:05 +000099 // Jump, Call, and Ret pseudo instructions implementing inter-working.
Steve Blocka7e24c12009-10-30 11:49:00 +0000100 void Jump(Register target, Condition cond = al);
101 void Jump(byte* target, RelocInfo::Mode rmode, Condition cond = al);
102 void Jump(Handle<Code> code, RelocInfo::Mode rmode, Condition cond = al);
Ben Murdoch42effa52011-08-19 16:40:31 +0100103 static int CallSize(Register target, Condition cond = al);
Steve Blocka7e24c12009-10-30 11:49:00 +0000104 void Call(Register target, Condition cond = al);
Ben Murdoch42effa52011-08-19 16:40:31 +0100105 static int CallSize(byte* target, RelocInfo::Mode rmode, Condition cond = al);
Steve Blocka7e24c12009-10-30 11:49:00 +0000106 void Call(byte* target, RelocInfo::Mode rmode, Condition cond = al);
Ben Murdoch42effa52011-08-19 16:40:31 +0100107 static int CallSize(Handle<Code> code,
108 RelocInfo::Mode rmode,
109 Condition cond = al);
Steve Blocka7e24c12009-10-30 11:49:00 +0000110 void Call(Handle<Code> code, RelocInfo::Mode rmode, Condition cond = al);
111 void Ret(Condition cond = al);
Leon Clarkee46be812010-01-19 14:06:41 +0000112
113 // Emit code to discard a non-negative number of pointer-sized elements
114 // from the stack, clobbering only the sp register.
115 void Drop(int count, Condition cond = al);
116
Ben Murdochb0fe1622011-05-05 13:52:32 +0100117 void Ret(int drop, Condition cond = al);
Steve Block6ded16b2010-05-10 14:33:55 +0100118
119 // Swap two registers. If the scratch register is omitted then a slightly
120 // less efficient form using xor instead of mov is emitted.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100121 void Swap(Register reg1,
122 Register reg2,
123 Register scratch = no_reg,
124 Condition cond = al);
Steve Block6ded16b2010-05-10 14:33:55 +0100125
Kristian Monsen9dcf7e22010-06-28 14:14:28 +0100126
127 void And(Register dst, Register src1, const Operand& src2,
128 Condition cond = al);
129 void Ubfx(Register dst, Register src, int lsb, int width,
130 Condition cond = al);
131 void Sbfx(Register dst, Register src, int lsb, int width,
132 Condition cond = al);
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100133 // The scratch register is not used for ARMv7.
134 // scratch can be the same register as src (in which case it is trashed), but
135 // not the same as dst.
136 void Bfi(Register dst,
137 Register src,
138 Register scratch,
139 int lsb,
140 int width,
141 Condition cond = al);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +0100142 void Bfc(Register dst, int lsb, int width, Condition cond = al);
Kristian Monsen50ef84f2010-07-29 15:18:00 +0100143 void Usat(Register dst, int satpos, const Operand& src,
144 Condition cond = al);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +0100145
Leon Clarkee46be812010-01-19 14:06:41 +0000146 void Call(Label* target);
147 void Move(Register dst, Handle<Object> value);
Steve Block6ded16b2010-05-10 14:33:55 +0100148 // May do nothing if the registers are identical.
149 void Move(Register dst, Register src);
Steve Blocka7e24c12009-10-30 11:49:00 +0000150 // Jumps to the label at the index given by the Smi in "index".
151 void SmiJumpTable(Register index, Vector<Label*> targets);
152 // Load an object from the root table.
153 void LoadRoot(Register destination,
154 Heap::RootListIndex index,
155 Condition cond = al);
Kristian Monsen25f61362010-05-21 11:50:48 +0100156 // Store an object to the root table.
157 void StoreRoot(Register source,
158 Heap::RootListIndex index,
159 Condition cond = al);
Steve Blocka7e24c12009-10-30 11:49:00 +0000160
Steve Block6ded16b2010-05-10 14:33:55 +0100161
162 // Check if object is in new space.
163 // scratch can be object itself, but it will be clobbered.
164 void InNewSpace(Register object,
165 Register scratch,
Steve Block1e0659c2011-05-24 12:43:12 +0100166 Condition cond, // eq for new space, ne otherwise
Steve Block6ded16b2010-05-10 14:33:55 +0100167 Label* branch);
168
169
Steve Block8defd9f2010-07-08 12:39:36 +0100170 // For the page containing |object| mark the region covering [address]
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100171 // dirty. The object address must be in the first 8K of an allocated page.
Kristian Monsen9dcf7e22010-06-28 14:14:28 +0100172 void RecordWriteHelper(Register object,
Steve Block8defd9f2010-07-08 12:39:36 +0100173 Register address,
174 Register scratch);
Steve Block6ded16b2010-05-10 14:33:55 +0100175
Steve Block8defd9f2010-07-08 12:39:36 +0100176 // For the page containing |object| mark the region covering
177 // [object+offset] dirty. The object address must be in the first 8K
178 // of an allocated page. The 'scratch' registers are used in the
179 // implementation and all 3 registers are clobbered by the
180 // operation, as well as the ip register. RecordWrite updates the
181 // write barrier even when storing smis.
Kristian Monsen9dcf7e22010-06-28 14:14:28 +0100182 void RecordWrite(Register object,
183 Operand offset,
184 Register scratch0,
185 Register scratch1);
Steve Blocka7e24c12009-10-30 11:49:00 +0000186
Steve Block8defd9f2010-07-08 12:39:36 +0100187 // For the page containing |object| mark the region covering
188 // [address] dirty. The object address must be in the first 8K of an
189 // allocated page. All 3 registers are clobbered by the operation,
190 // as well as the ip register. RecordWrite updates the write barrier
191 // even when storing smis.
192 void RecordWrite(Register object,
193 Register address,
194 Register scratch);
195
Steve Block6ded16b2010-05-10 14:33:55 +0100196 // Push two registers. Pushes leftmost register first (to highest address).
197 void Push(Register src1, Register src2, Condition cond = al) {
198 ASSERT(!src1.is(src2));
199 if (src1.code() > src2.code()) {
200 stm(db_w, sp, src1.bit() | src2.bit(), cond);
201 } else {
202 str(src1, MemOperand(sp, 4, NegPreIndex), cond);
203 str(src2, MemOperand(sp, 4, NegPreIndex), cond);
204 }
205 }
206
207 // Push three registers. Pushes leftmost register first (to highest address).
208 void Push(Register src1, Register src2, Register src3, Condition cond = al) {
209 ASSERT(!src1.is(src2));
210 ASSERT(!src2.is(src3));
211 ASSERT(!src1.is(src3));
212 if (src1.code() > src2.code()) {
213 if (src2.code() > src3.code()) {
214 stm(db_w, sp, src1.bit() | src2.bit() | src3.bit(), cond);
215 } else {
216 stm(db_w, sp, src1.bit() | src2.bit(), cond);
217 str(src3, MemOperand(sp, 4, NegPreIndex), cond);
218 }
219 } else {
220 str(src1, MemOperand(sp, 4, NegPreIndex), cond);
221 Push(src2, src3, cond);
222 }
223 }
224
225 // Push four registers. Pushes leftmost register first (to highest address).
226 void Push(Register src1, Register src2,
227 Register src3, Register src4, Condition cond = al) {
228 ASSERT(!src1.is(src2));
229 ASSERT(!src2.is(src3));
230 ASSERT(!src1.is(src3));
231 ASSERT(!src1.is(src4));
232 ASSERT(!src2.is(src4));
233 ASSERT(!src3.is(src4));
234 if (src1.code() > src2.code()) {
235 if (src2.code() > src3.code()) {
236 if (src3.code() > src4.code()) {
237 stm(db_w,
238 sp,
239 src1.bit() | src2.bit() | src3.bit() | src4.bit(),
240 cond);
241 } else {
242 stm(db_w, sp, src1.bit() | src2.bit() | src3.bit(), cond);
243 str(src4, MemOperand(sp, 4, NegPreIndex), cond);
244 }
245 } else {
246 stm(db_w, sp, src1.bit() | src2.bit(), cond);
247 Push(src3, src4, cond);
248 }
249 } else {
250 str(src1, MemOperand(sp, 4, NegPreIndex), cond);
251 Push(src2, src3, src4, cond);
252 }
253 }
254
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100255 // Pop two registers. Pops rightmost register first (from lower address).
256 void Pop(Register src1, Register src2, Condition cond = al) {
257 ASSERT(!src1.is(src2));
258 if (src1.code() > src2.code()) {
259 ldm(ia_w, sp, src1.bit() | src2.bit(), cond);
260 } else {
261 ldr(src2, MemOperand(sp, 4, PostIndex), cond);
262 ldr(src1, MemOperand(sp, 4, PostIndex), cond);
263 }
264 }
265
Ben Murdochb0fe1622011-05-05 13:52:32 +0100266 // Push and pop the registers that can hold pointers, as defined by the
267 // RegList constant kSafepointSavedRegisters.
268 void PushSafepointRegisters();
269 void PopSafepointRegisters();
Ben Murdochb8e0da22011-05-16 14:20:40 +0100270 void PushSafepointRegistersAndDoubles();
271 void PopSafepointRegistersAndDoubles();
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100272 // Store value in register src in the safepoint stack slot for
273 // register dst.
274 void StoreToSafepointRegisterSlot(Register src, Register dst);
275 void StoreToSafepointRegistersAndDoublesSlot(Register src, Register dst);
276 // Load the value of the src register from its safepoint stack slot
277 // into register dst.
278 void LoadFromSafepointRegisterSlot(Register dst, Register src);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100279
Leon Clarkef7060e22010-06-03 12:02:55 +0100280 // Load two consecutive registers with two consecutive memory locations.
281 void Ldrd(Register dst1,
282 Register dst2,
283 const MemOperand& src,
284 Condition cond = al);
285
286 // Store two consecutive registers to two consecutive memory locations.
287 void Strd(Register src1,
288 Register src2,
289 const MemOperand& dst,
290 Condition cond = al);
291
Ben Murdochb8e0da22011-05-16 14:20:40 +0100292 // Clear specified FPSCR bits.
293 void ClearFPSCRBits(const uint32_t bits_to_clear,
294 const Register scratch,
295 const Condition cond = al);
296
297 // Compare double values and move the result to the normal condition flags.
298 void VFPCompareAndSetFlags(const DwVfpRegister src1,
299 const DwVfpRegister src2,
300 const Condition cond = al);
301 void VFPCompareAndSetFlags(const DwVfpRegister src1,
302 const double src2,
303 const Condition cond = al);
304
305 // Compare double values and then load the fpscr flags to a register.
306 void VFPCompareAndLoadFlags(const DwVfpRegister src1,
307 const DwVfpRegister src2,
308 const Register fpscr_flags,
309 const Condition cond = al);
310 void VFPCompareAndLoadFlags(const DwVfpRegister src1,
311 const double src2,
312 const Register fpscr_flags,
313 const Condition cond = al);
314
Ben Murdoch086aeea2011-05-13 15:57:08 +0100315
Steve Blocka7e24c12009-10-30 11:49:00 +0000316 // ---------------------------------------------------------------------------
317 // Activation frames
318
319 void EnterInternalFrame() { EnterFrame(StackFrame::INTERNAL); }
320 void LeaveInternalFrame() { LeaveFrame(StackFrame::INTERNAL); }
321
322 void EnterConstructFrame() { EnterFrame(StackFrame::CONSTRUCT); }
323 void LeaveConstructFrame() { LeaveFrame(StackFrame::CONSTRUCT); }
324
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100325 // Enter exit frame.
Steve Block1e0659c2011-05-24 12:43:12 +0100326 // stack_space - extra stack space, used for alignment before call to C.
327 void EnterExitFrame(bool save_doubles, int stack_space = 0);
Steve Blocka7e24c12009-10-30 11:49:00 +0000328
329 // Leave the current exit frame. Expects the return value in r0.
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100330 // Expect the number of values, pushed prior to the exit frame, to
331 // remove in a register (or no_reg, if there is nothing to remove).
332 void LeaveExitFrame(bool save_doubles, Register argument_count);
Steve Blocka7e24c12009-10-30 11:49:00 +0000333
Steve Block6ded16b2010-05-10 14:33:55 +0100334 // Get the actual activation frame alignment for target environment.
335 static int ActivationFrameAlignment();
Steve Blocka7e24c12009-10-30 11:49:00 +0000336
Steve Blockd0582a62009-12-15 09:54:21 +0000337 void LoadContext(Register dst, int context_chain_length);
338
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -0800339 void LoadGlobalFunction(int index, Register function);
340
341 // Load the initial map from the global function. The registers
342 // function and map can be the same, function is then overwritten.
343 void LoadGlobalFunctionInitialMap(Register function,
344 Register map,
345 Register scratch);
346
Steve Blocka7e24c12009-10-30 11:49:00 +0000347 // ---------------------------------------------------------------------------
348 // JavaScript invokes
349
350 // Invoke the JavaScript function code by either calling or jumping.
351 void InvokeCode(Register code,
352 const ParameterCount& expected,
353 const ParameterCount& actual,
Ben Murdochb8e0da22011-05-16 14:20:40 +0100354 InvokeFlag flag,
Steve Block44f0eee2011-05-26 01:26:41 +0100355 CallWrapper* call_wrapper = NULL);
Steve Blocka7e24c12009-10-30 11:49:00 +0000356
357 void InvokeCode(Handle<Code> code,
358 const ParameterCount& expected,
359 const ParameterCount& actual,
360 RelocInfo::Mode rmode,
361 InvokeFlag flag);
362
363 // Invoke the JavaScript function in the given register. Changes the
364 // current context to the context in the function before invoking.
365 void InvokeFunction(Register function,
366 const ParameterCount& actual,
Ben Murdochb8e0da22011-05-16 14:20:40 +0100367 InvokeFlag flag,
Steve Block44f0eee2011-05-26 01:26:41 +0100368 CallWrapper* call_wrapper = NULL);
Steve Blocka7e24c12009-10-30 11:49:00 +0000369
Andrei Popescu402d9372010-02-26 13:31:12 +0000370 void InvokeFunction(JSFunction* function,
371 const ParameterCount& actual,
372 InvokeFlag flag);
373
Ben Murdochb0fe1622011-05-05 13:52:32 +0100374 void IsObjectJSObjectType(Register heap_object,
375 Register map,
376 Register scratch,
377 Label* fail);
378
379 void IsInstanceJSObjectType(Register map,
380 Register scratch,
381 Label* fail);
382
383 void IsObjectJSStringType(Register object,
384 Register scratch,
385 Label* fail);
Steve Blocka7e24c12009-10-30 11:49:00 +0000386
387#ifdef ENABLE_DEBUGGER_SUPPORT
388 // ---------------------------------------------------------------------------
389 // Debugger Support
390
Andrei Popescu402d9372010-02-26 13:31:12 +0000391 void DebugBreak();
Steve Blocka7e24c12009-10-30 11:49:00 +0000392#endif
393
394 // ---------------------------------------------------------------------------
395 // Exception handling
396
397 // Push a new try handler and link into try handler chain.
398 // The return address must be passed in register lr.
399 // On exit, r0 contains TOS (code slot).
400 void PushTryHandler(CodeLocation try_location, HandlerType type);
401
Leon Clarkee46be812010-01-19 14:06:41 +0000402 // Unlink the stack handler on top of the stack from the try handler chain.
403 // Must preserve the result register.
404 void PopTryHandler();
Steve Blocka7e24c12009-10-30 11:49:00 +0000405
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100406 // Passes thrown value (in r0) to the handler of top of the try handler chain.
407 void Throw(Register value);
408
409 // Propagates an uncatchable exception to the top of the current JS stack's
410 // handler chain.
411 void ThrowUncatchable(UncatchableExceptionType type, Register value);
412
Steve Blocka7e24c12009-10-30 11:49:00 +0000413 // ---------------------------------------------------------------------------
414 // Inline caching support
415
Steve Blocka7e24c12009-10-30 11:49:00 +0000416 // Generate code for checking access rights - used for security checks
417 // on access to global objects across environments. The holder register
418 // is left untouched, whereas both scratch registers are clobbered.
419 void CheckAccessGlobalProxy(Register holder_reg,
420 Register scratch,
421 Label* miss);
422
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -0800423 inline void MarkCode(NopMarkerTypes type) {
424 nop(type);
425 }
426
427 // Check if the given instruction is a 'type' marker.
428 // ie. check if is is a mov r<type>, r<type> (referenced as nop(type))
429 // These instructions are generated to mark special location in the code,
430 // like some special IC code.
431 static inline bool IsMarkedCode(Instr instr, int type) {
432 ASSERT((FIRST_IC_MARKER <= type) && (type < LAST_CODE_MARKER));
433 return IsNop(instr, type);
434 }
435
436
437 static inline int GetCodeMarker(Instr instr) {
438 int dst_reg_offset = 12;
439 int dst_mask = 0xf << dst_reg_offset;
440 int src_mask = 0xf;
441 int dst_reg = (instr & dst_mask) >> dst_reg_offset;
442 int src_reg = instr & src_mask;
443 uint32_t non_register_mask = ~(dst_mask | src_mask);
444 uint32_t mov_mask = al | 13 << 21;
445
446 // Return <n> if we have a mov rn rn, else return -1.
447 int type = ((instr & non_register_mask) == mov_mask) &&
448 (dst_reg == src_reg) &&
449 (FIRST_IC_MARKER <= dst_reg) && (dst_reg < LAST_CODE_MARKER)
450 ? src_reg
451 : -1;
452 ASSERT((type == -1) ||
453 ((FIRST_IC_MARKER <= type) && (type < LAST_CODE_MARKER)));
454 return type;
455 }
456
Steve Blocka7e24c12009-10-30 11:49:00 +0000457
458 // ---------------------------------------------------------------------------
459 // Allocation support
460
Ben Murdoch086aeea2011-05-13 15:57:08 +0100461 // Allocate an object in new space. The object_size is specified
462 // either in bytes or in words if the allocation flag SIZE_IN_WORDS
463 // is passed. If the new space is exhausted control continues at the
464 // gc_required label. The allocated object is returned in result. If
465 // the flag tag_allocated_object is true the result is tagged as as
466 // a heap object. All registers are clobbered also when control
467 // continues at the gc_required label.
Steve Blocka7e24c12009-10-30 11:49:00 +0000468 void AllocateInNewSpace(int object_size,
469 Register result,
470 Register scratch1,
471 Register scratch2,
472 Label* gc_required,
473 AllocationFlags flags);
474 void AllocateInNewSpace(Register object_size,
475 Register result,
476 Register scratch1,
477 Register scratch2,
478 Label* gc_required,
479 AllocationFlags flags);
480
481 // Undo allocation in new space. The object passed and objects allocated after
482 // it will no longer be allocated. The caller must make sure that no pointers
483 // are left to the object(s) no longer allocated as they would be invalid when
484 // allocation is undone.
485 void UndoAllocationInNewSpace(Register object, Register scratch);
486
Andrei Popescu31002712010-02-23 13:46:05 +0000487
488 void AllocateTwoByteString(Register result,
489 Register length,
490 Register scratch1,
491 Register scratch2,
492 Register scratch3,
493 Label* gc_required);
494 void AllocateAsciiString(Register result,
495 Register length,
496 Register scratch1,
497 Register scratch2,
498 Register scratch3,
499 Label* gc_required);
500 void AllocateTwoByteConsString(Register result,
501 Register length,
502 Register scratch1,
503 Register scratch2,
504 Label* gc_required);
505 void AllocateAsciiConsString(Register result,
506 Register length,
507 Register scratch1,
508 Register scratch2,
509 Label* gc_required);
510
Kristian Monsen25f61362010-05-21 11:50:48 +0100511 // Allocates a heap number or jumps to the gc_required label if the young
512 // space is full and a scavenge is needed. All registers are clobbered also
513 // when control continues at the gc_required label.
Steve Block6ded16b2010-05-10 14:33:55 +0100514 void AllocateHeapNumber(Register result,
515 Register scratch1,
516 Register scratch2,
Kristian Monsen9dcf7e22010-06-28 14:14:28 +0100517 Register heap_number_map,
Steve Block6ded16b2010-05-10 14:33:55 +0100518 Label* gc_required);
Steve Block8defd9f2010-07-08 12:39:36 +0100519 void AllocateHeapNumberWithValue(Register result,
520 DwVfpRegister value,
521 Register scratch1,
522 Register scratch2,
523 Register heap_number_map,
524 Label* gc_required);
525
Ben Murdochbb769b22010-08-11 14:56:33 +0100526 // Copies a fixed number of fields of heap objects from src to dst.
527 void CopyFields(Register dst, Register src, RegList temps, int field_count);
Andrei Popescu31002712010-02-23 13:46:05 +0000528
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100529 // Copies a number of bytes from src to dst. All registers are clobbered. On
530 // exit src and dst will point to the place just after where the last byte was
531 // read or written and length will be zero.
532 void CopyBytes(Register src,
533 Register dst,
534 Register length,
535 Register scratch);
536
Steve Blocka7e24c12009-10-30 11:49:00 +0000537 // ---------------------------------------------------------------------------
538 // Support functions.
539
540 // Try to get function prototype of a function and puts the value in
541 // the result register. Checks that the function really is a
542 // function and jumps to the miss label if the fast checks fail. The
543 // function register will be untouched; the other registers may be
544 // clobbered.
545 void TryGetFunctionPrototype(Register function,
546 Register result,
547 Register scratch,
548 Label* miss);
549
550 // Compare object type for heap object. heap_object contains a non-Smi
551 // whose object type should be compared with the given type. This both
552 // sets the flags and leaves the object type in the type_reg register.
553 // It leaves the map in the map register (unless the type_reg and map register
554 // are the same register). It leaves the heap object in the heap_object
555 // register unless the heap_object register is the same register as one of the
556 // other registers.
557 void CompareObjectType(Register heap_object,
558 Register map,
559 Register type_reg,
560 InstanceType type);
561
562 // Compare instance type in a map. map contains a valid map object whose
563 // object type should be compared with the given type. This both
564 // sets the flags and leaves the object type in the type_reg register. It
565 // leaves the heap object in the heap_object register unless the heap_object
566 // register is the same register as type_reg.
567 void CompareInstanceType(Register map,
568 Register type_reg,
569 InstanceType type);
570
Andrei Popescu31002712010-02-23 13:46:05 +0000571
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100572 // Check if the map of an object is equal to a specified map (either
573 // given directly or as an index into the root list) and branch to
574 // label if not. Skip the smi check if not required (object is known
575 // to be a heap object)
Andrei Popescu31002712010-02-23 13:46:05 +0000576 void CheckMap(Register obj,
577 Register scratch,
578 Handle<Map> map,
579 Label* fail,
580 bool is_heap_object);
581
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100582 void CheckMap(Register obj,
583 Register scratch,
584 Heap::RootListIndex index,
585 Label* fail,
586 bool is_heap_object);
587
588
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100589 // Compare the object in a register to a value from the root list.
590 // Uses the ip register as scratch.
591 void CompareRoot(Register obj, Heap::RootListIndex index);
592
593
Andrei Popescu31002712010-02-23 13:46:05 +0000594 // Load and check the instance type of an object for being a string.
595 // Loads the type into the second argument register.
596 // Returns a condition that will be enabled if the object was a string.
597 Condition IsObjectStringType(Register obj,
598 Register type) {
599 ldr(type, FieldMemOperand(obj, HeapObject::kMapOffset));
600 ldrb(type, FieldMemOperand(type, Map::kInstanceTypeOffset));
601 tst(type, Operand(kIsNotStringMask));
602 ASSERT_EQ(0, kStringTag);
603 return eq;
604 }
605
606
Steve Blocka7e24c12009-10-30 11:49:00 +0000607 // Generates code for reporting that an illegal operation has
608 // occurred.
609 void IllegalOperation(int num_arguments);
610
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100611 // Picks out an array index from the hash field.
612 // Register use:
613 // hash - holds the index's hash. Clobbered.
614 // index - holds the overwritten index on exit.
615 void IndexFromHash(Register hash, Register index);
616
Andrei Popescu31002712010-02-23 13:46:05 +0000617 // Get the number of least significant bits from a register
618 void GetLeastBitsFromSmi(Register dst, Register src, int num_least_bits);
Steve Block1e0659c2011-05-24 12:43:12 +0100619 void GetLeastBitsFromInt32(Register dst, Register src, int mun_least_bits);
Andrei Popescu31002712010-02-23 13:46:05 +0000620
Steve Blockd0582a62009-12-15 09:54:21 +0000621 // Uses VFP instructions to Convert a Smi to a double.
622 void IntegerToDoubleConversionWithVFP3(Register inReg,
623 Register outHighReg,
624 Register outLowReg);
625
Steve Block8defd9f2010-07-08 12:39:36 +0100626 // Load the value of a number object into a VFP double register. If the object
627 // is not a number a jump to the label not_number is performed and the VFP
628 // double register is unchanged.
629 void ObjectToDoubleVFPRegister(
630 Register object,
631 DwVfpRegister value,
632 Register scratch1,
633 Register scratch2,
634 Register heap_number_map,
635 SwVfpRegister scratch3,
636 Label* not_number,
637 ObjectToDoubleFlags flags = NO_OBJECT_TO_DOUBLE_FLAGS);
638
639 // Load the value of a smi object into a VFP double register. The register
640 // scratch1 can be the same register as smi in which case smi will hold the
641 // untagged value afterwards.
642 void SmiToDoubleVFPRegister(Register smi,
643 DwVfpRegister value,
644 Register scratch1,
645 SwVfpRegister scratch2);
646
Iain Merrick9ac36c92010-09-13 15:29:50 +0100647 // Convert the HeapNumber pointed to by source to a 32bits signed integer
648 // dest. If the HeapNumber does not fit into a 32bits signed integer branch
Steve Block1e0659c2011-05-24 12:43:12 +0100649 // to not_int32 label. If VFP3 is available double_scratch is used but not
650 // scratch2.
Iain Merrick9ac36c92010-09-13 15:29:50 +0100651 void ConvertToInt32(Register source,
652 Register dest,
653 Register scratch,
654 Register scratch2,
Steve Block1e0659c2011-05-24 12:43:12 +0100655 DwVfpRegister double_scratch,
Iain Merrick9ac36c92010-09-13 15:29:50 +0100656 Label *not_int32);
657
Steve Block44f0eee2011-05-26 01:26:41 +0100658 // Truncates a double using a specific rounding mode.
659 // Clears the z flag (ne condition) if an overflow occurs.
660 // If exact_conversion is true, the z flag is also cleared if the conversion
661 // was inexact, ie. if the double value could not be converted exactly
662 // to a 32bit integer.
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100663 void EmitVFPTruncate(VFPRoundingMode rounding_mode,
664 SwVfpRegister result,
665 DwVfpRegister double_input,
666 Register scratch1,
667 Register scratch2,
668 CheckForInexactConversion check
669 = kDontCheckForInexactConversion);
670
Steve Block44f0eee2011-05-26 01:26:41 +0100671 // Helper for EmitECMATruncate.
672 // This will truncate a floating-point value outside of the singed 32bit
673 // integer range to a 32bit signed integer.
674 // Expects the double value loaded in input_high and input_low.
675 // Exits with the answer in 'result'.
676 // Note that this code does not work for values in the 32bit range!
677 void EmitOutOfInt32RangeTruncate(Register result,
678 Register input_high,
679 Register input_low,
680 Register scratch);
681
  // Performs a truncating conversion of a floating point number as used by
  // the JS bitwise operations. See ECMA-262 9.5: ToInt32.
  // Exits with 'result' holding the answer and all other registers clobbered.
  void EmitECMATruncate(Register result,
                        DwVfpRegister double_input,
                        SwVfpRegister single_scratch,
                        Register scratch,
                        Register scratch2,
                        Register scratch3);

  // Count leading zeros in a 32 bit word. On ARMv5 and later it uses the clz
  // instruction. On pre-ARMv5 hardware this routine gives the wrong answer
  // for 0 (31 instead of 32). Source and scratch can be the same in which case
  // the source is clobbered. Source and zeros can also be the same in which
  // case scratch should be a different register.
  void CountLeadingZeros(Register zeros,
                         Register source,
                         Register scratch);
Steve Blocka7e24c12009-10-30 11:49:00 +0000700
  // ---------------------------------------------------------------------------
  // Runtime calls

  // Call a code stub.
  void CallStub(CodeStub* stub, Condition cond = al);

  // Tail call (jump to) a code stub.
  void TailCallStub(CodeStub* stub, Condition cond = al);

  // Tail call a code stub (jump) and return the code object called. Try to
  // generate the code if necessary. Do not perform a GC but instead return
  // a retry after GC failure.
  MUST_USE_RESULT MaybeObject* TryTailCallStub(CodeStub* stub,
                                               Condition cond = al);

  // Call a runtime routine.
  void CallRuntime(const Runtime::Function* f, int num_arguments);
  // Like CallRuntime, by function id; presumably preserves the double
  // registers across the call (see the implementation) — name implies it.
  void CallRuntimeSaveDoubles(Runtime::FunctionId id);

  // Convenience function: Same as above, but takes the fid instead.
  void CallRuntime(Runtime::FunctionId fid, int num_arguments);

  // Convenience function: call an external reference.
  void CallExternalReference(const ExternalReference& ext,
                             int num_arguments);

  // Tail call of a runtime routine (jump).
  // Like JumpToExternalReference, but also takes care of passing the number
  // of parameters.
  void TailCallExternalReference(const ExternalReference& ext,
                                 int num_arguments,
                                 int result_size);

  // Tail call of a runtime routine (jump). Try to generate the code if
  // necessary. Do not perform a GC but instead return a retry after GC
  // failure.
  MUST_USE_RESULT MaybeObject* TryTailCallExternalReference(
      const ExternalReference& ext, int num_arguments, int result_size);

  // Convenience function: tail call a runtime routine (jump).
  void TailCallRuntime(Runtime::FunctionId fid,
                       int num_arguments,
                       int result_size);
744
  // Before calling a C-function from generated code, align arguments on stack.
  // After aligning the frame, non-register arguments must be stored in
  // sp[0], sp[4], etc., not pushed. The argument count assumes all arguments
  // are word sized.
  // Some compilers/platforms require the stack to be aligned when calling
  // C++ code.
  // Needs a scratch register to do some arithmetic. This register will be
  // trashed.
  void PrepareCallCFunction(int num_arguments, Register scratch);

  // Calls a C function and cleans up the space for arguments allocated
  // by PrepareCallCFunction. The called function is not allowed to trigger a
  // garbage collection, since that might move the code and invalidate the
  // return address (unless this is somehow accounted for by the called
  // function).
  void CallCFunction(ExternalReference function, int num_arguments);
  void CallCFunction(Register function, Register scratch, int num_arguments);

  // Retrieve the double-precision return value of a preceding C function
  // call into dst.
  void GetCFunctionDoubleResult(const DoubleRegister dst);

  // Calls an API function. Allocates HandleScope, extracts returned value
  // from handle and propagates exceptions. Restores context.
  // stack_space - space to be unwound on exit (includes the call js
  // arguments space and the additional space allocated for the fast call).
  MaybeObject* TryCallApiFunctionAndReturn(ExternalReference function,
                                           int stack_space);
771
  // Jump to a runtime routine.
  void JumpToExternalReference(const ExternalReference& builtin);

  // Jump to a runtime routine. Try-variant: does not perform a GC but
  // instead returns a retry-after-GC failure (cf. the other Try* members).
  MaybeObject* TryJumpToExternalReference(const ExternalReference& ext);

  // Invoke specified builtin JavaScript function. Adds an entry to
  // the unresolved list if the name does not resolve.
  void InvokeBuiltin(Builtins::JavaScript id,
                     InvokeJSFlags flags,
                     CallWrapper* call_wrapper = NULL);

  // Store the code object for the given builtin in the target register and
  // setup the function in r1.
  void GetBuiltinEntry(Register target, Builtins::JavaScript id);

  // Store the function for the given builtin in the target register.
  void GetBuiltinFunction(Register target, Builtins::JavaScript id);

  // Returns the code object associated with this assembler. Asserts that
  // the handle has been filled in (it is patched on installation, see
  // code_object_ below).
  Handle<Object> CodeObject() {
    ASSERT(!code_object_.is_null());
    return code_object_;
  }
Steve Blocka7e24c12009-10-30 11:49:00 +0000794
795
  // ---------------------------------------------------------------------------
  // StatsCounter support

  // Store 'value' into the counter. scratch1/scratch2 are used as scratch.
  void SetCounter(StatsCounter* counter, int value,
                  Register scratch1, Register scratch2);
  // Add 'value' to the counter. scratch1/scratch2 are used as scratch.
  void IncrementCounter(StatsCounter* counter, int value,
                        Register scratch1, Register scratch2);
  // Subtract 'value' from the counter. scratch1/scratch2 are used as scratch.
  void DecrementCounter(StatsCounter* counter, int value,
                        Register scratch1, Register scratch2);


  // ---------------------------------------------------------------------------
  // Debugging

  // Calls Abort(msg) if the condition cond is not satisfied.
  // Use --debug_code to enable.
  void Assert(Condition cond, const char* msg);
  // Debug-code check that 'reg' holds the root value at 'index'.
  void AssertRegisterIsRoot(Register reg, Heap::RootListIndex index);
  // Debug-code check that 'elements' is a fast-elements backing store.
  void AssertFastElements(Register elements);

  // Like Assert(), but always enabled.
  void Check(Condition cond, const char* msg);

  // Print a message to stdout and abort execution.
  void Abort(const char* msg);

  // Verify restrictions about code generated in stubs.
  void set_generating_stub(bool value) { generating_stub_ = value; }
  bool generating_stub() { return generating_stub_; }
  void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
  bool allow_stub_calls() { return allow_stub_calls_; }
827
  // ---------------------------------------------------------------------------
  // Number utilities

  // Check whether the value of reg is a power of two and not zero. If not
  // control continues at the label not_power_of_two. If reg is a power of two
  // the register scratch contains the value of (reg - 1) when control falls
  // through.
  void JumpIfNotPowerOfTwoOrZero(Register reg,
                                 Register scratch,
                                 Label* not_power_of_two_or_zero);
  // Check whether the value of reg is a power of two and not zero.
  // Control falls through if it is, with scratch containing the mask
  // value (reg - 1).
  // Otherwise control jumps to the 'zero_and_neg' label if the value of reg is
  // zero or negative, or jumps to the 'not_power_of_two' label if the value is
  // strictly positive but not a power of two.
  void JumpIfNotPowerOfTwoOrZeroAndNeg(Register reg,
                                       Register scratch,
                                       Label* zero_and_neg,
                                       Label* not_power_of_two);
Steve Block1e0659c2011-05-24 12:43:12 +0100848
  // ---------------------------------------------------------------------------
  // Smi utilities

  // Tag 'reg' as a smi in place, i.e. shift it left by kSmiTagSize.
  // Implemented as reg + reg so that with s == SetCC the V (overflow) flag
  // is set when the value does not fit in a smi (see TrySmiTag below).
  void SmiTag(Register reg, SBit s = LeaveCC) {
    add(reg, reg, Operand(reg), s);
  }
  void SmiTag(Register dst, Register src, SBit s = LeaveCC) {
    add(dst, src, Operand(src), s);
  }

  // Try to convert int32 to smi. If the value is too large, preserve
  // the original value and jump to not_a_smi. Destroys scratch and
  // sets flags.
  void TrySmiTag(Register reg, Label* not_a_smi, Register scratch) {
    mov(scratch, reg);
    SmiTag(scratch, SetCC);  // Sets V on signed overflow.
    b(vs, not_a_smi);        // Overflow: the value does not fit in a smi.
    mov(reg, scratch);
  }

  // Remove the smi tag: arithmetic shift right by kSmiTagSize.
  void SmiUntag(Register reg, SBit s = LeaveCC) {
    mov(reg, Operand(reg, ASR, kSmiTagSize), s);
  }
  void SmiUntag(Register dst, Register src, SBit s = LeaveCC) {
    mov(dst, Operand(src, ASR, kSmiTagSize), s);
  }
Ben Murdochb0fe1622011-05-05 13:52:32 +0100875
  // Jump if the register contains a smi.
  inline void JumpIfSmi(Register value, Label* smi_label) {
    tst(value, Operand(kSmiTagMask));
    b(eq, smi_label);
  }
  // Jump if the register contains a non-smi.
  inline void JumpIfNotSmi(Register value, Label* not_smi_label) {
    tst(value, Operand(kSmiTagMask));
    b(ne, not_smi_label);
  }
  // Jump if either of the registers contain a non-smi.
  void JumpIfNotBothSmi(Register reg1, Register reg2, Label* on_not_both_smi);
  // Jump if either of the registers contain a smi.
  void JumpIfEitherSmi(Register reg1, Register reg2, Label* on_either_smi);

  // Abort execution if argument is a smi. Used in debug code.
  void AbortIfSmi(Register object);
  // Abort execution if argument is not a smi. Used in debug code.
  void AbortIfNotSmi(Register object);

  // Abort execution if argument is not a string. Used in debug code.
  void AbortIfNotString(Register object);

  // Abort execution if argument is not the root value with the given index.
  void AbortIfNotRootValue(Register src,
                           Heap::RootListIndex root_value_index,
                           const char* message);
902
  // ---------------------------------------------------------------------------
  // HeapNumber utilities

  // Jump to 'on_not_heap_number' if the map of 'object' is not
  // 'heap_number_map'. 'scratch' is used as scratch.
  void JumpIfNotHeapNumber(Register object,
                           Register heap_number_map,
                           Register scratch,
                           Label* on_not_heap_number);
Iain Merrick75681382010-08-19 15:07:18 +0100910
  // ---------------------------------------------------------------------------
  // String utilities

  // Checks if both objects are sequential ASCII strings and jumps to label
  // if either is not. Assumes that neither object is a smi.
  void JumpIfNonSmisNotBothSequentialAsciiStrings(Register object1,
                                                  Register object2,
                                                  Register scratch1,
                                                  Register scratch2,
                                                  Label* failure);

  // Checks if both objects are sequential ASCII strings and jumps to label
  // if either is not.
  void JumpIfNotBothSequentialAsciiStrings(Register first,
                                           Register second,
                                           Register scratch1,
                                           Register scratch2,
                                           Label* not_flat_ascii_strings);

  // Checks if both instance types are sequential ASCII strings and jumps to
  // label if either is not.
  void JumpIfBothInstanceTypesAreNotSequentialAscii(
      Register first_object_instance_type,
      Register second_object_instance_type,
      Register scratch1,
      Register scratch2,
      Label* failure);

  // Check if instance type is sequential ASCII string and jump to label if
  // it is not.
  void JumpIfInstanceTypeIsNotSequentialAscii(Register type,
                                              Register scratch,
                                              Label* failure);


  // ---------------------------------------------------------------------------
  // Patching helpers.

  // Get the location of a relocated constant (its address in the constant pool)
  // from its load site.
  void GetRelocatedValueLocation(Register ldr_location,
                                 Register result);
953
954
 private:
  // Shared implementation behind the two public CallCFunction overloads.
  void CallCFunctionHelper(Register function,
                           ExternalReference function_reference,
                           Register scratch,
                           int num_arguments);

  void Jump(intptr_t target, RelocInfo::Mode rmode, Condition cond = al);
  // Size, in bytes, of the code emitted by the corresponding Call below.
  static int CallSize(intptr_t target,
                      RelocInfo::Mode rmode,
                      Condition cond = al);
  void Call(intptr_t target, RelocInfo::Mode rmode, Condition cond = al);

  // Helper functions for generating invokes.
  void InvokePrologue(const ParameterCount& expected,
                      const ParameterCount& actual,
                      Handle<Code> code_constant,
                      Register code_reg,
                      Label* done,
                      InvokeFlag flag,
                      CallWrapper* call_wrapper = NULL);

  // Activation support.
  void EnterFrame(StackFrame::Type type);
  void LeaveFrame(StackFrame::Type type);

  // Initialize the fields (map, length) of a newly allocated string object.
  void InitializeNewString(Register string,
                           Register length,
                           Heap::RootListIndex map_index,
                           Register scratch1,
                           Register scratch2);

  // Compute memory operands for safepoint stack slots.
  static int SafepointRegisterStackIndex(int reg_code);
  MemOperand SafepointRegisterSlot(Register reg);
  MemOperand SafepointRegistersAndDoublesSlot(Register reg);

  bool generating_stub_;   // See set_generating_stub/generating_stub.
  bool allow_stub_calls_;  // See set_allow_stub_calls/allow_stub_calls.
  // This handle will be patched with the code object on installation.
  Handle<Object> code_object_;

  // Needs access to SafepointRegisterStackIndex for optimized frame
  // traversal.
  friend class OptimizedFrame;
Steve Blocka7e24c12009-10-30 11:49:00 +0000999};
1000
1001
1002#ifdef ENABLE_DEBUGGER_SUPPORT
// The code patcher is used to patch (typically) small parts of code e.g. for
// debugging and other types of instrumentation. When using the code patcher
// the exact number of bytes specified must be emitted. It is not legal to emit
// relocation information. If any of these constraints are violated it causes
// an assertion to fail.
class CodePatcher {
 public:
  // 'address' is the start of the code to patch; 'instructions' is the exact
  // number of instructions that must be emitted before destruction.
  CodePatcher(byte* address, int instructions);
  virtual ~CodePatcher();

  // Macro assembler to emit code.
  MacroAssembler* masm() { return &masm_; }

  // Emit an instruction directly.
  void Emit(Instr instr);

  // Emit an address directly.
  void Emit(Address addr);

  // Emit the condition part of an instruction leaving the rest of the current
  // instruction unchanged.
  void EmitCondition(Condition cond);

 private:
  byte* address_;  // The address of the code being patched.
  int instructions_;  // Number of instructions of the expected patch size.
  int size_;  // Number of bytes of the expected patch size.
  MacroAssembler masm_;  // Macro assembler used to generate the code.
};
1032#endif // ENABLE_DEBUGGER_SUPPORT
1033
1034
// Helper class for generating code or data associated with the code
// right after a call instruction. As an example this can be used to
// generate safepoint data after calls for crankshaft.
// Pure-virtual interface; passed to the invoke helpers above
// (e.g. InvokeBuiltin) via their call_wrapper parameter.
class CallWrapper {
 public:
  CallWrapper() { }
  virtual ~CallWrapper() { }
  // Called just before emitting a call. Argument is the size of the generated
  // call code.
  virtual void BeforeCall(int call_size) = 0;
  // Called just after emitting a call, i.e., at the return site for the call.
  virtual void AfterCall() = 0;
};
1048
1049
Steve Blocka7e24c12009-10-30 11:49:00 +00001050// -----------------------------------------------------------------------------
1051// Static helper functions.
1052
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08001053static MemOperand ContextOperand(Register context, int index) {
1054 return MemOperand(context, Context::SlotOffset(index));
1055}
1056
1057
// Returns the memory operand for the global-object slot of the current
// context (cp is used here as the context register).
static inline MemOperand GlobalObjectOperand() {
  return ContextOperand(cp, Context::GLOBAL_INDEX);
}
1061
1062
#ifdef GENERATED_CODE_COVERAGE
// In coverage builds, every ACCESS_MASM(masm) site first emits a stop()
// tagged with the C++ file:line of the site, so executed generated-code
// locations can be recorded.
#define CODE_COVERAGE_STRINGIFY(x) #x
#define CODE_COVERAGE_TOSTRING(x) CODE_COVERAGE_STRINGIFY(x)
#define __FILE_LINE__ __FILE__ ":" CODE_COVERAGE_TOSTRING(__LINE__)
#define ACCESS_MASM(masm) masm->stop(__FILE_LINE__); masm->
#else
#define ACCESS_MASM(masm) masm->
#endif
1071
1072
1073} } // namespace v8::internal
1074
1075#endif // V8_ARM_MACRO_ASSEMBLER_ARM_H_