// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_ARM_MACRO_ASSEMBLER_ARM_H_
#define V8_ARM_MACRO_ASSEMBLER_ARM_H_

#include "assembler.h"

namespace v8 {
namespace internal {

// Forward declaration.
class PostCallGenerator;

// ----------------------------------------------------------------------------
// Static helper functions

// Generate a MemOperand for loading a field from an object.
static inline MemOperand FieldMemOperand(Register object, int offset) {
  return MemOperand(object, offset - kHeapObjectTag);
}
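// Illustrative use (a sketch; register choices are arbitrary and assume the
// usual '__' ACCESS_MASM shorthand defined at the end of this file):
//   __ ldr(r0, FieldMemOperand(r1, HeapObject::kMapOffset));
// loads the map of the tagged object in r1; the helper folds the
// -kHeapObjectTag adjustment into the offset so no untagging is needed.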


static inline Operand SmiUntagOperand(Register object) {
  return Operand(object, ASR, kSmiTagSize);
}



// Give alias names to registers
const Register cp = { 8 };  // JavaScript context pointer
const Register roots = { 10 };  // Roots array pointer.

enum InvokeJSFlags {
  CALL_JS,
  JUMP_JS
};


// Flags used for the AllocateInNewSpace functions.
enum AllocationFlags {
  // No special flags.
  NO_ALLOCATION_FLAGS = 0,
  // Return the allocated pointer already tagged as a heap object.
  TAG_OBJECT = 1 << 0,
  // The content of the result register already contains the allocation top in
  // new space.
  RESULT_CONTAINS_TOP = 1 << 1,
  // Specify that the requested size of the space to allocate is specified in
  // words instead of bytes.
  SIZE_IN_WORDS = 1 << 2
};


// Flags used for the ObjectToDoubleVFPRegister function.
enum ObjectToDoubleFlags {
  // No special flags.
  NO_OBJECT_TO_DOUBLE_FLAGS = 0,
  // Object is known to be a non smi.
  OBJECT_NOT_SMI = 1 << 0,
  // Don't load NaNs or infinities, branch to the non number case instead.
  AVOID_NANS_AND_INFINITIES = 1 << 1
};


// MacroAssembler implements a collection of frequently used macros.
class MacroAssembler: public Assembler {
 public:
  MacroAssembler(void* buffer, int size);

  // Jump, Call, and Ret pseudo instructions implementing inter-working.
  void Jump(Register target, Condition cond = al);
  void Jump(byte* target, RelocInfo::Mode rmode, Condition cond = al);
  void Jump(Handle<Code> code, RelocInfo::Mode rmode, Condition cond = al);
  void Call(Register target, Condition cond = al);
  void Call(byte* target, RelocInfo::Mode rmode, Condition cond = al);
  void Call(Handle<Code> code, RelocInfo::Mode rmode, Condition cond = al);
  void Ret(Condition cond = al);

  // Emit code to discard a non-negative number of pointer-sized elements
  // from the stack, clobbering only the sp register.
  void Drop(int count, Condition cond = al);

  void Ret(int drop, Condition cond = al);

  // Swap two registers.  If the scratch register is omitted then a slightly
  // less efficient form using xor instead of mov is emitted.
  void Swap(Register reg1,
            Register reg2,
            Register scratch = no_reg,
            Condition cond = al);


  void And(Register dst, Register src1, const Operand& src2,
           Condition cond = al);
  void Ubfx(Register dst, Register src, int lsb, int width,
            Condition cond = al);
  void Sbfx(Register dst, Register src, int lsb, int width,
            Condition cond = al);
  void Bfc(Register dst, int lsb, int width, Condition cond = al);
  void Usat(Register dst, int satpos, const Operand& src,
            Condition cond = al);

  void Call(Label* target);
  void Move(Register dst, Handle<Object> value);
  // May do nothing if the registers are identical.
  void Move(Register dst, Register src);
  // Jumps to the label at the index given by the Smi in "index".
  void SmiJumpTable(Register index, Vector<Label*> targets);
  // Load an object from the root table.
  void LoadRoot(Register destination,
                Heap::RootListIndex index,
                Condition cond = al);
  // Store an object to the root table.
  void StoreRoot(Register source,
                 Heap::RootListIndex index,
                 Condition cond = al);


  // Check if object is in new space.
  // scratch can be object itself, but it will be clobbered.
  void InNewSpace(Register object,
                  Register scratch,
                  Condition cond,  // eq for new space, ne otherwise
                  Label* branch);


  // For the page containing |object| mark the region covering [address]
  // dirty. The object address must be in the first 8K of an allocated page.
  void RecordWriteHelper(Register object,
                         Register address,
                         Register scratch);

  // For the page containing |object| mark the region covering
  // [object+offset] dirty. The object address must be in the first 8K
  // of an allocated page.  The 'scratch' registers are used in the
  // implementation and all 3 registers are clobbered by the
  // operation, as well as the ip register. RecordWrite updates the
  // write barrier even when storing smis.
  void RecordWrite(Register object,
                   Operand offset,
                   Register scratch0,
                   Register scratch1);

  // For the page containing |object| mark the region covering
  // [address] dirty. The object address must be in the first 8K of an
  // allocated page.  All 3 registers are clobbered by the operation,
  // as well as the ip register. RecordWrite updates the write barrier
  // even when storing smis.
  void RecordWrite(Register object,
                   Register address,
                   Register scratch);
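  // Illustrative sketch (register choices and kSomeFieldOffset are
  // hypothetical): after storing a pointer with
  //   __ str(r1, MemOperand(r0, kSomeFieldOffset));
  // the matching barrier call would be
  //   __ RecordWrite(r0, Operand(kSomeFieldOffset), r2, r3);
  // which clobbers r0, r2, r3 and ip as documented above.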

  // Push two registers.  Pushes leftmost register first (to highest address).
  void Push(Register src1, Register src2, Condition cond = al) {
    ASSERT(!src1.is(src2));
    if (src1.code() > src2.code()) {
      stm(db_w, sp, src1.bit() | src2.bit(), cond);
    } else {
      str(src1, MemOperand(sp, 4, NegPreIndex), cond);
      str(src2, MemOperand(sp, 4, NegPreIndex), cond);
    }
  }
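  // For example, Push(r1, r2) has the same effect as
  //   str(r1, MemOperand(sp, 4, NegPreIndex));
  //   str(r2, MemOperand(sp, 4, NegPreIndex));
  // but emits a single stm when the register numbering allows it.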

  // Push three registers.  Pushes leftmost register first (to highest address).
  void Push(Register src1, Register src2, Register src3, Condition cond = al) {
    ASSERT(!src1.is(src2));
    ASSERT(!src2.is(src3));
    ASSERT(!src1.is(src3));
    if (src1.code() > src2.code()) {
      if (src2.code() > src3.code()) {
        stm(db_w, sp, src1.bit() | src2.bit() | src3.bit(), cond);
      } else {
        stm(db_w, sp, src1.bit() | src2.bit(), cond);
        str(src3, MemOperand(sp, 4, NegPreIndex), cond);
      }
    } else {
      str(src1, MemOperand(sp, 4, NegPreIndex), cond);
      Push(src2, src3, cond);
    }
  }

  // Push four registers.  Pushes leftmost register first (to highest address).
  void Push(Register src1, Register src2,
            Register src3, Register src4, Condition cond = al) {
    ASSERT(!src1.is(src2));
    ASSERT(!src2.is(src3));
    ASSERT(!src1.is(src3));
    ASSERT(!src1.is(src4));
    ASSERT(!src2.is(src4));
    ASSERT(!src3.is(src4));
    if (src1.code() > src2.code()) {
      if (src2.code() > src3.code()) {
        if (src3.code() > src4.code()) {
          stm(db_w,
              sp,
              src1.bit() | src2.bit() | src3.bit() | src4.bit(),
              cond);
        } else {
          stm(db_w, sp, src1.bit() | src2.bit() | src3.bit(), cond);
          str(src4, MemOperand(sp, 4, NegPreIndex), cond);
        }
      } else {
        stm(db_w, sp, src1.bit() | src2.bit(), cond);
        Push(src3, src4, cond);
      }
    } else {
      str(src1, MemOperand(sp, 4, NegPreIndex), cond);
      Push(src2, src3, src4, cond);
    }
  }

  // Push and pop the registers that can hold pointers, as defined by the
  // RegList constant kSafepointSavedRegisters.
  void PushSafepointRegisters();
  void PopSafepointRegisters();
  void PushSafepointRegistersAndDoubles();
  void PopSafepointRegistersAndDoubles();
  void StoreToSafepointRegisterSlot(Register reg);
  void StoreToSafepointRegistersAndDoublesSlot(Register reg);
  void LoadFromSafepointRegisterSlot(Register reg);
  static int SafepointRegisterStackIndex(int reg_code);
  static MemOperand SafepointRegisterSlot(Register reg);
  static MemOperand SafepointRegistersAndDoublesSlot(Register reg);

  // Load two consecutive registers with two consecutive memory locations.
  void Ldrd(Register dst1,
            Register dst2,
            const MemOperand& src,
            Condition cond = al);

  // Store two consecutive registers to two consecutive memory locations.
  void Strd(Register src1,
            Register src2,
            const MemOperand& dst,
            Condition cond = al);

  // Clear specified FPSCR bits.
  void ClearFPSCRBits(const uint32_t bits_to_clear,
                      const Register scratch,
                      const Condition cond = al);

  // Compare double values and move the result to the normal condition flags.
  void VFPCompareAndSetFlags(const DwVfpRegister src1,
                             const DwVfpRegister src2,
                             const Condition cond = al);
  void VFPCompareAndSetFlags(const DwVfpRegister src1,
                             const double src2,
                             const Condition cond = al);

  // Compare double values and then load the fpscr flags to a register.
  void VFPCompareAndLoadFlags(const DwVfpRegister src1,
                              const DwVfpRegister src2,
                              const Register fpscr_flags,
                              const Condition cond = al);
  void VFPCompareAndLoadFlags(const DwVfpRegister src1,
                              const double src2,
                              const Register fpscr_flags,
                              const Condition cond = al);


  // ---------------------------------------------------------------------------
  // Activation frames

  void EnterInternalFrame() { EnterFrame(StackFrame::INTERNAL); }
  void LeaveInternalFrame() { LeaveFrame(StackFrame::INTERNAL); }

  void EnterConstructFrame() { EnterFrame(StackFrame::CONSTRUCT); }
  void LeaveConstructFrame() { LeaveFrame(StackFrame::CONSTRUCT); }

  // Enter exit frame.
  // stack_space - extra stack space, used for alignment before call to C.
  void EnterExitFrame(bool save_doubles, int stack_space = 0);

  // Leave the current exit frame. Expects the return value in r0.
  void LeaveExitFrame(bool save_doubles);

  // Get the actual activation frame alignment for target environment.
  static int ActivationFrameAlignment();

  void LoadContext(Register dst, int context_chain_length);

  void LoadGlobalFunction(int index, Register function);

  // Load the initial map from the global function. The registers
  // function and map can be the same, function is then overwritten.
  void LoadGlobalFunctionInitialMap(Register function,
                                    Register map,
                                    Register scratch);

  // ---------------------------------------------------------------------------
  // JavaScript invokes

  // Invoke the JavaScript function code by either calling or jumping.
  void InvokeCode(Register code,
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  InvokeFlag flag,
                  PostCallGenerator* post_call_generator = NULL);

  void InvokeCode(Handle<Code> code,
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  RelocInfo::Mode rmode,
                  InvokeFlag flag);

  // Invoke the JavaScript function in the given register. Changes the
  // current context to the context in the function before invoking.
  void InvokeFunction(Register function,
                      const ParameterCount& actual,
                      InvokeFlag flag,
                      PostCallGenerator* post_call_generator = NULL);

  void InvokeFunction(JSFunction* function,
                      const ParameterCount& actual,
                      InvokeFlag flag);

  void IsObjectJSObjectType(Register heap_object,
                            Register map,
                            Register scratch,
                            Label* fail);

  void IsInstanceJSObjectType(Register map,
                              Register scratch,
                              Label* fail);

  void IsObjectJSStringType(Register object,
                            Register scratch,
                            Label* fail);

#ifdef ENABLE_DEBUGGER_SUPPORT
  // ---------------------------------------------------------------------------
  // Debugger Support

  void DebugBreak();
#endif

  // ---------------------------------------------------------------------------
  // Exception handling

  // Push a new try handler and link into try handler chain.
  // The return address must be passed in register lr.
  // On exit, r0 contains TOS (code slot).
  void PushTryHandler(CodeLocation try_location, HandlerType type);

  // Unlink the stack handler on top of the stack from the try handler chain.
  // Must preserve the result register.
  void PopTryHandler();

  // ---------------------------------------------------------------------------
  // Inline caching support

  // Generate code for checking access rights - used for security checks
  // on access to global objects across environments. The holder register
  // is left untouched, whereas both scratch registers are clobbered.
  void CheckAccessGlobalProxy(Register holder_reg,
                              Register scratch,
                              Label* miss);

  inline void MarkCode(NopMarkerTypes type) {
    nop(type);
  }

  // Check if the given instruction is a 'type' marker.
  // I.e., check if it is a mov r<type>, r<type> (referenced as nop(type)).
  // These instructions are generated to mark special location in the code,
  // like some special IC code.
  static inline bool IsMarkedCode(Instr instr, int type) {
    ASSERT((FIRST_IC_MARKER <= type) && (type < LAST_CODE_MARKER));
    return IsNop(instr, type);
  }


  static inline int GetCodeMarker(Instr instr) {
    int dst_reg_offset = 12;
    int dst_mask = 0xf << dst_reg_offset;
    int src_mask = 0xf;
    int dst_reg = (instr & dst_mask) >> dst_reg_offset;
    int src_reg = instr & src_mask;
    uint32_t non_register_mask = ~(dst_mask | src_mask);
    uint32_t mov_mask = al | 13 << 21;

    // Return <n> if we have a mov rn rn, else return -1.
    int type = ((instr & non_register_mask) == mov_mask) &&
               (dst_reg == src_reg) &&
               (FIRST_IC_MARKER <= dst_reg) && (dst_reg < LAST_CODE_MARKER)
                   ? src_reg
                   : -1;
    ASSERT((type == -1) ||
           ((FIRST_IC_MARKER <= type) && (type < LAST_CODE_MARKER)));
    return type;
  }


  // ---------------------------------------------------------------------------
  // Allocation support

  // Allocate an object in new space. The object_size is specified
  // either in bytes or in words if the allocation flag SIZE_IN_WORDS
  // is passed. If the new space is exhausted control continues at the
  // gc_required label. The allocated object is returned in result. If
  // the TAG_OBJECT flag is passed the result is tagged as
  // a heap object. All registers are clobbered also when control
  // continues at the gc_required label.
  void AllocateInNewSpace(int object_size,
                          Register result,
                          Register scratch1,
                          Register scratch2,
                          Label* gc_required,
                          AllocationFlags flags);
  void AllocateInNewSpace(Register object_size,
                          Register result,
                          Register scratch1,
                          Register scratch2,
                          Label* gc_required,
                          AllocationFlags flags);
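  // Illustrative sketch (kObjectSize is a hypothetical byte count; register
  // choices and the gc_required label are arbitrary):
  //   __ AllocateInNewSpace(kObjectSize, r0, r1, r2, &gc_required, TAG_OBJECT);
  // leaves a tagged pointer to the new object in r0, or jumps to gc_required
  // if new space is exhausted.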

  // Undo allocation in new space. The object passed and objects allocated after
  // it will no longer be allocated. The caller must make sure that no pointers
  // are left to the object(s) no longer allocated as they would be invalid when
  // allocation is undone.
  void UndoAllocationInNewSpace(Register object, Register scratch);


  void AllocateTwoByteString(Register result,
                             Register length,
                             Register scratch1,
                             Register scratch2,
                             Register scratch3,
                             Label* gc_required);
  void AllocateAsciiString(Register result,
                           Register length,
                           Register scratch1,
                           Register scratch2,
                           Register scratch3,
                           Label* gc_required);
  void AllocateTwoByteConsString(Register result,
                                 Register length,
                                 Register scratch1,
                                 Register scratch2,
                                 Label* gc_required);
  void AllocateAsciiConsString(Register result,
                               Register length,
                               Register scratch1,
                               Register scratch2,
                               Label* gc_required);

  // Allocates a heap number or jumps to the gc_required label if the young
  // space is full and a scavenge is needed. All registers are clobbered also
  // when control continues at the gc_required label.
  void AllocateHeapNumber(Register result,
                          Register scratch1,
                          Register scratch2,
                          Register heap_number_map,
                          Label* gc_required);
  void AllocateHeapNumberWithValue(Register result,
                                   DwVfpRegister value,
                                   Register scratch1,
                                   Register scratch2,
                                   Register heap_number_map,
                                   Label* gc_required);

  // Copies a fixed number of fields of heap objects from src to dst.
  void CopyFields(Register dst, Register src, RegList temps, int field_count);

  // ---------------------------------------------------------------------------
  // Support functions.

  // Try to get function prototype of a function and puts the value in
  // the result register. Checks that the function really is a
  // function and jumps to the miss label if the fast checks fail. The
  // function register will be untouched; the other registers may be
  // clobbered.
  void TryGetFunctionPrototype(Register function,
                               Register result,
                               Register scratch,
                               Label* miss);

  // Compare object type for heap object.  heap_object contains a non-Smi
  // whose object type should be compared with the given type.  This both
  // sets the flags and leaves the object type in the type_reg register.
  // It leaves the map in the map register (unless the type_reg and map register
  // are the same register).  It leaves the heap object in the heap_object
  // register unless the heap_object register is the same register as one of the
  // other registers.
  void CompareObjectType(Register heap_object,
                         Register map,
                         Register type_reg,
                         InstanceType type);
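  // Illustrative sketch (registers and the not_string label are arbitrary):
  // branch away unless r0 holds a string, assuming r0 is known to be a heap
  // object:
  //   __ CompareObjectType(r0, r1, r1, FIRST_NONSTRING_TYPE);
  //   __ b(ge, &not_string);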

  // Compare instance type in a map.  map contains a valid map object whose
  // object type should be compared with the given type.  This both
  // sets the flags and leaves the object type in the type_reg register.  It
  // leaves the heap object in the heap_object register unless the heap_object
  // register is the same register as type_reg.
  void CompareInstanceType(Register map,
                           Register type_reg,
                           InstanceType type);


  // Check if the map of an object is equal to a specified map (either
  // given directly or as an index into the root list) and branch to
  // label if not. Skip the smi check if not required (object is known
  // to be a heap object)
  void CheckMap(Register obj,
                Register scratch,
                Handle<Map> map,
                Label* fail,
                bool is_heap_object);

  void CheckMap(Register obj,
                Register scratch,
                Heap::RootListIndex index,
                Label* fail,
                bool is_heap_object);
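  // Illustrative sketch (registers and the not_heap_number label are
  // arbitrary): bail out unless r0 is a heap number; the smi check is
  // performed because is_heap_object is false:
  //   __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, &not_heap_number,
  //               false);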


  // Load and check the instance type of an object for being a string.
  // Loads the type into the second argument register.
  // Returns a condition that will be enabled if the object was a string.
  Condition IsObjectStringType(Register obj,
                               Register type) {
    ldr(type, FieldMemOperand(obj, HeapObject::kMapOffset));
    ldrb(type, FieldMemOperand(type, Map::kInstanceTypeOffset));
    tst(type, Operand(kIsNotStringMask));
    ASSERT_EQ(0, kStringTag);
    return eq;
  }


  // Generates code for reporting that an illegal operation has
  // occurred.
  void IllegalOperation(int num_arguments);

  // Picks out an array index from the hash field.
  // Register use:
  //   hash - holds the index's hash. Clobbered.
  //   index - holds the overwritten index on exit.
  void IndexFromHash(Register hash, Register index);

  // Get the number of least significant bits from a register
  void GetLeastBitsFromSmi(Register dst, Register src, int num_least_bits);
  void GetLeastBitsFromInt32(Register dst, Register src, int num_least_bits);

  // Uses VFP instructions to Convert a Smi to a double.
  void IntegerToDoubleConversionWithVFP3(Register inReg,
                                         Register outHighReg,
                                         Register outLowReg);

  // Load the value of a number object into a VFP double register. If the object
  // is not a number a jump to the label not_number is performed and the VFP
  // double register is unchanged.
  void ObjectToDoubleVFPRegister(
      Register object,
      DwVfpRegister value,
      Register scratch1,
      Register scratch2,
      Register heap_number_map,
      SwVfpRegister scratch3,
      Label* not_number,
      ObjectToDoubleFlags flags = NO_OBJECT_TO_DOUBLE_FLAGS);

  // Load the value of a smi object into a VFP double register. The register
  // scratch1 can be the same register as smi in which case smi will hold the
  // untagged value afterwards.
  void SmiToDoubleVFPRegister(Register smi,
                              DwVfpRegister value,
                              Register scratch1,
                              SwVfpRegister scratch2);
  // Convert the HeapNumber pointed to by source to a 32-bit signed integer in
  // dest. If the HeapNumber does not fit into a 32-bit signed integer, branch
  // to the not_int32 label. If VFP3 is available, double_scratch is used but
  // not scratch2.
  void ConvertToInt32(Register source,
                      Register dest,
                      Register scratch,
                      Register scratch2,
                      DwVfpRegister double_scratch,
                      Label* not_int32);

  // Count leading zeros in a 32 bit word.  On ARM5 and later it uses the clz
  // instruction.  On pre-ARM5 hardware this routine gives the wrong answer
  // for 0 (31 instead of 32).  Source and scratch can be the same in which case
  // the source is clobbered.  Source and zeros can also be the same in which
  // case scratch should be a different register.
  void CountLeadingZeros(Register zeros,
                         Register source,
                         Register scratch);

  // ---------------------------------------------------------------------------
  // Runtime calls

  // Call a code stub.
  void CallStub(CodeStub* stub, Condition cond = al);

  // Call a code stub.
  void TailCallStub(CodeStub* stub, Condition cond = al);

  // Tail call a code stub (jump) and return the code object called.  Try to
  // generate the code if necessary.  Do not perform a GC but instead return
  // a retry after GC failure.
  MUST_USE_RESULT MaybeObject* TryTailCallStub(CodeStub* stub,
                                               Condition cond = al);

  // Call a runtime routine.
  void CallRuntime(Runtime::Function* f, int num_arguments);
  void CallRuntimeSaveDoubles(Runtime::FunctionId id);

  // Convenience function: Same as above, but takes the fid instead.
  void CallRuntime(Runtime::FunctionId fid, int num_arguments);

  // Convenience function: call an external reference.
  void CallExternalReference(const ExternalReference& ext,
                             int num_arguments);

  // Tail call of a runtime routine (jump).
  // Like JumpToExternalReference, but also takes care of passing the number
  // of parameters.
  void TailCallExternalReference(const ExternalReference& ext,
                                 int num_arguments,
                                 int result_size);

  // Tail call of a runtime routine (jump). Try to generate the code if
  // necessary. Do not perform a GC but instead return a retry after GC
  // failure.
  MUST_USE_RESULT MaybeObject* TryTailCallExternalReference(
      const ExternalReference& ext, int num_arguments, int result_size);

  // Convenience function: tail call a runtime routine (jump).
  void TailCallRuntime(Runtime::FunctionId fid,
                       int num_arguments,
                       int result_size);

  // Before calling a C-function from generated code, align arguments on stack.
  // After aligning the frame, non-register arguments must be stored in
  // sp[0], sp[4], etc., not pushed. The argument count assumes all arguments
  // are word sized.
  // Some compilers/platforms require the stack to be aligned when calling
  // C++ code.
  // Needs a scratch register to do some arithmetic. This register will be
  // trashed.
  void PrepareCallCFunction(int num_arguments, Register scratch);

  // Calls a C function and cleans up the space for arguments allocated
  // by PrepareCallCFunction. The called function is not allowed to trigger a
  // garbage collection, since that might move the code and invalidate the
  // return address (unless this is somehow accounted for by the called
  // function).
  void CallCFunction(ExternalReference function, int num_arguments);
  void CallCFunction(Register function, int num_arguments);
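  // Illustrative sketch of a two-argument C call ('ref', arg0 and arg1 are
  // hypothetical; register choices are arbitrary):
  //   __ PrepareCallCFunction(2, r5);   // r5 used as scratch
  //   __ mov(r0, Operand(arg0));        // first arguments go in r0..r3
  //   __ mov(r1, Operand(arg1));
  //   __ CallCFunction(ref, 2);         // 'ref' is an ExternalReference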

  // Calls an API function. Allocates HandleScope, extracts returned value
  // from handle and propagates exceptions. Restores context.
  // stack_space - space to be unwound on exit (includes the call js
  // arguments space and the additional space allocated for the fast call).
  MaybeObject* TryCallApiFunctionAndReturn(ApiFunction* function,
                                           int stack_space);

  // Jump to a runtime routine.
  void JumpToExternalReference(const ExternalReference& builtin);

  MaybeObject* TryJumpToExternalReference(const ExternalReference& ext);

  // Invoke specified builtin JavaScript function. Adds an entry to
  // the unresolved list if the name does not resolve.
  void InvokeBuiltin(Builtins::JavaScript id,
                     InvokeJSFlags flags,
                     PostCallGenerator* post_call_generator = NULL);

  // Store the code object for the given builtin in the target register and
  // setup the function in r1.
  void GetBuiltinEntry(Register target, Builtins::JavaScript id);

  // Store the function for the given builtin in the target register.
  void GetBuiltinFunction(Register target, Builtins::JavaScript id);

  Handle<Object> CodeObject() { return code_object_; }


  // ---------------------------------------------------------------------------
  // StatsCounter support

  void SetCounter(StatsCounter* counter, int value,
                  Register scratch1, Register scratch2);
  void IncrementCounter(StatsCounter* counter, int value,
                        Register scratch1, Register scratch2);
  void DecrementCounter(StatsCounter* counter, int value,
                        Register scratch1, Register scratch2);


  // ---------------------------------------------------------------------------
  // Debugging

  // Calls Abort(msg) if the condition cond is not satisfied.
  // Use --debug_code to enable.
  void Assert(Condition cond, const char* msg);
  void AssertRegisterIsRoot(Register reg, Heap::RootListIndex index);
  void AssertFastElements(Register elements);

  // Like Assert(), but always enabled.
  void Check(Condition cond, const char* msg);

  // Print a message to stdout and abort execution.
  void Abort(const char* msg);

  // Verify restrictions about code generated in stubs.
  void set_generating_stub(bool value) { generating_stub_ = value; }
  bool generating_stub() { return generating_stub_; }
  void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
  bool allow_stub_calls() { return allow_stub_calls_; }

  // ---------------------------------------------------------------------------
  // Number utilities

  // Check whether the value of reg is a power of two and not zero. If not
  // control continues at the label not_power_of_two. If reg is a power of two
  // the register scratch contains the value of (reg - 1) when control falls
  // through.
  void JumpIfNotPowerOfTwoOrZero(Register reg,
                                 Register scratch,
                                 Label* not_power_of_two_or_zero);

  // ---------------------------------------------------------------------------
  // Smi utilities

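  // Smis are 32-bit values shifted left by kSmiTagSize (one bit) with a zero
  // tag, so the tagging helpers below simply add a register to itself.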
  void SmiTag(Register reg, SBit s = LeaveCC) {
    add(reg, reg, Operand(reg), s);
  }
  void SmiTag(Register dst, Register src, SBit s = LeaveCC) {
    add(dst, src, Operand(src), s);
  }

  // Try to convert int32 to smi. If the value is too large, preserve
  // the original value and jump to not_a_smi. Destroys scratch and
  // sets flags.
  void TrySmiTag(Register reg, Label* not_a_smi, Register scratch) {
    mov(scratch, reg);
    SmiTag(scratch, SetCC);
    b(vs, not_a_smi);
    mov(reg, scratch);
  }

  void SmiUntag(Register reg) {
    mov(reg, Operand(reg, ASR, kSmiTagSize));
  }
  void SmiUntag(Register dst, Register src) {
    mov(dst, Operand(src, ASR, kSmiTagSize));
  }

  // Jump if the register contains a smi.
  inline void JumpIfSmi(Register value, Label* smi_label) {
    tst(value, Operand(kSmiTagMask));
    b(eq, smi_label);
  }
  // Jump if the register contains a non-smi.
  inline void JumpIfNotSmi(Register value, Label* not_smi_label) {
    tst(value, Operand(kSmiTagMask));
    b(ne, not_smi_label);
  }
  // Jump if either of the registers contain a non-smi.
  void JumpIfNotBothSmi(Register reg1, Register reg2, Label* on_not_both_smi);
  // Jump if either of the registers contain a smi.
  void JumpIfEitherSmi(Register reg1, Register reg2, Label* on_either_smi);

  // Abort execution if argument is a smi. Used in debug code.
  void AbortIfSmi(Register object);
  void AbortIfNotSmi(Register object);

  // Abort execution if argument is not the root value with the given index.
  void AbortIfNotRootValue(Register src,
                           Heap::RootListIndex root_value_index,
                           const char* message);

  // ---------------------------------------------------------------------------
  // HeapNumber utilities

  void JumpIfNotHeapNumber(Register object,
                           Register heap_number_map,
                           Register scratch,
                           Label* on_not_heap_number);

  // ---------------------------------------------------------------------------
  // String utilities

  // Checks if both objects are sequential ASCII strings and jumps to label
  // if either is not. Assumes that neither object is a smi.
  void JumpIfNonSmisNotBothSequentialAsciiStrings(Register object1,
                                                  Register object2,
                                                  Register scratch1,
                                                  Register scratch2,
                                                  Label* failure);

  // Checks if both objects are sequential ASCII strings and jumps to label
  // if either is not.
  void JumpIfNotBothSequentialAsciiStrings(Register first,
                                           Register second,
                                           Register scratch1,
                                           Register scratch2,
                                           Label* not_flat_ascii_strings);

  // Checks if both instance types are sequential ASCII strings and jumps to
  // label if either is not.
  void JumpIfBothInstanceTypesAreNotSequentialAscii(
      Register first_object_instance_type,
      Register second_object_instance_type,
      Register scratch1,
      Register scratch2,
      Label* failure);

  // Check if instance type is sequential ASCII string and jump to label if
  // it is not.
  void JumpIfInstanceTypeIsNotSequentialAscii(Register type,
                                              Register scratch,
                                              Label* failure);


  // ---------------------------------------------------------------------------
  // Patching helpers.

  // Get the location of a relocated constant (its address in the constant pool)
  // from its load site.
  void GetRelocatedValueLocation(Register ldr_location,
                                 Register result);


 private:
  void Jump(intptr_t target, RelocInfo::Mode rmode, Condition cond = al);
  void Call(intptr_t target, RelocInfo::Mode rmode, Condition cond = al);

  // Helper functions for generating invokes.
  void InvokePrologue(const ParameterCount& expected,
                      const ParameterCount& actual,
                      Handle<Code> code_constant,
                      Register code_reg,
                      Label* done,
                      InvokeFlag flag,
                      PostCallGenerator* post_call_generator = NULL);

  // Activation support.
  void EnterFrame(StackFrame::Type type);
  void LeaveFrame(StackFrame::Type type);

  void InitializeNewString(Register string,
                           Register length,
                           Heap::RootListIndex map_index,
                           Register scratch1,
                           Register scratch2);

  bool generating_stub_;
  bool allow_stub_calls_;
  // This handle will be patched with the code object on installation.
  Handle<Object> code_object_;
};


#ifdef ENABLE_DEBUGGER_SUPPORT
// The code patcher is used to patch (typically) small parts of code e.g. for
// debugging and other types of instrumentation. When using the code patcher
// the exact number of bytes specified must be emitted. It is not legal to emit
// relocation information. If any of these constraints are violated it causes
// an assertion to fail.
class CodePatcher {
 public:
  CodePatcher(byte* address, int instructions);
  virtual ~CodePatcher();

  // Macro assembler to emit code.
  MacroAssembler* masm() { return &masm_; }

  // Emit an instruction directly.
  void Emit(Instr instr);

  // Emit an address directly.
  void Emit(Address addr);

  // Emit the condition part of an instruction leaving the rest of the current
  // instruction unchanged.
  void EmitCondition(Condition cond);

 private:
  byte* address_;  // The address of the code being patched.
  int instructions_;  // Number of instructions of the expected patch size.
  int size_;  // Number of bytes of the expected patch size.
  MacroAssembler masm_;  // Macro assembler used to generate the code.
};
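// Illustrative sketch (patch_address is a hypothetical byte* into existing
// code): overwrite exactly one instruction in place:
//   CodePatcher patcher(patch_address, 1);
//   patcher.masm()->mov(r0, Operand(0));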
#endif  // ENABLE_DEBUGGER_SUPPORT


// Helper class for generating code or data associated with the code
// right after a call instruction. As an example this can be used to
// generate safepoint data after calls for crankshaft.
class PostCallGenerator {
 public:
  PostCallGenerator() { }
  virtual ~PostCallGenerator() { }
  virtual void Generate() = 0;
};


// -----------------------------------------------------------------------------
// Static helper functions.

static MemOperand ContextOperand(Register context, int index) {
  return MemOperand(context, Context::SlotOffset(index));
}


static inline MemOperand GlobalObjectOperand() {
  return ContextOperand(cp, Context::GLOBAL_INDEX);
}
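// For example, the global object can be loaded with
//   __ ldr(r0, GlobalObjectOperand());
// assuming cp holds the current context (the destination register is
// arbitrary).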


#ifdef GENERATED_CODE_COVERAGE
#define CODE_COVERAGE_STRINGIFY(x) #x
#define CODE_COVERAGE_TOSTRING(x) CODE_COVERAGE_STRINGIFY(x)
#define __FILE_LINE__ __FILE__ ":" CODE_COVERAGE_TOSTRING(__LINE__)
#define ACCESS_MASM(masm) masm->stop(__FILE_LINE__); masm->
#else
#define ACCESS_MASM(masm) masm->
#endif


} }  // namespace v8::internal

#endif  // V8_ARM_MACRO_ASSEMBLER_ARM_H_