blob: 8bd134c38ef8d55e390a9eb3978c6303b14c0bd4 [file] [log] [blame]
Steve Blocka7e24c12009-10-30 11:49:00 +00001// Copyright 2006-2009 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#ifndef V8_ARM_MACRO_ASSEMBLER_ARM_H_
29#define V8_ARM_MACRO_ASSEMBLER_ARM_H_
30
31#include "assembler.h"
32
33namespace v8 {
34namespace internal {
35
Andrei Popescu31002712010-02-23 13:46:05 +000036// ----------------------------------------------------------------------------
37// Static helper functions
38
// Generate a MemOperand for loading a field from an object.
// Heap object pointers are tagged (kHeapObjectTag), so the tag is subtracted
// from the field offset to address the underlying raw memory.
static inline MemOperand FieldMemOperand(Register object, int offset) {
  return MemOperand(object, offset - kHeapObjectTag);
}
43
Steve Blocka7e24c12009-10-30 11:49:00 +000044
// Give alias names to registers with a fixed role in generated code.
const Register cp = { 8 };  // JavaScript context pointer (r8).
const Register roots = { 10 };  // Roots array pointer (r10).
Steve Blocka7e24c12009-10-30 11:49:00 +000048
// Flags passed to InvokeBuiltin to select whether the builtin is entered
// with a call (returns here) or a jump (tail-call, does not return here).
enum InvokeJSFlags {
  CALL_JS,
  JUMP_JS
};
53
54
// Flags used for the AllocateInNewSpace functions. May be combined with
// bitwise-or.
enum AllocationFlags {
  // No special flags.
  NO_ALLOCATION_FLAGS = 0,
  // Return the pointer to the allocated already tagged as a heap object.
  TAG_OBJECT = 1 << 0,
  // The content of the result register already contains the allocation top in
  // new space.
  RESULT_CONTAINS_TOP = 1 << 1,
  // Specify that the requested size of the space to allocate is specified in
  // words instead of bytes.
  SIZE_IN_WORDS = 1 << 2
};
68
69
// Flags used for the ObjectToDoubleVFPRegister function. May be combined
// with bitwise-or.
enum ObjectToDoubleFlags {
  // No special flags.
  NO_OBJECT_TO_DOUBLE_FLAGS = 0,
  // Object is known to be a non smi, so the smi check can be skipped.
  OBJECT_NOT_SMI = 1 << 0,
  // Don't load NaNs or infinities, branch to the non number case instead.
  AVOID_NANS_AND_INFINITIES = 1 << 1
};
79
80
// MacroAssembler implements a collection of frequently used macros.
class MacroAssembler: public Assembler {
 public:
  MacroAssembler(void* buffer, int size);

  // Jump, Call, and Ret pseudo instructions implementing inter-working.
  void Jump(Register target, Condition cond = al);
  void Jump(byte* target, RelocInfo::Mode rmode, Condition cond = al);
  void Jump(Handle<Code> code, RelocInfo::Mode rmode, Condition cond = al);
  void Call(Register target, Condition cond = al);
  void Call(byte* target, RelocInfo::Mode rmode, Condition cond = al);
  void Call(Handle<Code> code, RelocInfo::Mode rmode, Condition cond = al);
  void Ret(Condition cond = al);

  // Emit code to discard a non-negative number of pointer-sized elements
  // from the stack, clobbering only the sp register.
  void Drop(int count, Condition cond = al);


  // Swap two registers. If the scratch register is omitted then a slightly
  // less efficient form using xor instead of mov is emitted.
  void Swap(Register reg1,
            Register reg2,
            Register scratch = no_reg,
            Condition cond = al);


  // Bit-field and saturation helpers (dispatch to the dedicated instruction
  // where the target architecture provides one).
  void And(Register dst, Register src1, const Operand& src2,
           Condition cond = al);
  void Ubfx(Register dst, Register src, int lsb, int width,
            Condition cond = al);
  void Sbfx(Register dst, Register src, int lsb, int width,
            Condition cond = al);
  void Bfc(Register dst, int lsb, int width, Condition cond = al);
  void Usat(Register dst, int satpos, const Operand& src,
            Condition cond = al);

  void Call(Label* target);
  void Move(Register dst, Handle<Object> value);
  // May do nothing if the registers are identical.
  void Move(Register dst, Register src);
  // Jumps to the label at the index given by the Smi in "index".
  void SmiJumpTable(Register index, Vector<Label*> targets);
  // Load an object from the root table.
  void LoadRoot(Register destination,
                Heap::RootListIndex index,
                Condition cond = al);
  // Store an object to the root table.
  void StoreRoot(Register source,
                 Heap::RootListIndex index,
                 Condition cond = al);


  // Check if object is in new space.
  // scratch can be object itself, but it will be clobbered.
  void InNewSpace(Register object,
                  Register scratch,
                  Condition cc,  // eq for new space, ne otherwise
                  Label* branch);


  // For the page containing |object| mark the region covering [address]
  // dirty. The object address must be in the first 8K of an allocated page.
  void RecordWriteHelper(Register object,
                         Register address,
                         Register scratch);

  // For the page containing |object| mark the region covering
  // [object+offset] dirty. The object address must be in the first 8K
  // of an allocated page. The 'scratch' registers are used in the
  // implementation and all 3 registers are clobbered by the
  // operation, as well as the ip register. RecordWrite updates the
  // write barrier even when storing smis.
  void RecordWrite(Register object,
                   Operand offset,
                   Register scratch0,
                   Register scratch1);

  // For the page containing |object| mark the region covering
  // [address] dirty. The object address must be in the first 8K of an
  // allocated page. All 3 registers are clobbered by the operation,
  // as well as the ip register. RecordWrite updates the write barrier
  // even when storing smis.
  void RecordWrite(Register object,
                   Register address,
                   Register scratch);

  // Push two registers. Pushes leftmost register first (to highest address).
  // Uses a single stm when the registers happen to be in ascending-code
  // order, otherwise falls back to individual pre-indexed stores.
  void Push(Register src1, Register src2, Condition cond = al) {
    ASSERT(!src1.is(src2));
    if (src1.code() > src2.code()) {
      stm(db_w, sp, src1.bit() | src2.bit(), cond);
    } else {
      str(src1, MemOperand(sp, 4, NegPreIndex), cond);
      str(src2, MemOperand(sp, 4, NegPreIndex), cond);
    }
  }

  // Push three registers. Pushes leftmost register first (to highest address).
  void Push(Register src1, Register src2, Register src3, Condition cond = al) {
    ASSERT(!src1.is(src2));
    ASSERT(!src2.is(src3));
    ASSERT(!src1.is(src3));
    if (src1.code() > src2.code()) {
      if (src2.code() > src3.code()) {
        stm(db_w, sp, src1.bit() | src2.bit() | src3.bit(), cond);
      } else {
        stm(db_w, sp, src1.bit() | src2.bit(), cond);
        str(src3, MemOperand(sp, 4, NegPreIndex), cond);
      }
    } else {
      str(src1, MemOperand(sp, 4, NegPreIndex), cond);
      Push(src2, src3, cond);
    }
  }

  // Push four registers. Pushes leftmost register first (to highest address).
  void Push(Register src1, Register src2,
            Register src3, Register src4, Condition cond = al) {
    ASSERT(!src1.is(src2));
    ASSERT(!src2.is(src3));
    ASSERT(!src1.is(src3));
    ASSERT(!src1.is(src4));
    ASSERT(!src2.is(src4));
    ASSERT(!src3.is(src4));
    if (src1.code() > src2.code()) {
      if (src2.code() > src3.code()) {
        if (src3.code() > src4.code()) {
          stm(db_w,
              sp,
              src1.bit() | src2.bit() | src3.bit() | src4.bit(),
              cond);
        } else {
          stm(db_w, sp, src1.bit() | src2.bit() | src3.bit(), cond);
          str(src4, MemOperand(sp, 4, NegPreIndex), cond);
        }
      } else {
        stm(db_w, sp, src1.bit() | src2.bit(), cond);
        Push(src3, src4, cond);
      }
    } else {
      str(src1, MemOperand(sp, 4, NegPreIndex), cond);
      Push(src2, src3, src4, cond);
    }
  }

  // Load two consecutive registers with two consecutive memory locations.
  void Ldrd(Register dst1,
            Register dst2,
            const MemOperand& src,
            Condition cond = al);

  // Store two consecutive registers to two consecutive memory locations.
  void Strd(Register src1,
            Register src2,
            const MemOperand& dst,
            Condition cond = al);

  // ---------------------------------------------------------------------------
  // Stack limit support

  void StackLimitCheck(Label* on_stack_limit_hit);

  // ---------------------------------------------------------------------------
  // Activation frames

  void EnterInternalFrame() { EnterFrame(StackFrame::INTERNAL); }
  void LeaveInternalFrame() { LeaveFrame(StackFrame::INTERNAL); }

  void EnterConstructFrame() { EnterFrame(StackFrame::CONSTRUCT); }
  void LeaveConstructFrame() { LeaveFrame(StackFrame::CONSTRUCT); }

  // Enter exit frame.
  // Expects the number of arguments in register r0 and
  // the builtin function to call in register r1. Exits with argc in
  // r4, argv in r6, and the builtin function to call in r5.
  void EnterExitFrame();

  // Leave the current exit frame. Expects the return value in r0.
  void LeaveExitFrame();

  // Get the actual activation frame alignment for target environment.
  static int ActivationFrameAlignment();

  void LoadContext(Register dst, int context_chain_length);

  void LoadGlobalFunction(int index, Register function);

  // Load the initial map from the global function. The registers
  // function and map can be the same, function is then overwritten.
  void LoadGlobalFunctionInitialMap(Register function,
                                    Register map,
                                    Register scratch);

  // ---------------------------------------------------------------------------
  // JavaScript invokes

  // Invoke the JavaScript function code by either calling or jumping.
  void InvokeCode(Register code,
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  InvokeFlag flag);

  void InvokeCode(Handle<Code> code,
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  RelocInfo::Mode rmode,
                  InvokeFlag flag);

  // Invoke the JavaScript function in the given register. Changes the
  // current context to the context in the function before invoking.
  void InvokeFunction(Register function,
                      const ParameterCount& actual,
                      InvokeFlag flag);

  void InvokeFunction(JSFunction* function,
                      const ParameterCount& actual,
                      InvokeFlag flag);


#ifdef ENABLE_DEBUGGER_SUPPORT
  // ---------------------------------------------------------------------------
  // Debugger Support

  void DebugBreak();
#endif

  // ---------------------------------------------------------------------------
  // Exception handling

  // Push a new try handler and link into try handler chain.
  // The return address must be passed in register lr.
  // On exit, r0 contains TOS (code slot).
  void PushTryHandler(CodeLocation try_location, HandlerType type);

  // Unlink the stack handler on top of the stack from the try handler chain.
  // Must preserve the result register.
  void PopTryHandler();

  // ---------------------------------------------------------------------------
  // Inline caching support

  // Generate code for checking access rights - used for security checks
  // on access to global objects across environments. The holder register
  // is left untouched, whereas both scratch registers are clobbered.
  void CheckAccessGlobalProxy(Register holder_reg,
                              Register scratch,
                              Label* miss);

  // Emit a marker nop of the given type at the current position.
  inline void MarkCode(NopMarkerTypes type) {
    nop(type);
  }

  // Check if the given instruction is a 'type' marker.
  // i.e. check if it is a mov r<type>, r<type> (referenced as nop(type)).
  // These instructions are generated to mark special location in the code,
  // like some special IC code.
  static inline bool IsMarkedCode(Instr instr, int type) {
    ASSERT((FIRST_IC_MARKER <= type) && (type < LAST_CODE_MARKER));
    return IsNop(instr, type);
  }


  // Extract the code-marker type encoded in an instruction, or -1 when the
  // instruction is not a mov rn, rn marker in the valid marker range.
  static inline int GetCodeMarker(Instr instr) {
    int dst_reg_offset = 12;
    int dst_mask = 0xf << dst_reg_offset;
    int src_mask = 0xf;
    int dst_reg = (instr & dst_mask) >> dst_reg_offset;
    int src_reg = instr & src_mask;
    uint32_t non_register_mask = ~(dst_mask | src_mask);
    uint32_t mov_mask = al | 13 << 21;

    // Return <n> if we have a mov rn rn, else return -1.
    int type = ((instr & non_register_mask) == mov_mask) &&
               (dst_reg == src_reg) &&
               (FIRST_IC_MARKER <= dst_reg) && (dst_reg < LAST_CODE_MARKER)
               ? src_reg
               : -1;
    ASSERT((type == -1) ||
           ((FIRST_IC_MARKER <= type) && (type < LAST_CODE_MARKER)));
    return type;
  }


  // ---------------------------------------------------------------------------
  // Allocation support

  // Allocate an object in new space. The object_size is specified in words (not
  // bytes). If the new space is exhausted control continues at the gc_required
  // label. The allocated object is returned in result. If the flag
  // tag_allocated_object is true the result is tagged as a heap object. All
  // registers are clobbered also when control continues at the gc_required
  // label.
  void AllocateInNewSpace(int object_size,
                          Register result,
                          Register scratch1,
                          Register scratch2,
                          Label* gc_required,
                          AllocationFlags flags);
  void AllocateInNewSpace(Register object_size,
                          Register result,
                          Register scratch1,
                          Register scratch2,
                          Label* gc_required,
                          AllocationFlags flags);

  // Undo allocation in new space. The object passed and objects allocated after
  // it will no longer be allocated. The caller must make sure that no pointers
  // are left to the object(s) no longer allocated as they would be invalid when
  // allocation is undone.
  void UndoAllocationInNewSpace(Register object, Register scratch);


  // String allocation helpers. All jump to gc_required on failure and
  // clobber their scratch registers.
  void AllocateTwoByteString(Register result,
                             Register length,
                             Register scratch1,
                             Register scratch2,
                             Register scratch3,
                             Label* gc_required);
  void AllocateAsciiString(Register result,
                           Register length,
                           Register scratch1,
                           Register scratch2,
                           Register scratch3,
                           Label* gc_required);
  void AllocateTwoByteConsString(Register result,
                                 Register length,
                                 Register scratch1,
                                 Register scratch2,
                                 Label* gc_required);
  void AllocateAsciiConsString(Register result,
                               Register length,
                               Register scratch1,
                               Register scratch2,
                               Label* gc_required);

  // Allocates a heap number or jumps to the gc_required label if the young
  // space is full and a scavenge is needed. All registers are clobbered also
  // when control continues at the gc_required label.
  void AllocateHeapNumber(Register result,
                          Register scratch1,
                          Register scratch2,
                          Register heap_number_map,
                          Label* gc_required);
  void AllocateHeapNumberWithValue(Register result,
                                   DwVfpRegister value,
                                   Register scratch1,
                                   Register scratch2,
                                   Register heap_number_map,
                                   Label* gc_required);

  // Copies a fixed number of fields of heap objects from src to dst.
  void CopyFields(Register dst, Register src, RegList temps, int field_count);

  // ---------------------------------------------------------------------------
  // Support functions.

  // Try to get function prototype of a function and puts the value in
  // the result register. Checks that the function really is a
  // function and jumps to the miss label if the fast checks fail. The
  // function register will be untouched; the other registers may be
  // clobbered.
  void TryGetFunctionPrototype(Register function,
                               Register result,
                               Register scratch,
                               Label* miss);

  // Compare object type for heap object. heap_object contains a non-Smi
  // whose object type should be compared with the given type. This both
  // sets the flags and leaves the object type in the type_reg register.
  // It leaves the map in the map register (unless the type_reg and map register
  // are the same register). It leaves the heap object in the heap_object
  // register unless the heap_object register is the same register as one of the
  // other registers.
  void CompareObjectType(Register heap_object,
                         Register map,
                         Register type_reg,
                         InstanceType type);

  // Compare instance type in a map. map contains a valid map object whose
  // object type should be compared with the given type. This both
  // sets the flags and leaves the object type in the type_reg register. It
  // leaves the heap object in the heap_object register unless the heap_object
  // register is the same register as type_reg.
  void CompareInstanceType(Register map,
                           Register type_reg,
                           InstanceType type);


  // Check if the map of an object is equal to a specified map (either
  // given directly or as an index into the root list) and branch to
  // label if not. Skip the smi check if not required (object is known
  // to be a heap object).
  void CheckMap(Register obj,
                Register scratch,
                Handle<Map> map,
                Label* fail,
                bool is_heap_object);

  void CheckMap(Register obj,
                Register scratch,
                Heap::RootListIndex index,
                Label* fail,
                bool is_heap_object);


  // Load and check the instance type of an object for being a string.
  // Loads the type into the second argument register.
  // Returns a condition that will be enabled if the object was a string.
  Condition IsObjectStringType(Register obj,
                               Register type) {
    ldr(type, FieldMemOperand(obj, HeapObject::kMapOffset));
    ldrb(type, FieldMemOperand(type, Map::kInstanceTypeOffset));
    tst(type, Operand(kIsNotStringMask));
    ASSERT_EQ(0, kStringTag);
    return eq;
  }


  // Branch to smi_label if value holds a smi (tag bit clear).
  inline void BranchOnSmi(Register value, Label* smi_label) {
    tst(value, Operand(kSmiTagMask));
    b(eq, smi_label);
  }

  // Branch to not_smi_label if value does not hold a smi.
  inline void BranchOnNotSmi(Register value, Label* not_smi_label) {
    tst(value, Operand(kSmiTagMask));
    b(ne, not_smi_label);
  }

  // Generates code for reporting that an illegal operation has
  // occurred.
  void IllegalOperation(int num_arguments);

  // Picks out an array index from the hash field.
  // Register use:
  //   hash - holds the index's hash. Clobbered.
  //   index - holds the overwritten index on exit.
  void IndexFromHash(Register hash, Register index);

  // Get the number of least significant bits from a register.
  void GetLeastBitsFromSmi(Register dst, Register src, int num_least_bits);

  // Uses VFP instructions to convert a Smi to a double.
  void IntegerToDoubleConversionWithVFP3(Register inReg,
                                         Register outHighReg,
                                         Register outLowReg);

  // Load the value of a number object into a VFP double register. If the object
  // is not a number a jump to the label not_number is performed and the VFP
  // double register is unchanged.
  void ObjectToDoubleVFPRegister(
      Register object,
      DwVfpRegister value,
      Register scratch1,
      Register scratch2,
      Register heap_number_map,
      SwVfpRegister scratch3,
      Label* not_number,
      ObjectToDoubleFlags flags = NO_OBJECT_TO_DOUBLE_FLAGS);

  // Load the value of a smi object into a VFP double register. The register
  // scratch1 can be the same register as smi in which case smi will hold the
  // untagged value afterwards.
  void SmiToDoubleVFPRegister(Register smi,
                              DwVfpRegister value,
                              Register scratch1,
                              SwVfpRegister scratch2);

  // Convert the HeapNumber pointed to by source to a 32bits signed integer
  // dest. If the HeapNumber does not fit into a 32bits signed integer branch
  // to not_int32 label.
  void ConvertToInt32(Register source,
                      Register dest,
                      Register scratch,
                      Register scratch2,
                      Label *not_int32);

  // Count leading zeros in a 32 bit word. On ARM5 and later it uses the clz
  // instruction. On pre-ARM5 hardware this routine gives the wrong answer
  // for 0 (31 instead of 32). Source and scratch can be the same in which case
  // the source is clobbered. Source and zeros can also be the same in which
  // case scratch should be a different register.
  void CountLeadingZeros(Register zeros,
                         Register source,
                         Register scratch);

  // ---------------------------------------------------------------------------
  // Runtime calls

  // Call a code stub.
  void CallStub(CodeStub* stub, Condition cond = al);

  // Tail call a code stub (jump).
  void TailCallStub(CodeStub* stub, Condition cond = al);

  // Call a runtime routine.
  void CallRuntime(Runtime::Function* f, int num_arguments);

  // Convenience function: Same as above, but takes the fid instead.
  void CallRuntime(Runtime::FunctionId fid, int num_arguments);

  // Convenience function: call an external reference.
  void CallExternalReference(const ExternalReference& ext,
                             int num_arguments);

  // Tail call of a runtime routine (jump).
  // Like JumpToExternalReference, but also takes care of passing the number
  // of parameters.
  void TailCallExternalReference(const ExternalReference& ext,
                                 int num_arguments,
                                 int result_size);

  // Convenience function: tail call a runtime routine (jump).
  void TailCallRuntime(Runtime::FunctionId fid,
                       int num_arguments,
                       int result_size);

  // Before calling a C-function from generated code, align arguments on stack.
  // After aligning the frame, non-register arguments must be stored in
  // sp[0], sp[4], etc., not pushed. The argument count assumes all arguments
  // are word sized.
  // Some compilers/platforms require the stack to be aligned when calling
  // C++ code.
  // Needs a scratch register to do some arithmetic. This register will be
  // trashed.
  void PrepareCallCFunction(int num_arguments, Register scratch);

  // Calls a C function and cleans up the space for arguments allocated
  // by PrepareCallCFunction. The called function is not allowed to trigger a
  // garbage collection, since that might move the code and invalidate the
  // return address (unless this is somehow accounted for by the called
  // function).
  void CallCFunction(ExternalReference function, int num_arguments);
  void CallCFunction(Register function, int num_arguments);

  // Jump to a runtime routine.
  void JumpToExternalReference(const ExternalReference& builtin);

  // Invoke specified builtin JavaScript function. Adds an entry to
  // the unresolved list if the name does not resolve.
  void InvokeBuiltin(Builtins::JavaScript id, InvokeJSFlags flags);

  // Store the code object for the given builtin in the target register and
  // setup the function in r1.
  void GetBuiltinEntry(Register target, Builtins::JavaScript id);

  // Store the function for the given builtin in the target register.
  void GetBuiltinFunction(Register target, Builtins::JavaScript id);

  Handle<Object> CodeObject() { return code_object_; }


  // ---------------------------------------------------------------------------
  // StatsCounter support

  void SetCounter(StatsCounter* counter, int value,
                  Register scratch1, Register scratch2);
  void IncrementCounter(StatsCounter* counter, int value,
                        Register scratch1, Register scratch2);
  void DecrementCounter(StatsCounter* counter, int value,
                        Register scratch1, Register scratch2);


  // ---------------------------------------------------------------------------
  // Debugging

  // Calls Abort(msg) if the condition cc is not satisfied.
  // Use --debug_code to enable.
  void Assert(Condition cc, const char* msg);
  void AssertRegisterIsRoot(Register reg, Heap::RootListIndex index);
  void AssertFastElements(Register elements);

  // Like Assert(), but always enabled.
  void Check(Condition cc, const char* msg);

  // Print a message to stdout and abort execution.
  void Abort(const char* msg);

  // Verify restrictions about code generated in stubs.
  void set_generating_stub(bool value) { generating_stub_ = value; }
  bool generating_stub() { return generating_stub_; }
  void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
  bool allow_stub_calls() { return allow_stub_calls_; }

  // ---------------------------------------------------------------------------
  // Smi utilities

  // Jump if either of the registers contain a non-smi.
  void JumpIfNotBothSmi(Register reg1, Register reg2, Label* on_not_both_smi);
  // Jump if either of the registers contain a smi.
  void JumpIfEitherSmi(Register reg1, Register reg2, Label* on_either_smi);

  // Abort execution if argument is a smi. Used in debug code.
  void AbortIfSmi(Register object);

  // ---------------------------------------------------------------------------
  // String utilities

  // Checks if both objects are sequential ASCII strings and jumps to label
  // if either is not. Assumes that neither object is a smi.
  void JumpIfNonSmisNotBothSequentialAsciiStrings(Register object1,
                                                  Register object2,
                                                  Register scratch1,
                                                  Register scratch2,
                                                  Label* failure);

  // Checks if both objects are sequential ASCII strings and jumps to label
  // if either is not.
  void JumpIfNotBothSequentialAsciiStrings(Register first,
                                           Register second,
                                           Register scratch1,
                                           Register scratch2,
                                           Label* not_flat_ascii_strings);

  // Checks if both instance types are sequential ASCII strings and jumps to
  // label if either is not.
  void JumpIfBothInstanceTypesAreNotSequentialAscii(
      Register first_object_instance_type,
      Register second_object_instance_type,
      Register scratch1,
      Register scratch2,
      Label* failure);

  // Check if instance type is sequential ASCII string and jump to label if
  // it is not.
  void JumpIfInstanceTypeIsNotSequentialAscii(Register type,
                                              Register scratch,
                                              Label* failure);


 private:
  void Jump(intptr_t target, RelocInfo::Mode rmode, Condition cond = al);
  void Call(intptr_t target, RelocInfo::Mode rmode, Condition cond = al);

  // Helper functions for generating invokes.
  void InvokePrologue(const ParameterCount& expected,
                      const ParameterCount& actual,
                      Handle<Code> code_constant,
                      Register code_reg,
                      Label* done,
                      InvokeFlag flag);

  // Activation support.
  void EnterFrame(StackFrame::Type type);
  void LeaveFrame(StackFrame::Type type);

  void InitializeNewString(Register string,
                           Register length,
                           Heap::RootListIndex map_index,
                           Register scratch1,
                           Register scratch2);

  bool generating_stub_;
  bool allow_stub_calls_;
  // This handle will be patched with the code object on installation.
  Handle<Object> code_object_;
};
738
739
#ifdef ENABLE_DEBUGGER_SUPPORT
// The code patcher is used to patch (typically) small parts of code e.g. for
// debugging and other types of instrumentation. When using the code patcher
// the exact number of bytes specified must be emitted. It is not legal to emit
// relocation information. If any of these constraints are violated it causes
// an assertion to fail.
class CodePatcher {
 public:
  CodePatcher(byte* address, int instructions);
  virtual ~CodePatcher();

  // Macro assembler to emit code.
  MacroAssembler* masm() { return &masm_; }

  // Emit an instruction directly.
  void Emit(Instr x);

  // Emit an address directly.
  void Emit(Address addr);

 private:
  byte* address_;  // The address of the code being patched.
  int instructions_;  // Number of instructions of the expected patch size.
  int size_;  // Number of bytes of the expected patch size.
  MacroAssembler masm_;  // Macro assembler used to generate the code.
};
#endif  // ENABLE_DEBUGGER_SUPPORT
767
768
769// -----------------------------------------------------------------------------
770// Static helper functions.
771
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -0800772static MemOperand ContextOperand(Register context, int index) {
773 return MemOperand(context, Context::SlotOffset(index));
774}
775
776
// Generate a MemOperand for accessing the global object slot of the current
// context, which is held in the cp register.
static inline MemOperand GlobalObjectOperand() {
  return ContextOperand(cp, Context::GLOBAL_INDEX);
}
780
781
// ACCESS_MASM wraps every access to a MacroAssembler. When generated-code
// coverage is enabled it first emits a stop() carrying the file:line of the
// call site so coverage of generated code can be attributed; otherwise it is
// a plain member access.
#ifdef GENERATED_CODE_COVERAGE
#define CODE_COVERAGE_STRINGIFY(x) #x
#define CODE_COVERAGE_TOSTRING(x) CODE_COVERAGE_STRINGIFY(x)
#define __FILE_LINE__ __FILE__ ":" CODE_COVERAGE_TOSTRING(__LINE__)
#define ACCESS_MASM(masm) masm->stop(__FILE_LINE__); masm->
#else
#define ACCESS_MASM(masm) masm->
#endif
790
791
792} } // namespace v8::internal
793
794#endif // V8_ARM_MACRO_ASSEMBLER_ARM_H_