// Copyright 2006-2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_ARM_MACRO_ASSEMBLER_ARM_H_
#define V8_ARM_MACRO_ASSEMBLER_ARM_H_

#include "assembler.h"

namespace v8 {
namespace internal {

// ----------------------------------------------------------------------------
// Static helper functions

// Generate a MemOperand for loading a field from an object.
static inline MemOperand FieldMemOperand(Register object, int offset) {
  return MemOperand(object, offset - kHeapObjectTag);
}
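
// Heap object pointers carry kHeapObjectTag, so the field at byte offset N of
// the untagged object lives at N - kHeapObjectTag from the tagged pointer;
// FieldMemOperand folds that adjustment into the addressing mode. Illustrative
// sketch only (registers are placeholders; assumes the usual '__'
// ACCESS_MASM-style shorthand used in the .cc files):
//   __ ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));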


// Give alias names to registers
const Register cp = { 8 };  // JavaScript context pointer
const Register roots = { 10 };  // Roots array pointer.

enum InvokeJSFlags {
  CALL_JS,
  JUMP_JS
};


// Flags used for the AllocateInNewSpace functions.
enum AllocationFlags {
  // No special flags.
  NO_ALLOCATION_FLAGS = 0,
  // Return the pointer to the allocated object already tagged as a heap
  // object.
  TAG_OBJECT = 1 << 0,
  // The content of the result register already contains the allocation top in
  // new space.
  RESULT_CONTAINS_TOP = 1 << 1,
  // Specify that the requested size of the space to allocate is specified in
  // words instead of bytes.
  SIZE_IN_WORDS = 1 << 2
};
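
// The flags are bit values and may be combined; a caller that wants a tagged
// result and passes the size in words might write (illustrative sketch only):
//   AllocationFlags flags =
//       static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS);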


// Flags used for the ObjectToDoubleVFPRegister function.
enum ObjectToDoubleFlags {
  // No special flags.
  NO_OBJECT_TO_DOUBLE_FLAGS = 0,
  // Object is known to be a non smi.
  OBJECT_NOT_SMI = 1 << 0,
  // Don't load NaNs or infinities, branch to the non number case instead.
  AVOID_NANS_AND_INFINITIES = 1 << 1
};


// MacroAssembler implements a collection of frequently used macros.
class MacroAssembler: public Assembler {
 public:
  MacroAssembler(void* buffer, int size);

  // Jump, Call, and Ret pseudo instructions implementing inter-working.
  void Jump(Register target, Condition cond = al);
  void Jump(byte* target, RelocInfo::Mode rmode, Condition cond = al);
  void Jump(Handle<Code> code, RelocInfo::Mode rmode, Condition cond = al);
  void Call(Register target, Condition cond = al);
  void Call(byte* target, RelocInfo::Mode rmode, Condition cond = al);
  void Call(Handle<Code> code, RelocInfo::Mode rmode, Condition cond = al);
  void Ret(Condition cond = al);

  // Emit code to discard a non-negative number of pointer-sized elements
  // from the stack, clobbering only the sp register.
  void Drop(int count, Condition cond = al);


  // Swap two registers. If the scratch register is omitted then a slightly
  // less efficient form using xor instead of mov is emitted.
  void Swap(Register reg1,
            Register reg2,
            Register scratch = no_reg,
            Condition cond = al);


  void And(Register dst, Register src1, const Operand& src2,
           Condition cond = al);
  void Ubfx(Register dst, Register src, int lsb, int width,
            Condition cond = al);
  void Sbfx(Register dst, Register src, int lsb, int width,
            Condition cond = al);
  void Bfc(Register dst, int lsb, int width, Condition cond = al);

  void Call(Label* target);
  void Move(Register dst, Handle<Object> value);
  // May do nothing if the registers are identical.
  void Move(Register dst, Register src);
  // Jumps to the label at the index given by the Smi in "index".
  void SmiJumpTable(Register index, Vector<Label*> targets);
  // Load an object from the root table.
  void LoadRoot(Register destination,
                Heap::RootListIndex index,
                Condition cond = al);
  // Store an object to the root table.
  void StoreRoot(Register source,
                 Heap::RootListIndex index,
                 Condition cond = al);
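
  // Illustrative sketch only (the destination register is a placeholder): the
  // roots register caches the roots array, so loading a root such as the
  // undefined value is a single instruction:
  //   __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);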


  // Check if object is in new space.
  // scratch can be object itself, but it will be clobbered.
  void InNewSpace(Register object,
                  Register scratch,
                  Condition cc,  // eq for new space, ne otherwise
                  Label* branch);


  // For the page containing |object| mark the region covering [address]
  // dirty. The object address must be in the first 8K of an allocated page.
  void RecordWriteHelper(Register object,
                         Register address,
                         Register scratch);

  // For the page containing |object| mark the region covering
  // [object+offset] dirty. The object address must be in the first 8K
  // of an allocated page. The 'scratch' registers are used in the
  // implementation and all 3 registers are clobbered by the
  // operation, as well as the ip register. RecordWrite updates the
  // write barrier even when storing smis.
  void RecordWrite(Register object,
                   Operand offset,
                   Register scratch0,
                   Register scratch1);

  // For the page containing |object| mark the region covering
  // [address] dirty. The object address must be in the first 8K of an
  // allocated page. All 3 registers are clobbered by the operation,
  // as well as the ip register. RecordWrite updates the write barrier
  // even when storing smis.
  void RecordWrite(Register object,
                   Register address,
                   Register scratch);
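
  // Typical store-plus-write-barrier sequence, as an illustrative sketch only
  // (the registers and the JSObject::kPropertiesOffset field are just
  // examples):
  //   __ str(value, FieldMemOperand(object, JSObject::kPropertiesOffset));
  //   __ RecordWrite(object, Operand(JSObject::kPropertiesOffset),
  //                  scratch0, scratch1);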

  // Push two registers. Pushes leftmost register first (to highest address).
  void Push(Register src1, Register src2, Condition cond = al) {
    ASSERT(!src1.is(src2));
    if (src1.code() > src2.code()) {
      stm(db_w, sp, src1.bit() | src2.bit(), cond);
    } else {
      str(src1, MemOperand(sp, 4, NegPreIndex), cond);
      str(src2, MemOperand(sp, 4, NegPreIndex), cond);
    }
  }

  // Push three registers. Pushes leftmost register first (to highest address).
  void Push(Register src1, Register src2, Register src3, Condition cond = al) {
    ASSERT(!src1.is(src2));
    ASSERT(!src2.is(src3));
    ASSERT(!src1.is(src3));
    if (src1.code() > src2.code()) {
      if (src2.code() > src3.code()) {
        stm(db_w, sp, src1.bit() | src2.bit() | src3.bit(), cond);
      } else {
        stm(db_w, sp, src1.bit() | src2.bit(), cond);
        str(src3, MemOperand(sp, 4, NegPreIndex), cond);
      }
    } else {
      str(src1, MemOperand(sp, 4, NegPreIndex), cond);
      Push(src2, src3, cond);
    }
  }

  // Push four registers. Pushes leftmost register first (to highest address).
  void Push(Register src1, Register src2,
            Register src3, Register src4, Condition cond = al) {
    ASSERT(!src1.is(src2));
    ASSERT(!src2.is(src3));
    ASSERT(!src1.is(src3));
    ASSERT(!src1.is(src4));
    ASSERT(!src2.is(src4));
    ASSERT(!src3.is(src4));
    if (src1.code() > src2.code()) {
      if (src2.code() > src3.code()) {
        if (src3.code() > src4.code()) {
          stm(db_w,
              sp,
              src1.bit() | src2.bit() | src3.bit() | src4.bit(),
              cond);
        } else {
          stm(db_w, sp, src1.bit() | src2.bit() | src3.bit(), cond);
          str(src4, MemOperand(sp, 4, NegPreIndex), cond);
        }
      } else {
        stm(db_w, sp, src1.bit() | src2.bit(), cond);
        Push(src3, src4, cond);
      }
    } else {
      str(src1, MemOperand(sp, 4, NegPreIndex), cond);
      Push(src2, src3, src4, cond);
    }
  }
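
  // A sketch of the ordering contract (register names are placeholders): if
  // r2.code() > r0.code(), then
  //   Push(r2, r0);
  // stores r2 at the higher address and r0 at the lower one with a single stm;
  // otherwise the same effect is achieved with two str instructions, so the
  // leftmost argument always ends up highest on the stack. The stm form is
  // only usable when the leftmost register has the higher register code,
  // because stm writes registers in ascending code order at ascending
  // addresses.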

  // Load two consecutive registers with two consecutive memory locations.
  void Ldrd(Register dst1,
            Register dst2,
            const MemOperand& src,
            Condition cond = al);

  // Store two consecutive registers to two consecutive memory locations.
  void Strd(Register src1,
            Register src2,
            const MemOperand& dst,
            Condition cond = al);
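
  // Illustrative sketch only (registers are placeholders; the destinations
  // must be a consecutive pair such as r2/r3): loading both words of a heap
  // number's value field in one go:
  //   __ Ldrd(r2, r3, FieldMemOperand(r0, HeapNumber::kValueOffset));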

  // ---------------------------------------------------------------------------
  // Stack limit support

  void StackLimitCheck(Label* on_stack_limit_hit);

  // ---------------------------------------------------------------------------
  // Activation frames

  void EnterInternalFrame() { EnterFrame(StackFrame::INTERNAL); }
  void LeaveInternalFrame() { LeaveFrame(StackFrame::INTERNAL); }

  void EnterConstructFrame() { EnterFrame(StackFrame::CONSTRUCT); }
  void LeaveConstructFrame() { LeaveFrame(StackFrame::CONSTRUCT); }

  // Enter specific kind of exit frame; either normal or debug mode.
  // Expects the number of arguments in register r0 and
  // the builtin function to call in register r1. Exits with argc in
  // r4, argv in r6, and the builtin function to call in r5.
  void EnterExitFrame(ExitFrame::Mode mode);

  // Leave the current exit frame. Expects the return value in r0.
  void LeaveExitFrame(ExitFrame::Mode mode);

  // Get the actual activation frame alignment for target environment.
  static int ActivationFrameAlignment();

  void LoadContext(Register dst, int context_chain_length);

  // ---------------------------------------------------------------------------
  // JavaScript invokes

  // Invoke the JavaScript function code by either calling or jumping.
  void InvokeCode(Register code,
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  InvokeFlag flag);

  void InvokeCode(Handle<Code> code,
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  RelocInfo::Mode rmode,
                  InvokeFlag flag);

  // Invoke the JavaScript function in the given register. Changes the
  // current context to the context in the function before invoking.
  void InvokeFunction(Register function,
                      const ParameterCount& actual,
                      InvokeFlag flag);

  void InvokeFunction(JSFunction* function,
                      const ParameterCount& actual,
                      InvokeFlag flag);


#ifdef ENABLE_DEBUGGER_SUPPORT
  // ---------------------------------------------------------------------------
  // Debugger Support

  void SaveRegistersToMemory(RegList regs);
  void RestoreRegistersFromMemory(RegList regs);
  void CopyRegistersFromMemoryToStack(Register base, RegList regs);
  void CopyRegistersFromStackToMemory(Register base,
                                      Register scratch,
                                      RegList regs);
  void DebugBreak();
#endif

  // ---------------------------------------------------------------------------
  // Exception handling

  // Push a new try handler and link into try handler chain.
  // The return address must be passed in register lr.
  // On exit, r0 contains TOS (code slot).
  void PushTryHandler(CodeLocation try_location, HandlerType type);

  // Unlink the stack handler on top of the stack from the try handler chain.
  // Must preserve the result register.
  void PopTryHandler();

  // ---------------------------------------------------------------------------
  // Inline caching support

  // Generates code that verifies that the maps of objects in the
  // prototype chain of object hasn't changed since the code was
  // generated and branches to the miss label if any map has. If
  // necessary the function also generates code for security check
  // in case of global object holders. The scratch and holder
  // registers are always clobbered, but the object register is only
  // clobbered if it is the same as the holder register. The function
  // returns a register containing the holder - either object_reg or
  // holder_reg.
  // The function can optionally (when save_at_depth !=
  // kInvalidProtoDepth) save the object at the given depth by moving
  // it to [sp].
  Register CheckMaps(JSObject* object, Register object_reg,
                     JSObject* holder, Register holder_reg,
                     Register scratch,
                     int save_at_depth,
                     Label* miss);

  // Generate code for checking access rights - used for security checks
  // on access to global objects across environments. The holder register
  // is left untouched, whereas both scratch registers are clobbered.
  void CheckAccessGlobalProxy(Register holder_reg,
                              Register scratch,
                              Label* miss);


  // ---------------------------------------------------------------------------
  // Allocation support

  // Allocate an object in new space. The object_size is specified in words (not
  // bytes). If the new space is exhausted control continues at the gc_required
  // label. The allocated object is returned in result. If the flag
  // TAG_OBJECT is set the result is tagged as a heap object. All
  // registers are clobbered also when control continues at the gc_required
  // label.
  void AllocateInNewSpace(int object_size,
                          Register result,
                          Register scratch1,
                          Register scratch2,
                          Label* gc_required,
                          AllocationFlags flags);
  void AllocateInNewSpace(Register object_size,
                          Register result,
                          Register scratch1,
                          Register scratch2,
                          Label* gc_required,
                          AllocationFlags flags);
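
  // Illustrative sketch only (the label and registers are placeholders): a
  // fixed-size, tagged allocation of a heap-number-sized object.
  //   Label gc_required;
  //   __ AllocateInNewSpace(HeapNumber::kSize / kPointerSize,  // in words
  //                         r0, r1, r2,
  //                         &gc_required,
  //                         TAG_OBJECT);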

  // Undo allocation in new space. The object passed and objects allocated after
  // it will no longer be allocated. The caller must make sure that no pointers
  // are left to the object(s) no longer allocated as they would be invalid when
  // allocation is undone.
  void UndoAllocationInNewSpace(Register object, Register scratch);


  void AllocateTwoByteString(Register result,
                             Register length,
                             Register scratch1,
                             Register scratch2,
                             Register scratch3,
                             Label* gc_required);
  void AllocateAsciiString(Register result,
                           Register length,
                           Register scratch1,
                           Register scratch2,
                           Register scratch3,
                           Label* gc_required);
  void AllocateTwoByteConsString(Register result,
                                 Register length,
                                 Register scratch1,
                                 Register scratch2,
                                 Label* gc_required);
  void AllocateAsciiConsString(Register result,
                               Register length,
                               Register scratch1,
                               Register scratch2,
                               Label* gc_required);

  // Allocates a heap number or jumps to the gc_required label if the young
  // space is full and a scavenge is needed. All registers are clobbered also
  // when control continues at the gc_required label.
  void AllocateHeapNumber(Register result,
                          Register scratch1,
                          Register scratch2,
                          Register heap_number_map,
                          Label* gc_required);
  void AllocateHeapNumberWithValue(Register result,
                                   DwVfpRegister value,
                                   Register scratch1,
                                   Register scratch2,
                                   Register heap_number_map,
                                   Label* gc_required);


  // ---------------------------------------------------------------------------
  // Support functions.

  // Try to get the function prototype of a function and put the value in
  // the result register. Checks that the function really is a
  // function and jumps to the miss label if the fast checks fail. The
  // function register will be untouched; the other registers may be
  // clobbered.
  void TryGetFunctionPrototype(Register function,
                               Register result,
                               Register scratch,
                               Label* miss);

  // Compare object type for heap object. heap_object contains a non-Smi
  // whose object type should be compared with the given type. This both
  // sets the flags and leaves the object type in the type_reg register.
  // It leaves the map in the map register (unless the type_reg and map register
  // are the same register). It leaves the heap object in the heap_object
  // register unless the heap_object register is the same register as one of the
  // other registers.
  void CompareObjectType(Register heap_object,
                         Register map,
                         Register type_reg,
                         InstanceType type);
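
  // Illustrative sketch only (registers and the miss label are placeholders):
  // check that r1 holds a JSFunction, branching away if it does not.
  //   __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
  //   __ b(ne, &miss);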

  // Compare instance type in a map. map contains a valid map object whose
  // object type should be compared with the given type. This both
  // sets the flags and leaves the object type in the type_reg register. It
  // leaves the heap object in the heap_object register unless the heap_object
  // register is the same register as type_reg.
  void CompareInstanceType(Register map,
                           Register type_reg,
                           InstanceType type);


  // Check if the map of an object is equal to a specified map (either
  // given directly or as an index into the root list) and branch to
  // label if not. Skip the smi check if not required (object is known
  // to be a heap object)
  void CheckMap(Register obj,
                Register scratch,
                Handle<Map> map,
                Label* fail,
                bool is_heap_object);

  void CheckMap(Register obj,
                Register scratch,
                Heap::RootListIndex index,
                Label* fail,
                bool is_heap_object);


  // Load and check the instance type of an object for being a string.
  // Loads the type into the second argument register.
  // Returns a condition that will be enabled if the object was a string.
  Condition IsObjectStringType(Register obj,
                               Register type) {
    ldr(type, FieldMemOperand(obj, HeapObject::kMapOffset));
    ldrb(type, FieldMemOperand(type, Map::kInstanceTypeOffset));
    tst(type, Operand(kIsNotStringMask));
    ASSERT_EQ(0, kStringTag);
    return eq;
  }


  inline void BranchOnSmi(Register value, Label* smi_label) {
    tst(value, Operand(kSmiTagMask));
    b(eq, smi_label);
  }

  inline void BranchOnNotSmi(Register value, Label* not_smi_label) {
    tst(value, Operand(kSmiTagMask));
    b(ne, not_smi_label);
  }
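
  // These helpers rely on the smi encoding: kSmiTag is 0 and kSmiTagMask is 1,
  // so the tst sets the Z flag exactly when the value is a smi. Illustrative
  // sketch only (the register and label are placeholders):
  //   __ BranchOnNotSmi(r0, &not_smi);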

  // Generates code for reporting that an illegal operation has
  // occurred.
  void IllegalOperation(int num_arguments);

  // Get the number of least significant bits from a register
  void GetLeastBitsFromSmi(Register dst, Register src, int num_least_bits);

  // Uses VFP instructions to convert a Smi to a double.
  void IntegerToDoubleConversionWithVFP3(Register inReg,
                                         Register outHighReg,
                                         Register outLowReg);

  // Load the value of a number object into a VFP double register. If the object
  // is not a number a jump to the label not_number is performed and the VFP
  // double register is unchanged.
  void ObjectToDoubleVFPRegister(
      Register object,
      DwVfpRegister value,
      Register scratch1,
      Register scratch2,
      Register heap_number_map,
      SwVfpRegister scratch3,
      Label* not_number,
      ObjectToDoubleFlags flags = NO_OBJECT_TO_DOUBLE_FLAGS);

  // Load the value of a smi object into a VFP double register. The register
  // scratch1 can be the same register as smi in which case smi will hold the
  // untagged value afterwards.
  void SmiToDoubleVFPRegister(Register smi,
                              DwVfpRegister value,
                              Register scratch1,
                              SwVfpRegister scratch2);

  // Count leading zeros in a 32 bit word. On ARMv5 and later it uses the clz
  // instruction. On pre-ARMv5 hardware this routine gives the wrong answer
  // for 0 (31 instead of 32). Source and scratch can be the same in which case
  // the source is clobbered. Source and zeros can also be the same in which
  // case scratch should be a different register.
  void CountLeadingZeros(Register zeros,
                         Register source,
                         Register scratch);

  // ---------------------------------------------------------------------------
  // Runtime calls

  // Call a code stub.
  void CallStub(CodeStub* stub, Condition cond = al);

  // Tail call a code stub (jump).
  void TailCallStub(CodeStub* stub, Condition cond = al);

  // Return from a code stub after popping its arguments.
  void StubReturn(int argc);

  // Call a runtime routine.
  void CallRuntime(Runtime::Function* f, int num_arguments);

  // Convenience function: Same as above, but takes the fid instead.
  void CallRuntime(Runtime::FunctionId fid, int num_arguments);

  // Convenience function: call an external reference.
  void CallExternalReference(const ExternalReference& ext,
                             int num_arguments);

  // Tail call of a runtime routine (jump).
  // Like JumpToExternalReference, but also takes care of passing the number
  // of parameters.
  void TailCallExternalReference(const ExternalReference& ext,
                                 int num_arguments,
                                 int result_size);

  // Convenience function: tail call a runtime routine (jump).
  void TailCallRuntime(Runtime::FunctionId fid,
                       int num_arguments,
                       int result_size);

  // Before calling a C-function from generated code, align arguments on stack.
  // After aligning the frame, non-register arguments must be stored in
  // sp[0], sp[4], etc., not pushed. The argument count assumes all arguments
  // are word sized.
  // Some compilers/platforms require the stack to be aligned when calling
  // C++ code.
  // Needs a scratch register to do some arithmetic. This register will be
  // trashed.
  void PrepareCallCFunction(int num_arguments, Register scratch);

  // Calls a C function and cleans up the space for arguments allocated
  // by PrepareCallCFunction. The called function is not allowed to trigger a
  // garbage collection, since that might move the code and invalidate the
  // return address (unless this is somehow accounted for by the called
  // function).
  void CallCFunction(ExternalReference function, int num_arguments);
  void CallCFunction(Register function, int num_arguments);
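
  // Illustrative calling sequence, a sketch only (function_ref, the argument
  // values and the scratch register are placeholders): the first arguments
  // travel in r0..r3 per the ARM calling convention.
  //   __ PrepareCallCFunction(2, r5);
  //   __ mov(r0, Operand(1));
  //   __ mov(r1, Operand(2));
  //   __ CallCFunction(function_ref, 2);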

  // Jump to a runtime routine.
  void JumpToExternalReference(const ExternalReference& builtin);

  // Invoke specified builtin JavaScript function. Adds an entry to
  // the unresolved list if the name does not resolve.
  void InvokeBuiltin(Builtins::JavaScript id, InvokeJSFlags flags);

  // Store the code object for the given builtin in the target register and
  // set up the function in r1.
  void GetBuiltinEntry(Register target, Builtins::JavaScript id);

  Handle<Object> CodeObject() { return code_object_; }


  // ---------------------------------------------------------------------------
  // StatsCounter support

  void SetCounter(StatsCounter* counter, int value,
                  Register scratch1, Register scratch2);
  void IncrementCounter(StatsCounter* counter, int value,
                        Register scratch1, Register scratch2);
  void DecrementCounter(StatsCounter* counter, int value,
                        Register scratch1, Register scratch2);


  // ---------------------------------------------------------------------------
  // Debugging

  // Calls Abort(msg) if the condition cc is not satisfied.
  // Use --debug_code to enable.
  void Assert(Condition cc, const char* msg);
  void AssertRegisterIsRoot(Register reg, Heap::RootListIndex index);

  // Like Assert(), but always enabled.
  void Check(Condition cc, const char* msg);

  // Print a message to stdout and abort execution.
  void Abort(const char* msg);

  // Verify restrictions about code generated in stubs.
  void set_generating_stub(bool value) { generating_stub_ = value; }
  bool generating_stub() { return generating_stub_; }
  void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
  bool allow_stub_calls() { return allow_stub_calls_; }

  // ---------------------------------------------------------------------------
  // Smi utilities

  // Jump if either of the registers contains a non-smi.
  void JumpIfNotBothSmi(Register reg1, Register reg2, Label* on_not_both_smi);
  // Jump if either of the registers contains a smi.
  void JumpIfEitherSmi(Register reg1, Register reg2, Label* on_either_smi);

  // ---------------------------------------------------------------------------
  // String utilities

  // Checks if both objects are sequential ASCII strings and jumps to label
  // if either is not. Assumes that neither object is a smi.
  void JumpIfNonSmisNotBothSequentialAsciiStrings(Register object1,
                                                  Register object2,
                                                  Register scratch1,
                                                  Register scratch2,
                                                  Label* failure);

  // Checks if both objects are sequential ASCII strings and jumps to label
  // if either is not.
  void JumpIfNotBothSequentialAsciiStrings(Register first,
                                           Register second,
                                           Register scratch1,
                                           Register scratch2,
                                           Label* not_flat_ascii_strings);

  // Checks if both instance types are sequential ASCII strings and jumps to
  // label if either is not.
  void JumpIfBothInstanceTypesAreNotSequentialAscii(
      Register first_object_instance_type,
      Register second_object_instance_type,
      Register scratch1,
      Register scratch2,
      Label* failure);

  // Check if instance type is sequential ASCII string and jump to label if
  // it is not.
  void JumpIfInstanceTypeIsNotSequentialAscii(Register type,
                                              Register scratch,
                                              Label* failure);


 private:
  void Jump(intptr_t target, RelocInfo::Mode rmode, Condition cond = al);
  void Call(intptr_t target, RelocInfo::Mode rmode, Condition cond = al);

  // Helper functions for generating invokes.
  void InvokePrologue(const ParameterCount& expected,
                      const ParameterCount& actual,
                      Handle<Code> code_constant,
                      Register code_reg,
                      Label* done,
                      InvokeFlag flag);

  // Activation support.
  void EnterFrame(StackFrame::Type type);
  void LeaveFrame(StackFrame::Type type);

  void InitializeNewString(Register string,
                           Register length,
                           Heap::RootListIndex map_index,
                           Register scratch1,
                           Register scratch2);

  bool generating_stub_;
  bool allow_stub_calls_;
  // This handle will be patched with the code object on installation.
  Handle<Object> code_object_;
};


#ifdef ENABLE_DEBUGGER_SUPPORT
// The code patcher is used to patch (typically) small parts of code e.g. for
// debugging and other types of instrumentation. When using the code patcher
// the exact number of bytes specified must be emitted. It is not legal to emit
// relocation information. If any of these constraints are violated it causes
// an assertion to fail.
class CodePatcher {
 public:
  CodePatcher(byte* address, int instructions);
  virtual ~CodePatcher();

  // Macro assembler to emit code.
  MacroAssembler* masm() { return &masm_; }

  // Emit an instruction directly.
  void Emit(Instr x);

  // Emit an address directly.
  void Emit(Address addr);

 private:
  byte* address_;  // The address of the code being patched.
  int instructions_;  // Number of instructions of the expected patch size.
  int size_;  // Number of bytes of the expected patch size.
  MacroAssembler masm_;  // Macro assembler used to generate the code.
};
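
// Intended use, as an illustrative sketch only (the address, the instruction
// count and the emitted instructions are placeholders): exactly the promised
// number of instructions must be emitted before the patcher is destroyed.
//   CodePatcher patcher(address, 2);
//   patcher.masm()->mov(r0, Operand(0));
//   patcher.masm()->mov(r1, Operand(1));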
#endif  // ENABLE_DEBUGGER_SUPPORT


// -----------------------------------------------------------------------------
// Static helper functions.

#ifdef GENERATED_CODE_COVERAGE
#define CODE_COVERAGE_STRINGIFY(x) #x
#define CODE_COVERAGE_TOSTRING(x) CODE_COVERAGE_STRINGIFY(x)
#define __FILE_LINE__ __FILE__ ":" CODE_COVERAGE_TOSTRING(__LINE__)
#define ACCESS_MASM(masm) masm->stop(__FILE_LINE__); masm->
#else
#define ACCESS_MASM(masm) masm->
#endif
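
// The .cc files that use the MacroAssembler conventionally define a local
// shorthand for ACCESS_MASM; an illustrative sketch (masm stands for a
// MacroAssembler* in scope):
//   #define __ ACCESS_MASM(masm)
//   __ mov(r0, Operand(0));
//   #undef __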


} }  // namespace v8::internal

#endif  // V8_ARM_MACRO_ASSEMBLER_ARM_H_