// Copyright 2006-2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_ARM_MACRO_ASSEMBLER_ARM_H_
#define V8_ARM_MACRO_ASSEMBLER_ARM_H_

#include "assembler.h"

namespace v8 {
namespace internal {

// ----------------------------------------------------------------------------
// Static helper functions

// Generate a MemOperand for loading a field from an object.
static inline MemOperand FieldMemOperand(Register object, int offset) {
  return MemOperand(object, offset - kHeapObjectTag);
}

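// For example, assuming a MacroAssembler* named masm and a tagged object held
// in r0, the object's map field could be loaded with:
//
//   masm->ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));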

// Give alias names to registers
const Register cp = { 8 };  // JavaScript context pointer
const Register roots = { 10 };  // Roots array pointer.

enum InvokeJSFlags {
  CALL_JS,
  JUMP_JS
};


// Flags used for the AllocateInNewSpace functions.
enum AllocationFlags {
  // No special flags.
  NO_ALLOCATION_FLAGS = 0,
  // Return the pointer to the allocated object already tagged as a heap
  // object.
  TAG_OBJECT = 1 << 0,
  // The content of the result register already contains the allocation top in
  // new space.
  RESULT_CONTAINS_TOP = 1 << 1,
  // Specify that the requested size of the space to allocate is specified in
  // words instead of bytes.
  SIZE_IN_WORDS = 1 << 2
};

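// For example, an allocation whose size is given in words and whose result
// should come back tagged could combine the flags as
//
//   static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS)
//
// and pass the combined value to one of the AllocateInNewSpace functions
// declared below.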

// MacroAssembler implements a collection of frequently used macros.
class MacroAssembler: public Assembler {
 public:
  MacroAssembler(void* buffer, int size);

  // Jump, Call, and Ret pseudo instructions implementing inter-working.
  void Jump(Register target, Condition cond = al);
  void Jump(byte* target, RelocInfo::Mode rmode, Condition cond = al);
  void Jump(Handle<Code> code, RelocInfo::Mode rmode, Condition cond = al);
  void Call(Register target, Condition cond = al);
  void Call(byte* target, RelocInfo::Mode rmode, Condition cond = al);
  void Call(Handle<Code> code, RelocInfo::Mode rmode, Condition cond = al);
  void Ret(Condition cond = al);

  // Emit code to discard a non-negative number of pointer-sized elements
  // from the stack, clobbering only the sp register.
  void Drop(int count, Condition cond = al);


  // Swap two registers.  If the scratch register is omitted then a slightly
  // less efficient form using xor instead of mov is emitted.
  void Swap(Register reg1,
            Register reg2,
            Register scratch = no_reg,
            Condition cond = al);


  void And(Register dst, Register src1, const Operand& src2,
           Condition cond = al);
  void Ubfx(Register dst, Register src, int lsb, int width,
            Condition cond = al);
  void Sbfx(Register dst, Register src, int lsb, int width,
            Condition cond = al);
  void Bfc(Register dst, int lsb, int width, Condition cond = al);

  void Call(Label* target);
  void Move(Register dst, Handle<Object> value);
  // May do nothing if the registers are identical.
  void Move(Register dst, Register src);
  // Jumps to the label at the index given by the Smi in "index".
  void SmiJumpTable(Register index, Vector<Label*> targets);
  // Load an object from the root table.
  void LoadRoot(Register destination,
                Heap::RootListIndex index,
                Condition cond = al);
  // Store an object to the root table.
  void StoreRoot(Register source,
                 Heap::RootListIndex index,
                 Condition cond = al);

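  // For example, the undefined value could be loaded into r2 with
  //
  //   masm->LoadRoot(r2, Heap::kUndefinedValueRootIndex);
  //
  // assuming a MacroAssembler* named masm.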

  // Check if object is in new space.
  // scratch can be object itself, but it will be clobbered.
  void InNewSpace(Register object,
                  Register scratch,
                  Condition cc,  // eq for new space, ne otherwise
                  Label* branch);


  // For the page containing |object| mark the region covering [object+offset]
  // dirty. The object address must be in the first 8K of an allocated page.
  void RecordWriteHelper(Register object,
                         Operand offset,
                         Register scratch0,
                         Register scratch1);

  // For the page containing |object| mark the region covering [object+offset]
  // dirty. The object address must be in the first 8K of an allocated page.
  // The 'scratch' registers are used in the implementation and all 3 registers
  // are clobbered by the operation, as well as the ip register.
  void RecordWrite(Register object,
                   Operand offset,
                   Register scratch0,
                   Register scratch1);

  // Push two registers.  Pushes leftmost register first (to highest address).
  void Push(Register src1, Register src2, Condition cond = al) {
    ASSERT(!src1.is(src2));
    if (src1.code() > src2.code()) {
      stm(db_w, sp, src1.bit() | src2.bit(), cond);
    } else {
      str(src1, MemOperand(sp, 4, NegPreIndex), cond);
      str(src2, MemOperand(sp, 4, NegPreIndex), cond);
    }
  }

  // Push three registers.  Pushes leftmost register first (to highest address).
  void Push(Register src1, Register src2, Register src3, Condition cond = al) {
    ASSERT(!src1.is(src2));
    ASSERT(!src2.is(src3));
    ASSERT(!src1.is(src3));
    if (src1.code() > src2.code()) {
      if (src2.code() > src3.code()) {
        stm(db_w, sp, src1.bit() | src2.bit() | src3.bit(), cond);
      } else {
        stm(db_w, sp, src1.bit() | src2.bit(), cond);
        str(src3, MemOperand(sp, 4, NegPreIndex), cond);
      }
    } else {
      str(src1, MemOperand(sp, 4, NegPreIndex), cond);
      Push(src2, src3, cond);
    }
  }

  // Push four registers.  Pushes leftmost register first (to highest address).
  void Push(Register src1, Register src2,
            Register src3, Register src4, Condition cond = al) {
    ASSERT(!src1.is(src2));
    ASSERT(!src2.is(src3));
    ASSERT(!src1.is(src3));
    ASSERT(!src1.is(src4));
    ASSERT(!src2.is(src4));
    ASSERT(!src3.is(src4));
    if (src1.code() > src2.code()) {
      if (src2.code() > src3.code()) {
        if (src3.code() > src4.code()) {
          stm(db_w,
              sp,
              src1.bit() | src2.bit() | src3.bit() | src4.bit(),
              cond);
        } else {
          stm(db_w, sp, src1.bit() | src2.bit() | src3.bit(), cond);
          str(src4, MemOperand(sp, 4, NegPreIndex), cond);
        }
      } else {
        stm(db_w, sp, src1.bit() | src2.bit(), cond);
        Push(src3, src4, cond);
      }
    } else {
      str(src1, MemOperand(sp, 4, NegPreIndex), cond);
      Push(src2, src3, src4, cond);
    }
  }

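  // As a sketch of the ordering convention above: Push(r1, r2) leaves r1 at
  // the higher address and r2 on top of the stack, i.e. it is equivalent to
  //
  //   str(r1, MemOperand(sp, 4, NegPreIndex));
  //   str(r2, MemOperand(sp, 4, NegPreIndex));
  //
  // regardless of which branch the register-code comparison takes.
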
  // Load two consecutive registers with two consecutive memory locations.
  void Ldrd(Register dst1,
            Register dst2,
            const MemOperand& src,
            Condition cond = al);

  // Store two consecutive registers to two consecutive memory locations.
  void Strd(Register src1,
            Register src2,
            const MemOperand& dst,
            Condition cond = al);

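  // For example, Ldrd is typically used with an adjacent register pair such
  // as r0/r1; the two words of a heap number's double value held in r2 could
  // be loaded with something like
  //
  //   masm->Ldrd(r0, r1, FieldMemOperand(r2, HeapNumber::kValueOffset));
  //
  // assuming a MacroAssembler* named masm.
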
  // ---------------------------------------------------------------------------
  // Stack limit support

  void StackLimitCheck(Label* on_stack_limit_hit);

  // ---------------------------------------------------------------------------
  // Activation frames

  void EnterInternalFrame() { EnterFrame(StackFrame::INTERNAL); }
  void LeaveInternalFrame() { LeaveFrame(StackFrame::INTERNAL); }

  void EnterConstructFrame() { EnterFrame(StackFrame::CONSTRUCT); }
  void LeaveConstructFrame() { LeaveFrame(StackFrame::CONSTRUCT); }

  // Enter specific kind of exit frame; either normal or debug mode.
  // Expects the number of arguments in register r0 and
  // the builtin function to call in register r1. Exits with argc in
  // r4, argv in r6, and the builtin function to call in r5.
  void EnterExitFrame(ExitFrame::Mode mode);

  // Leave the current exit frame. Expects the return value in r0.
  void LeaveExitFrame(ExitFrame::Mode mode);

  // Get the actual activation frame alignment for target environment.
  static int ActivationFrameAlignment();

  void LoadContext(Register dst, int context_chain_length);

  // ---------------------------------------------------------------------------
  // JavaScript invokes

  // Invoke the JavaScript function code by either calling or jumping.
  void InvokeCode(Register code,
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  InvokeFlag flag);

  void InvokeCode(Handle<Code> code,
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  RelocInfo::Mode rmode,
                  InvokeFlag flag);

  // Invoke the JavaScript function in the given register. Changes the
  // current context to the context in the function before invoking.
  void InvokeFunction(Register function,
                      const ParameterCount& actual,
                      InvokeFlag flag);

  void InvokeFunction(JSFunction* function,
                      const ParameterCount& actual,
                      InvokeFlag flag);


#ifdef ENABLE_DEBUGGER_SUPPORT
  // ---------------------------------------------------------------------------
  // Debugger Support

  void SaveRegistersToMemory(RegList regs);
  void RestoreRegistersFromMemory(RegList regs);
  void CopyRegistersFromMemoryToStack(Register base, RegList regs);
  void CopyRegistersFromStackToMemory(Register base,
                                      Register scratch,
                                      RegList regs);
  void DebugBreak();
#endif

  // ---------------------------------------------------------------------------
  // Exception handling

  // Push a new try handler and link into try handler chain.
  // The return address must be passed in register lr.
  // On exit, r0 contains TOS (code slot).
  void PushTryHandler(CodeLocation try_location, HandlerType type);

  // Unlink the stack handler on top of the stack from the try handler chain.
  // Must preserve the result register.
  void PopTryHandler();

  // ---------------------------------------------------------------------------
  // Inline caching support

  // Generates code that verifies that the maps of the objects in the
  // prototype chain of |object| haven't changed since the code was
  // generated, and branches to the miss label if any map has. If
  // necessary the function also generates code for a security check
  // in case of global object holders. The scratch and holder
  // registers are always clobbered, but the object register is only
  // clobbered if it is the same as the holder register. The function
  // returns a register containing the holder - either object_reg or
  // holder_reg.
  // The function can optionally (when save_at_depth !=
  // kInvalidProtoDepth) save the object at the given depth by moving
  // it to [sp].
  Register CheckMaps(JSObject* object, Register object_reg,
                     JSObject* holder, Register holder_reg,
                     Register scratch,
                     int save_at_depth,
                     Label* miss);

  // Generate code for checking access rights - used for security checks
  // on access to global objects across environments. The holder register
  // is left untouched, whereas both scratch registers are clobbered.
  void CheckAccessGlobalProxy(Register holder_reg,
                              Register scratch,
                              Label* miss);


  // ---------------------------------------------------------------------------
  // Allocation support

  // Allocate an object in new space. The object_size is specified in words
  // (not bytes). If the new space is exhausted control continues at the
  // gc_required label. The allocated object is returned in result. If the
  // TAG_OBJECT flag is set the result is tagged as a heap object. All
  // registers are clobbered also when control continues at the gc_required
  // label.
  void AllocateInNewSpace(int object_size,
                          Register result,
                          Register scratch1,
                          Register scratch2,
                          Label* gc_required,
                          AllocationFlags flags);
  void AllocateInNewSpace(Register object_size,
                          Register result,
                          Register scratch1,
                          Register scratch2,
                          Label* gc_required,
                          AllocationFlags flags);

  // Undo allocation in new space. The object passed and objects allocated after
  // it will no longer be allocated. The caller must make sure that no pointers
  // are left to the object(s) no longer allocated as they would be invalid when
  // allocation is undone.
  void UndoAllocationInNewSpace(Register object, Register scratch);


  void AllocateTwoByteString(Register result,
                             Register length,
                             Register scratch1,
                             Register scratch2,
                             Register scratch3,
                             Label* gc_required);
  void AllocateAsciiString(Register result,
                           Register length,
                           Register scratch1,
                           Register scratch2,
                           Register scratch3,
                           Label* gc_required);
  void AllocateTwoByteConsString(Register result,
                                 Register length,
                                 Register scratch1,
                                 Register scratch2,
                                 Label* gc_required);
  void AllocateAsciiConsString(Register result,
                               Register length,
                               Register scratch1,
                               Register scratch2,
                               Label* gc_required);

  // Allocates a heap number or jumps to the gc_required label if the young
  // space is full and a scavenge is needed. All registers are clobbered also
  // when control continues at the gc_required label.
  void AllocateHeapNumber(Register result,
                          Register scratch1,
                          Register scratch2,
                          Register heap_number_map,
                          Label* gc_required);

  // ---------------------------------------------------------------------------
  // Support functions.

  // Try to get the function prototype of a function and put the value in
  // the result register. Checks that the function really is a
  // function and jumps to the miss label if the fast checks fail. The
  // function register will be untouched; the other registers may be
  // clobbered.
  void TryGetFunctionPrototype(Register function,
                               Register result,
                               Register scratch,
                               Label* miss);

  // Compare object type for heap object.  heap_object contains a non-Smi
  // whose object type should be compared with the given type.  This both
  // sets the flags and leaves the object type in the type_reg register.
  // It leaves the map in the map register (unless the type_reg and map register
  // are the same register).  It leaves the heap object in the heap_object
  // register unless the heap_object register is the same register as one of the
  // other registers.
  void CompareObjectType(Register heap_object,
                         Register map,
                         Register type_reg,
                         InstanceType type);

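  // For example, a fast-path check that r0 holds a JSFunction (using r1 and
  // r2 as the map and type registers, and a Label named miss) could read:
  //
  //   masm->CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE);
  //   masm->b(ne, &miss);
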
  // Compare instance type in a map.  map contains a valid map object whose
  // object type should be compared with the given type.  This both
  // sets the flags and leaves the object type in the type_reg register.  It
  // leaves the heap object in the heap_object register unless the heap_object
  // register is the same register as type_reg.
  void CompareInstanceType(Register map,
                           Register type_reg,
                           InstanceType type);


  // Check if the map of an object is equal to a specified map (either
  // given directly or as an index into the root list) and branch to
  // label if not. Skip the smi check if not required (object is known
  // to be a heap object).
  void CheckMap(Register obj,
                Register scratch,
                Handle<Map> map,
                Label* fail,
                bool is_heap_object);

  void CheckMap(Register obj,
                Register scratch,
                Heap::RootListIndex index,
                Label* fail,
                bool is_heap_object);


  // Load and check the instance type of an object for being a string.
  // Loads the type into the second argument register.
  // Returns a condition that will be enabled if the object was a string.
  Condition IsObjectStringType(Register obj,
                               Register type) {
    ldr(type, FieldMemOperand(obj, HeapObject::kMapOffset));
    ldrb(type, FieldMemOperand(type, Map::kInstanceTypeOffset));
    tst(type, Operand(kIsNotStringMask));
    ASSERT_EQ(0, kStringTag);
    return eq;
  }

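  // A typical caller (sketch, assuming the object in r0, a free scratch
  // register r3, and a Label named non_string) branches on the returned
  // condition:
  //
  //   Condition is_string = masm->IsObjectStringType(r0, r3);
  //   masm->b(NegateCondition(is_string), &non_string);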

  inline void BranchOnSmi(Register value, Label* smi_label) {
    tst(value, Operand(kSmiTagMask));
    b(eq, smi_label);
  }

  inline void BranchOnNotSmi(Register value, Label* not_smi_label) {
    tst(value, Operand(kSmiTagMask));
    b(ne, not_smi_label);
  }

  // Generates code for reporting that an illegal operation has
  // occurred.
  void IllegalOperation(int num_arguments);

  // Get the number of least significant bits from a register
  void GetLeastBitsFromSmi(Register dst, Register src, int num_least_bits);

  // Uses VFP instructions to convert a Smi to a double.
  void IntegerToDoubleConversionWithVFP3(Register inReg,
                                         Register outHighReg,
                                         Register outLowReg);

  // Count leading zeros in a 32 bit word.  On ARM5 and later it uses the clz
  // instruction.  On pre-ARM5 hardware this routine gives the wrong answer
  // for 0 (31 instead of 32).
  void CountLeadingZeros(Register source,
                         Register scratch,
                         Register zeros);

  // ---------------------------------------------------------------------------
  // Runtime calls

  // Call a code stub.
  void CallStub(CodeStub* stub, Condition cond = al);

  // Tail call a code stub (jump).
  void TailCallStub(CodeStub* stub, Condition cond = al);

  // Return from a code stub after popping its arguments.
  void StubReturn(int argc);

  // Call a runtime routine.
  void CallRuntime(Runtime::Function* f, int num_arguments);

  // Convenience function: Same as above, but takes the fid instead.
  void CallRuntime(Runtime::FunctionId fid, int num_arguments);

  // Convenience function: call an external reference.
  void CallExternalReference(const ExternalReference& ext,
                             int num_arguments);

  // Tail call of a runtime routine (jump).
  // Like JumpToExternalReference, but also takes care of passing the number
  // of parameters.
  void TailCallExternalReference(const ExternalReference& ext,
                                 int num_arguments,
                                 int result_size);

  // Convenience function: tail call a runtime routine (jump).
  void TailCallRuntime(Runtime::FunctionId fid,
                       int num_arguments,
                       int result_size);

  // Before calling a C-function from generated code, align arguments on stack.
  // After aligning the frame, non-register arguments must be stored in
  // sp[0], sp[4], etc., not pushed. The argument count assumes all arguments
  // are word sized.
  // Some compilers/platforms require the stack to be aligned when calling
  // C++ code.
  // Needs a scratch register to do some arithmetic. This register will be
  // trashed.
  void PrepareCallCFunction(int num_arguments, Register scratch);

  // Calls a C function and cleans up the space for arguments allocated
  // by PrepareCallCFunction. The called function is not allowed to trigger a
  // garbage collection, since that might move the code and invalidate the
  // return address (unless this is somehow accounted for by the called
  // function).
  void CallCFunction(ExternalReference function, int num_arguments);
  void CallCFunction(Register function, int num_arguments);

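  // As a sketch of the calling protocol (assuming two word-sized arguments
  // already placed in r0 and r1, a free scratch register r5, and an
  // ExternalReference named ext describing the target C function):
  //
  //   masm->PrepareCallCFunction(2, r5);
  //   masm->CallCFunction(ext, 2);
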
  // Jump to a runtime routine.
  void JumpToExternalReference(const ExternalReference& builtin);

  // Invoke specified builtin JavaScript function. Adds an entry to
  // the unresolved list if the name does not resolve.
  void InvokeBuiltin(Builtins::JavaScript id, InvokeJSFlags flags);

  // Store the code object for the given builtin in the target register and
  // set up the function in r1.
  void GetBuiltinEntry(Register target, Builtins::JavaScript id);

  Handle<Object> CodeObject() { return code_object_; }


  // ---------------------------------------------------------------------------
  // StatsCounter support

  void SetCounter(StatsCounter* counter, int value,
                  Register scratch1, Register scratch2);
  void IncrementCounter(StatsCounter* counter, int value,
                        Register scratch1, Register scratch2);
  void DecrementCounter(StatsCounter* counter, int value,
                        Register scratch1, Register scratch2);


  // ---------------------------------------------------------------------------
  // Debugging

  // Calls Abort(msg) if the condition cc is not satisfied.
  // Use --debug_code to enable.
  void Assert(Condition cc, const char* msg);
  void AssertRegisterIsRoot(Register reg, Heap::RootListIndex index);

  // Like Assert(), but always enabled.
  void Check(Condition cc, const char* msg);

  // Print a message to stdout and abort execution.
  void Abort(const char* msg);

  // Verify restrictions about code generated in stubs.
  void set_generating_stub(bool value) { generating_stub_ = value; }
  bool generating_stub() { return generating_stub_; }
  void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
  bool allow_stub_calls() { return allow_stub_calls_; }

  // ---------------------------------------------------------------------------
  // Smi utilities

  // Jump if either of the registers contains a non-smi.
  void JumpIfNotBothSmi(Register reg1, Register reg2, Label* on_not_both_smi);
  // Jump if either of the registers contains a smi.
  void JumpIfEitherSmi(Register reg1, Register reg2, Label* on_either_smi);

  // ---------------------------------------------------------------------------
  // String utilities

  // Checks if both objects are sequential ASCII strings and jumps to label
  // if either is not. Assumes that neither object is a smi.
  void JumpIfNonSmisNotBothSequentialAsciiStrings(Register object1,
                                                  Register object2,
                                                  Register scratch1,
                                                  Register scratch2,
                                                  Label* failure);

  // Checks if both objects are sequential ASCII strings and jumps to label
  // if either is not.
  void JumpIfNotBothSequentialAsciiStrings(Register first,
                                           Register second,
                                           Register scratch1,
                                           Register scratch2,
                                           Label* not_flat_ascii_strings);

  // Checks if both instance types are sequential ASCII strings and jumps to
  // label if either is not.
  void JumpIfBothInstanceTypesAreNotSequentialAscii(
      Register first_object_instance_type,
      Register second_object_instance_type,
      Register scratch1,
      Register scratch2,
      Label* failure);

  // Check if instance type is sequential ASCII string and jump to label if
  // it is not.
  void JumpIfInstanceTypeIsNotSequentialAscii(Register type,
                                              Register scratch,
                                              Label* failure);


 private:
  void Jump(intptr_t target, RelocInfo::Mode rmode, Condition cond = al);
  void Call(intptr_t target, RelocInfo::Mode rmode, Condition cond = al);

  // Helper functions for generating invokes.
  void InvokePrologue(const ParameterCount& expected,
                      const ParameterCount& actual,
                      Handle<Code> code_constant,
                      Register code_reg,
                      Label* done,
                      InvokeFlag flag);

  // Activation support.
  void EnterFrame(StackFrame::Type type);
  void LeaveFrame(StackFrame::Type type);

  void InitializeNewString(Register string,
                           Register length,
                           Heap::RootListIndex map_index,
                           Register scratch1,
                           Register scratch2);

  bool generating_stub_;
  bool allow_stub_calls_;
  // This handle will be patched with the code object on installation.
  Handle<Object> code_object_;
};


#ifdef ENABLE_DEBUGGER_SUPPORT
// The code patcher is used to patch (typically) small parts of code e.g. for
// debugging and other types of instrumentation. When using the code patcher
// the exact number of bytes specified must be emitted. It is not legal to emit
// relocation information. If any of these constraints are violated it causes
// an assertion to fail.
class CodePatcher {
 public:
  CodePatcher(byte* address, int instructions);
  virtual ~CodePatcher();

  // Macro assembler to emit code.
  MacroAssembler* masm() { return &masm_; }

  // Emit an instruction directly.
  void Emit(Instr x);

  // Emit an address directly.
  void Emit(Address addr);

 private:
  byte* address_;  // The address of the code being patched.
  int instructions_;  // Number of instructions of the expected patch size.
  int size_;  // Number of bytes of the expected patch size.
  MacroAssembler masm_;  // Macro assembler used to generate the code.
};
#endif  // ENABLE_DEBUGGER_SUPPORT


// -----------------------------------------------------------------------------
// Static helper functions.

#ifdef GENERATED_CODE_COVERAGE
#define CODE_COVERAGE_STRINGIFY(x) #x
#define CODE_COVERAGE_TOSTRING(x) CODE_COVERAGE_STRINGIFY(x)
#define __FILE_LINE__ __FILE__ ":" CODE_COVERAGE_TOSTRING(__LINE__)
#define ACCESS_MASM(masm) masm->stop(__FILE_LINE__); masm->
#else
#define ACCESS_MASM(masm) masm->
#endif


} }  // namespace v8::internal

#endif  // V8_ARM_MACRO_ASSEMBLER_ARM_H_