blob: 87f7b5feefd7dd71de9d4a5147674d47eb9e39df [file] [log] [blame]
Steve Blocka7e24c12009-10-30 11:49:00 +00001// Copyright 2006-2009 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#ifndef V8_ARM_MACRO_ASSEMBLER_ARM_H_
29#define V8_ARM_MACRO_ASSEMBLER_ARM_H_
30
31#include "assembler.h"
32
33namespace v8 {
34namespace internal {
35
Andrei Popescu31002712010-02-23 13:46:05 +000036// ----------------------------------------------------------------------------
37// Static helper functions
38
39// Generate a MemOperand for loading a field from an object.
40static inline MemOperand FieldMemOperand(Register object, int offset) {
41 return MemOperand(object, offset - kHeapObjectTag);
42}
43
Steve Blocka7e24c12009-10-30 11:49:00 +000044
// Give alias names to registers with a fixed role in generated code.
const Register cp = { 8 };  // JavaScript context pointer
const Register roots = { 10 };  // Roots array pointer.
Steve Blocka7e24c12009-10-30 11:49:00 +000048
// Flags selecting whether JavaScript code is invoked with a call or a jump.
enum InvokeJSFlags {
  CALL_JS,  // Invoke the code with a call instruction (returns here).
  JUMP_JS   // Invoke the code with a jump instruction (does not return).
};
53
54
// Flags used for the AllocateInNewSpace functions.
enum AllocationFlags {
  // No special flags.
  NO_ALLOCATION_FLAGS = 0,
  // Return the pointer to the allocated object already tagged as a heap
  // object.
  TAG_OBJECT = 1 << 0,
  // The content of the result register already contains the allocation top in
  // new space.
  RESULT_CONTAINS_TOP = 1 << 1,
  // Specify that the requested size of the space to allocate is specified in
  // words instead of bytes.
  SIZE_IN_WORDS = 1 << 2
};
68
69
// MacroAssembler implements a collection of frequently used macros.
class MacroAssembler: public Assembler {
 public:
  // Create a macro assembler emitting into the given buffer of the given
  // size (in bytes).
  MacroAssembler(void* buffer, int size);

  // Jump, Call, and Ret pseudo instructions implementing inter-working.
  void Jump(Register target, Condition cond = al);
  void Jump(byte* target, RelocInfo::Mode rmode, Condition cond = al);
  void Jump(Handle<Code> code, RelocInfo::Mode rmode, Condition cond = al);
  void Call(Register target, Condition cond = al);
  void Call(byte* target, RelocInfo::Mode rmode, Condition cond = al);
  void Call(Handle<Code> code, RelocInfo::Mode rmode, Condition cond = al);
  void Ret(Condition cond = al);

  // Emit code to discard a non-negative number of pointer-sized elements
  // from the stack, clobbering only the sp register.
  void Drop(int count, Condition cond = al);


  // Swap two registers.  If the scratch register is omitted then a slightly
  // less efficient form using xor instead of mov is emitted.
  void Swap(Register reg1,
            Register reg2,
            Register scratch = no_reg,
            Condition cond = al);

  // Call the code at the given label.
  void Call(Label* target);
  // Load a heap object handle into a register.
  void Move(Register dst, Handle<Object> value);
  // May do nothing if the registers are identical.
  void Move(Register dst, Register src);
  // Jumps to the label at the index given by the Smi in "index".
  void SmiJumpTable(Register index, Vector<Label*> targets);
  // Load an object from the root table.
  void LoadRoot(Register destination,
                Heap::RootListIndex index,
                Condition cond = al);
  // Store an object to the root table.
  void StoreRoot(Register source,
                 Heap::RootListIndex index,
                 Condition cond = al);


  // Check if object is in new space.
  // scratch can be object itself, but it will be clobbered.
  void InNewSpace(Register object,
                  Register scratch,
                  Condition cc,  // eq for new space, ne otherwise
                  Label* branch);


  // For the page containing |object| mark the region covering [object+offset]
  // dirty. The object address must be in the first 8K of an allocated page.
  void RecordWriteHelper(Register object, Register offset, Register scratch);

  // For the page containing |object| mark the region covering [object+offset]
  // dirty. The object address must be in the first 8K of an allocated page.
  // The 'scratch' register is used in the implementation and all 3 registers
  // are clobbered by the operation, as well as the ip register.
  void RecordWrite(Register object, Register offset, Register scratch);

  // Push two registers.  Pushes leftmost register first (to highest address).
  void Push(Register src1, Register src2, Condition cond = al) {
    ASSERT(!src1.is(src2));
    if (src1.code() > src2.code()) {
      // Register codes match push order: a single stm stores both, since stm
      // stores higher-numbered registers at higher addresses.
      stm(db_w, sp, src1.bit() | src2.bit(), cond);
    } else {
      // stm would store the registers in the wrong order, so push one at a
      // time instead.
      str(src1, MemOperand(sp, 4, NegPreIndex), cond);
      str(src2, MemOperand(sp, 4, NegPreIndex), cond);
    }
  }

  // Push three registers.  Pushes leftmost register first (to highest address).
  void Push(Register src1, Register src2, Register src3, Condition cond = al) {
    ASSERT(!src1.is(src2));
    ASSERT(!src2.is(src3));
    ASSERT(!src1.is(src3));
    // Use a single stm for the longest prefix whose register codes are in
    // descending order; push the remainder individually or recursively.
    if (src1.code() > src2.code()) {
      if (src2.code() > src3.code()) {
        stm(db_w, sp, src1.bit() | src2.bit() | src3.bit(), cond);
      } else {
        stm(db_w, sp, src1.bit() | src2.bit(), cond);
        str(src3, MemOperand(sp, 4, NegPreIndex), cond);
      }
    } else {
      str(src1, MemOperand(sp, 4, NegPreIndex), cond);
      Push(src2, src3, cond);
    }
  }

  // Push four registers.  Pushes leftmost register first (to highest address).
  void Push(Register src1, Register src2,
            Register src3, Register src4, Condition cond = al) {
    ASSERT(!src1.is(src2));
    ASSERT(!src2.is(src3));
    ASSERT(!src1.is(src3));
    ASSERT(!src1.is(src4));
    ASSERT(!src2.is(src4));
    ASSERT(!src3.is(src4));
    // Use a single stm for the longest prefix whose register codes are in
    // descending order; push the remainder individually or recursively.
    if (src1.code() > src2.code()) {
      if (src2.code() > src3.code()) {
        if (src3.code() > src4.code()) {
          stm(db_w,
              sp,
              src1.bit() | src2.bit() | src3.bit() | src4.bit(),
              cond);
        } else {
          stm(db_w, sp, src1.bit() | src2.bit() | src3.bit(), cond);
          str(src4, MemOperand(sp, 4, NegPreIndex), cond);
        }
      } else {
        stm(db_w, sp, src1.bit() | src2.bit(), cond);
        Push(src3, src4, cond);
      }
    } else {
      str(src1, MemOperand(sp, 4, NegPreIndex), cond);
      Push(src2, src3, src4, cond);
    }
  }

  // Load two consecutive registers with two consecutive memory locations.
  void Ldrd(Register dst1,
            Register dst2,
            const MemOperand& src,
            Condition cond = al);

  // Store two consecutive registers to two consecutive memory locations.
  void Strd(Register src1,
            Register src2,
            const MemOperand& dst,
            Condition cond = al);

  // ---------------------------------------------------------------------------
  // Stack limit support

  // Branch to on_stack_limit_hit if the stack pointer is at or below the
  // stack limit.
  void StackLimitCheck(Label* on_stack_limit_hit);

  // ---------------------------------------------------------------------------
  // Activation frames

  void EnterInternalFrame() { EnterFrame(StackFrame::INTERNAL); }
  void LeaveInternalFrame() { LeaveFrame(StackFrame::INTERNAL); }

  void EnterConstructFrame() { EnterFrame(StackFrame::CONSTRUCT); }
  void LeaveConstructFrame() { LeaveFrame(StackFrame::CONSTRUCT); }

  // Enter specific kind of exit frame; either normal or debug mode.
  // Expects the number of arguments in register r0 and
  // the builtin function to call in register r1. Exits with argc in
  // r4, argv in r6, and the builtin function to call in r5.
  void EnterExitFrame(ExitFrame::Mode mode);

  // Leave the current exit frame. Expects the return value in r0.
  void LeaveExitFrame(ExitFrame::Mode mode);

  // Get the actual activation frame alignment for target environment.
  static int ActivationFrameAlignment();

  // Load into dst the context found context_chain_length levels up the
  // context chain.
  void LoadContext(Register dst, int context_chain_length);

  // ---------------------------------------------------------------------------
  // JavaScript invokes

  // Invoke the JavaScript function code by either calling or jumping.
  void InvokeCode(Register code,
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  InvokeFlag flag);

  void InvokeCode(Handle<Code> code,
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  RelocInfo::Mode rmode,
                  InvokeFlag flag);

  // Invoke the JavaScript function in the given register. Changes the
  // current context to the context in the function before invoking.
  void InvokeFunction(Register function,
                      const ParameterCount& actual,
                      InvokeFlag flag);

  void InvokeFunction(JSFunction* function,
                      const ParameterCount& actual,
                      InvokeFlag flag);


#ifdef ENABLE_DEBUGGER_SUPPORT
  // ---------------------------------------------------------------------------
  // Debugger Support

  void SaveRegistersToMemory(RegList regs);
  void RestoreRegistersFromMemory(RegList regs);
  void CopyRegistersFromMemoryToStack(Register base, RegList regs);
  void CopyRegistersFromStackToMemory(Register base,
                                      Register scratch,
                                      RegList regs);
  void DebugBreak();
#endif

  // ---------------------------------------------------------------------------
  // Exception handling

  // Push a new try handler and link into try handler chain.
  // The return address must be passed in register lr.
  // On exit, r0 contains TOS (code slot).
  void PushTryHandler(CodeLocation try_location, HandlerType type);

  // Unlink the stack handler on top of the stack from the try handler chain.
  // Must preserve the result register.
  void PopTryHandler();

  // ---------------------------------------------------------------------------
  // Inline caching support

  // Generates code that verifies that the maps of objects in the
  // prototype chain of object hasn't changed since the code was
  // generated and branches to the miss label if any map has. If
  // necessary the function also generates code for security check
  // in case of global object holders. The scratch and holder
  // registers are always clobbered, but the object register is only
  // clobbered if it the same as the holder register. The function
  // returns a register containing the holder - either object_reg or
  // holder_reg.
  // The function can optionally (when save_at_depth !=
  // kInvalidProtoDepth) save the object at the given depth by moving
  // it to [sp].
  Register CheckMaps(JSObject* object, Register object_reg,
                     JSObject* holder, Register holder_reg,
                     Register scratch,
                     int save_at_depth,
                     Label* miss);

  // Generate code for checking access rights - used for security checks
  // on access to global objects across environments. The holder register
  // is left untouched, whereas both scratch registers are clobbered.
  void CheckAccessGlobalProxy(Register holder_reg,
                              Register scratch,
                              Label* miss);


  // ---------------------------------------------------------------------------
  // Allocation support

  // Allocate an object in new space. The object_size is specified in words (not
  // bytes). If the new space is exhausted control continues at the gc_required
  // label. The allocated object is returned in result. If the flag
  // tag_allocated_object is true the result is tagged as a heap object. All
  // registers are clobbered also when control continues at the gc_required
  // label.
  void AllocateInNewSpace(int object_size,
                          Register result,
                          Register scratch1,
                          Register scratch2,
                          Label* gc_required,
                          AllocationFlags flags);
  void AllocateInNewSpace(Register object_size,
                          Register result,
                          Register scratch1,
                          Register scratch2,
                          Label* gc_required,
                          AllocationFlags flags);

  // Undo allocation in new space. The object passed and objects allocated after
  // it will no longer be allocated. The caller must make sure that no pointers
  // are left to the object(s) no longer allocated as they would be invalid when
  // allocation is undone.
  void UndoAllocationInNewSpace(Register object, Register scratch);


  // Allocate string objects of the given kind; on failure control continues
  // at the gc_required label (same contract as AllocateInNewSpace above).
  void AllocateTwoByteString(Register result,
                             Register length,
                             Register scratch1,
                             Register scratch2,
                             Register scratch3,
                             Label* gc_required);
  void AllocateAsciiString(Register result,
                           Register length,
                           Register scratch1,
                           Register scratch2,
                           Register scratch3,
                           Label* gc_required);
  void AllocateTwoByteConsString(Register result,
                                 Register length,
                                 Register scratch1,
                                 Register scratch2,
                                 Label* gc_required);
  void AllocateAsciiConsString(Register result,
                               Register length,
                               Register scratch1,
                               Register scratch2,
                               Label* gc_required);

  // Allocates a heap number or jumps to the gc_required label if the young
  // space is full and a scavenge is needed. All registers are clobbered also
  // when control continues at the gc_required label.
  void AllocateHeapNumber(Register result,
                          Register scratch1,
                          Register scratch2,
                          Label* gc_required);

  // ---------------------------------------------------------------------------
  // Support functions.

  // Try to get function prototype of a function and puts the value in
  // the result register. Checks that the function really is a
  // function and jumps to the miss label if the fast checks fail. The
  // function register will be untouched; the other registers may be
  // clobbered.
  void TryGetFunctionPrototype(Register function,
                               Register result,
                               Register scratch,
                               Label* miss);

  // Compare object type for heap object.  heap_object contains a non-Smi
  // whose object type should be compared with the given type.  This both
  // sets the flags and leaves the object type in the type_reg register.
  // It leaves the map in the map register (unless the type_reg and map register
  // are the same register).  It leaves the heap object in the heap_object
  // register unless the heap_object register is the same register as one of the
  // other registers.
  void CompareObjectType(Register heap_object,
                         Register map,
                         Register type_reg,
                         InstanceType type);

  // Compare instance type in a map.  map contains a valid map object whose
  // object type should be compared with the given type.  This both
  // sets the flags and leaves the object type in the type_reg register.  It
  // leaves the heap object in the heap_object register unless the heap_object
  // register is the same register as type_reg.
  void CompareInstanceType(Register map,
                           Register type_reg,
                           InstanceType type);


  // Check if the map of an object is equal to a specified map (either
  // given directly or as an index into the root list) and branch to
  // label if not. Skip the smi check if not required (object is known
  // to be a heap object)
  void CheckMap(Register obj,
                Register scratch,
                Handle<Map> map,
                Label* fail,
                bool is_heap_object);

  void CheckMap(Register obj,
                Register scratch,
                Heap::RootListIndex index,
                Label* fail,
                bool is_heap_object);


  // Load and check the instance type of an object for being a string.
  // Loads the type into the second argument register.
  // Returns a condition that will be enabled if the object was a string.
  Condition IsObjectStringType(Register obj,
                               Register type) {
    ldr(type, FieldMemOperand(obj, HeapObject::kMapOffset));
    ldrb(type, FieldMemOperand(type, Map::kInstanceTypeOffset));
    tst(type, Operand(kIsNotStringMask));
    // The string tag must be zero for the eq condition below to mean
    // "is a string".
    ASSERT_EQ(0, kStringTag);
    return eq;
  }


  // Branch to smi_label if value holds a smi (low tag bit clear).
  inline void BranchOnSmi(Register value, Label* smi_label) {
    tst(value, Operand(kSmiTagMask));
    b(eq, smi_label);
  }

  // Branch to not_smi_label if value does not hold a smi.
  inline void BranchOnNotSmi(Register value, Label* not_smi_label) {
    tst(value, Operand(kSmiTagMask));
    b(ne, not_smi_label);
  }

  // Generates code for reporting that an illegal operation has
  // occurred.
  void IllegalOperation(int num_arguments);

  // Get the number of least significant bits from a register
  void GetLeastBitsFromSmi(Register dst, Register src, int num_least_bits);

  // Uses VFP instructions to Convert a Smi to a double.
  void IntegerToDoubleConversionWithVFP3(Register inReg,
                                         Register outHighReg,
                                         Register outLowReg);

  // Count leading zeros in a 32 bit word.  On ARM5 and later it uses the clz
  // instruction.  On pre-ARM5 hardware this routine gives the wrong answer
  // for 0 (31 instead of 32).
  void CountLeadingZeros(Register source,
                         Register scratch,
                         Register zeros);

  // ---------------------------------------------------------------------------
  // Runtime calls

  // Call a code stub.
  void CallStub(CodeStub* stub, Condition cond = al);

  // Tail call a code stub (jump).
  void TailCallStub(CodeStub* stub, Condition cond = al);

  // Return from a code stub after popping its arguments.
  void StubReturn(int argc);

  // Call a runtime routine.
  void CallRuntime(Runtime::Function* f, int num_arguments);

  // Convenience function: Same as above, but takes the fid instead.
  void CallRuntime(Runtime::FunctionId fid, int num_arguments);

  // Convenience function: call an external reference.
  void CallExternalReference(const ExternalReference& ext,
                             int num_arguments);

  // Tail call of a runtime routine (jump).
  // Like JumpToExternalReference, but also takes care of passing the number
  // of parameters.
  void TailCallExternalReference(const ExternalReference& ext,
                                 int num_arguments,
                                 int result_size);

  // Convenience function: tail call a runtime routine (jump).
  void TailCallRuntime(Runtime::FunctionId fid,
                       int num_arguments,
                       int result_size);

  // Before calling a C-function from generated code, align arguments on stack.
  // After aligning the frame, non-register arguments must be stored in
  // sp[0], sp[4], etc., not pushed. The argument count assumes all arguments
  // are word sized.
  // Some compilers/platforms require the stack to be aligned when calling
  // C++ code.
  // Needs a scratch register to do some arithmetic. This register will be
  // trashed.
  void PrepareCallCFunction(int num_arguments, Register scratch);

  // Calls a C function and cleans up the space for arguments allocated
  // by PrepareCallCFunction. The called function is not allowed to trigger a
  // garbage collection, since that might move the code and invalidate the
  // return address (unless this is somehow accounted for by the called
  // function).
  void CallCFunction(ExternalReference function, int num_arguments);
  void CallCFunction(Register function, int num_arguments);

  // Jump to a runtime routine.
  void JumpToExternalReference(const ExternalReference& builtin);

  // Invoke specified builtin JavaScript function. Adds an entry to
  // the unresolved list if the name does not resolve.
  void InvokeBuiltin(Builtins::JavaScript id, InvokeJSFlags flags);

  // Store the code object for the given builtin in the target register and
  // setup the function in r1.
  void GetBuiltinEntry(Register target, Builtins::JavaScript id);

  // Return the code object associated with this macro assembler.
  Handle<Object> CodeObject() { return code_object_; }


  // ---------------------------------------------------------------------------
  // StatsCounter support

  void SetCounter(StatsCounter* counter, int value,
                  Register scratch1, Register scratch2);
  void IncrementCounter(StatsCounter* counter, int value,
                        Register scratch1, Register scratch2);
  void DecrementCounter(StatsCounter* counter, int value,
                        Register scratch1, Register scratch2);


  // ---------------------------------------------------------------------------
  // Debugging

  // Calls Abort(msg) if the condition cc is not satisfied.
  // Use --debug_code to enable.
  void Assert(Condition cc, const char* msg);

  // Like Assert(), but always enabled.
  void Check(Condition cc, const char* msg);

  // Print a message to stdout and abort execution.
  void Abort(const char* msg);

  // Verify restrictions about code generated in stubs.
  void set_generating_stub(bool value) { generating_stub_ = value; }
  bool generating_stub() { return generating_stub_; }
  void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
  bool allow_stub_calls() { return allow_stub_calls_; }

  // ---------------------------------------------------------------------------
  // Smi utilities

  // Jump if either of the registers contain a non-smi.
  void JumpIfNotBothSmi(Register reg1, Register reg2, Label* on_not_both_smi);
  // Jump if either of the registers contain a smi.
  void JumpIfEitherSmi(Register reg1, Register reg2, Label* on_either_smi);

  // ---------------------------------------------------------------------------
  // String utilities

  // Checks if both objects are sequential ASCII strings and jumps to label
  // if either is not. Assumes that neither object is a smi.
  void JumpIfNonSmisNotBothSequentialAsciiStrings(Register object1,
                                                  Register object2,
                                                  Register scratch1,
                                                  Register scratch2,
                                                  Label* failure);

  // Checks if both objects are sequential ASCII strings and jumps to label
  // if either is not.
  void JumpIfNotBothSequentialAsciiStrings(Register first,
                                           Register second,
                                           Register scratch1,
                                           Register scratch2,
                                           Label* not_flat_ascii_strings);

  // Checks if both instance types are sequential ASCII strings and jumps to
  // label if either is not.
  void JumpIfBothInstanceTypesAreNotSequentialAscii(
      Register first_object_instance_type,
      Register second_object_instance_type,
      Register scratch1,
      Register scratch2,
      Label* failure);

  // Check if instance type is sequential ASCII string and jump to label if
  // it is not.
  void JumpIfInstanceTypeIsNotSequentialAscii(Register type,
                                              Register scratch,
                                              Label* failure);


 private:
  void Jump(intptr_t target, RelocInfo::Mode rmode, Condition cond = al);
  void Call(intptr_t target, RelocInfo::Mode rmode, Condition cond = al);

  // Helper functions for generating invokes.
  void InvokePrologue(const ParameterCount& expected,
                      const ParameterCount& actual,
                      Handle<Code> code_constant,
                      Register code_reg,
                      Label* done,
                      InvokeFlag flag);

  // Activation support.
  void EnterFrame(StackFrame::Type type);
  void LeaveFrame(StackFrame::Type type);

  // Shared tail of the string allocation functions above.
  void InitializeNewString(Register string,
                           Register length,
                           Heap::RootListIndex map_index,
                           Register scratch1,
                           Register scratch2);

  bool generating_stub_;
  bool allow_stub_calls_;
  // This handle will be patched with the code object on installation.
  Handle<Object> code_object_;
};
629
630
631#ifdef ENABLE_DEBUGGER_SUPPORT
632// The code patcher is used to patch (typically) small parts of code e.g. for
633// debugging and other types of instrumentation. When using the code patcher
634// the exact number of bytes specified must be emitted. It is not legal to emit
635// relocation information. If any of these constraints are violated it causes
636// an assertion to fail.
class CodePatcher {
 public:
  // Create a patcher that may emit exactly 'instructions' instructions
  // starting at 'address'.
  CodePatcher(byte* address, int instructions);
  virtual ~CodePatcher();

  // Macro assembler to emit code.
  MacroAssembler* masm() { return &masm_; }

  // Emit an instruction directly.
  void Emit(Instr x);

  // Emit an address directly.
  void Emit(Address addr);

 private:
  byte* address_;  // The address of the code being patched.
  int instructions_;  // Number of instructions of the expected patch size.
  int size_;  // Number of bytes of the expected patch size.
  MacroAssembler masm_;  // Macro assembler used to generate the code.
};
657#endif // ENABLE_DEBUGGER_SUPPORT
658
659
660// -----------------------------------------------------------------------------
661// Static helper functions.
662
Steve Blocka7e24c12009-10-30 11:49:00 +0000663#ifdef GENERATED_CODE_COVERAGE
664#define CODE_COVERAGE_STRINGIFY(x) #x
665#define CODE_COVERAGE_TOSTRING(x) CODE_COVERAGE_STRINGIFY(x)
666#define __FILE_LINE__ __FILE__ ":" CODE_COVERAGE_TOSTRING(__LINE__)
667#define ACCESS_MASM(masm) masm->stop(__FILE_LINE__); masm->
668#else
669#define ACCESS_MASM(masm) masm->
670#endif
671
672
673} } // namespace v8::internal
674
675#endif // V8_ARM_MACRO_ASSEMBLER_ARM_H_