// Copyright 2006-2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_ARM_MACRO_ASSEMBLER_ARM_H_
#define V8_ARM_MACRO_ASSEMBLER_ARM_H_

#include "assembler.h"

namespace v8 {
namespace internal {

// ----------------------------------------------------------------------------
// Static helper functions

// Generate a MemOperand for loading a field from an object.
static inline MemOperand FieldMemOperand(Register object, int offset) {
  return MemOperand(object, offset - kHeapObjectTag);
}

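// Illustrative sketch (not part of the original header): in a code
// generator, FieldMemOperand is typically used to read a field of a tagged
// heap object, here the map of the object held in r0. The register choice is
// arbitrary; HeapObject::kMapOffset comes from objects.h.
//
//   masm->ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));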

// Give alias names to registers
const Register cp = { 8 };  // JavaScript context pointer
const Register roots = { 10 };  // Roots array pointer.

enum InvokeJSFlags {
  CALL_JS,
  JUMP_JS
};


// Flags used for the AllocateInNewSpace functions.
enum AllocationFlags {
  // No special flags.
  NO_ALLOCATION_FLAGS = 0,
  // Return the pointer to the allocated object already tagged as a heap
  // object.
  TAG_OBJECT = 1 << 0,
  // The content of the result register already contains the allocation top
  // in new space.
  RESULT_CONTAINS_TOP = 1 << 1,
  // Specify that the requested size of the space to allocate is specified in
  // words instead of bytes.
  SIZE_IN_WORDS = 1 << 2
};


// MacroAssembler implements a collection of frequently used macros.
class MacroAssembler: public Assembler {
 public:
  MacroAssembler(void* buffer, int size);

  // Jump, Call, and Ret pseudo instructions implementing inter-working.
  void Jump(Register target, Condition cond = al);
  void Jump(byte* target, RelocInfo::Mode rmode, Condition cond = al);
  void Jump(Handle<Code> code, RelocInfo::Mode rmode, Condition cond = al);
  void Call(Register target, Condition cond = al);
  void Call(byte* target, RelocInfo::Mode rmode, Condition cond = al);
  void Call(Handle<Code> code, RelocInfo::Mode rmode, Condition cond = al);
  void Ret(Condition cond = al);

  // Emit code to discard a non-negative number of pointer-sized elements
  // from the stack, clobbering only the sp register.
  void Drop(int count, Condition cond = al);


  // Swap two registers. If the scratch register is omitted then a slightly
  // less efficient form using eor instead of mov is emitted.
  void Swap(Register reg1, Register reg2, Register scratch = no_reg);

  void Call(Label* target);
  void Move(Register dst, Handle<Object> value);
  // May do nothing if the registers are identical.
  void Move(Register dst, Register src);
  // Jumps to the label at the index given by the Smi in "index".
  void SmiJumpTable(Register index, Vector<Label*> targets);
  // Load an object from the root table.
  void LoadRoot(Register destination,
                Heap::RootListIndex index,
                Condition cond = al);
  // Store an object to the root table.
  void StoreRoot(Register source,
                 Heap::RootListIndex index,
                 Condition cond = al);

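  // Illustrative sketch (not part of the original header): compare a value
  // against the undefined sentinel by loading it from the root table into
  // the ip scratch register; Heap::kUndefinedValueRootIndex is one of the
  // Heap::RootListIndex entries, and the label is an assumption here.
  //
  //   masm->LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  //   masm->cmp(r0, Operand(ip));
  //   masm->b(eq, &is_undefined);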

  // Check if object is in new space.
  // scratch can be object itself, but it will be clobbered.
  void InNewSpace(Register object,
                  Register scratch,
                  Condition cc,  // eq for new space, ne otherwise
                  Label* branch);


  // Set the remembered set bit for an offset into an
  // object. RecordWriteHelper only works if the object is not in new
  // space.
  void RecordWriteHelper(Register object, Register offset, Register scratch);

  // Sets the remembered set bit for [address+offset], where address is the
  // address of the heap object 'object'. The address must be in the first 8K
  // of an allocated page. The 'scratch' register is used in the
  // implementation and all 3 registers are clobbered by the operation, as
  // well as the ip register.
  void RecordWrite(Register object, Register offset, Register scratch);

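  // Illustrative sketch (not part of the original header): store a value
  // into an in-object field and then update the remembered set. The offset
  // and register assignments (r1 object, r0 value, r2/r3 scratch) are
  // assumptions of this sketch.
  //
  //   masm->str(r0, FieldMemOperand(r1, JSObject::kHeaderSize));
  //   masm->mov(r2, Operand(JSObject::kHeaderSize));
  //   masm->RecordWrite(r1, r2, r3);
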
  // Push two registers. Pushes leftmost register first (to highest address).
  void Push(Register src1, Register src2, Condition cond = al) {
    ASSERT(!src1.is(src2));
    if (src1.code() > src2.code()) {
      stm(db_w, sp, src1.bit() | src2.bit(), cond);
    } else {
      str(src1, MemOperand(sp, 4, NegPreIndex), cond);
      str(src2, MemOperand(sp, 4, NegPreIndex), cond);
    }
  }

  // Push three registers. Pushes leftmost register first (to highest address).
  void Push(Register src1, Register src2, Register src3, Condition cond = al) {
    ASSERT(!src1.is(src2));
    ASSERT(!src2.is(src3));
    ASSERT(!src1.is(src3));
    if (src1.code() > src2.code()) {
      if (src2.code() > src3.code()) {
        stm(db_w, sp, src1.bit() | src2.bit() | src3.bit(), cond);
      } else {
        stm(db_w, sp, src1.bit() | src2.bit(), cond);
        str(src3, MemOperand(sp, 4, NegPreIndex), cond);
      }
    } else {
      str(src1, MemOperand(sp, 4, NegPreIndex), cond);
      Push(src2, src3, cond);
    }
  }

  // Push four registers. Pushes leftmost register first (to highest address).
  void Push(Register src1, Register src2,
            Register src3, Register src4, Condition cond = al) {
    ASSERT(!src1.is(src2));
    ASSERT(!src2.is(src3));
    ASSERT(!src1.is(src3));
    ASSERT(!src1.is(src4));
    ASSERT(!src2.is(src4));
    ASSERT(!src3.is(src4));
    if (src1.code() > src2.code()) {
      if (src2.code() > src3.code()) {
        if (src3.code() > src4.code()) {
          stm(db_w,
              sp,
              src1.bit() | src2.bit() | src3.bit() | src4.bit(),
              cond);
        } else {
          stm(db_w, sp, src1.bit() | src2.bit() | src3.bit(), cond);
          str(src4, MemOperand(sp, 4, NegPreIndex), cond);
        }
      } else {
        stm(db_w, sp, src1.bit() | src2.bit(), cond);
        Push(src3, src4, cond);
      }
    } else {
      str(src1, MemOperand(sp, 4, NegPreIndex), cond);
      Push(src2, src3, src4, cond);
    }
  }

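  // Illustrative sketch (not part of the original header): push the values
  // in r1 and r0 with a single instruction where the register numbering
  // allows it; the leftmost argument ends up at the higher stack address.
  //
  //   masm->Push(r1, r0);  // r1 is stored above r0 on the stack.
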
  // Load two consecutive registers with two consecutive memory locations.
  void Ldrd(Register dst1,
            Register dst2,
            const MemOperand& src,
            Condition cond = al);

  // Store two consecutive registers to two consecutive memory locations.
  void Strd(Register src1,
            Register src2,
            const MemOperand& dst,
            Condition cond = al);

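  // Illustrative sketch (not part of the original header): copy the two
  // words of a heap number value with a register pair; r2/r3 form a valid
  // pair and HeapNumber::kValueOffset comes from objects.h. In this sketch
  // r0 is the source object and r1 the destination object.
  //
  //   masm->Ldrd(r2, r3, FieldMemOperand(r0, HeapNumber::kValueOffset));
  //   masm->Strd(r2, r3, FieldMemOperand(r1, HeapNumber::kValueOffset));
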
  // ---------------------------------------------------------------------------
  // Stack limit support

  void StackLimitCheck(Label* on_stack_limit_hit);

  // ---------------------------------------------------------------------------
  // Activation frames

  void EnterInternalFrame() { EnterFrame(StackFrame::INTERNAL); }
  void LeaveInternalFrame() { LeaveFrame(StackFrame::INTERNAL); }

  void EnterConstructFrame() { EnterFrame(StackFrame::CONSTRUCT); }
  void LeaveConstructFrame() { LeaveFrame(StackFrame::CONSTRUCT); }

  // Enter a specific kind of exit frame; either normal or debug mode.
  // Expects the number of arguments in register r0 and
  // the builtin function to call in register r1. Exits with argc in
  // r4, argv in r6, and the builtin function to call in r5.
  void EnterExitFrame(ExitFrame::Mode mode);

  // Leave the current exit frame. Expects the return value in r0.
  void LeaveExitFrame(ExitFrame::Mode mode);

  // Get the actual activation frame alignment for the target environment.
  static int ActivationFrameAlignment();

  void LoadContext(Register dst, int context_chain_length);

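  // Illustrative sketch (not part of the original header): generated code
  // that needs a frame around a runtime call is usually bracketed like this;
  // the runtime function and argument count are just examples.
  //
  //   masm->EnterInternalFrame();
  //   masm->CallRuntime(Runtime::kTraceEnter, 0);
  //   masm->LeaveInternalFrame();
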
  // ---------------------------------------------------------------------------
  // JavaScript invokes

  // Invoke the JavaScript function code by either calling or jumping.
  void InvokeCode(Register code,
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  InvokeFlag flag);

  void InvokeCode(Handle<Code> code,
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  RelocInfo::Mode rmode,
                  InvokeFlag flag);

  // Invoke the JavaScript function in the given register. Changes the
  // current context to the context in the function before invoking.
  void InvokeFunction(Register function,
                      const ParameterCount& actual,
                      InvokeFlag flag);

  void InvokeFunction(JSFunction* function,
                      const ParameterCount& actual,
                      InvokeFlag flag);


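  // Illustrative sketch (not part of the original header): call the JS
  // function held in r1 with a known argument count. The ParameterCount
  // helper and the CALL_FUNCTION invoke flag come from the shared
  // macro-assembler support; argc is an assumption of this sketch.
  //
  //   ParameterCount actual(argc);
  //   masm->InvokeFunction(r1, actual, CALL_FUNCTION);
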
#ifdef ENABLE_DEBUGGER_SUPPORT
  // ---------------------------------------------------------------------------
  // Debugger Support

  void SaveRegistersToMemory(RegList regs);
  void RestoreRegistersFromMemory(RegList regs);
  void CopyRegistersFromMemoryToStack(Register base, RegList regs);
  void CopyRegistersFromStackToMemory(Register base,
                                      Register scratch,
                                      RegList regs);
  void DebugBreak();
#endif

  // ---------------------------------------------------------------------------
  // Exception handling

  // Push a new try handler and link into try handler chain.
  // The return address must be passed in register lr.
  // On exit, r0 contains TOS (code slot).
  void PushTryHandler(CodeLocation try_location, HandlerType type);

  // Unlink the stack handler on top of the stack from the try handler chain.
  // Must preserve the result register.
  void PopTryHandler();

  // ---------------------------------------------------------------------------
  // Inline caching support

  // Generates code that verifies that the maps of objects in the
  // prototype chain of object haven't changed since the code was
  // generated and branches to the miss label if any map has. If
  // necessary the function also generates code for a security check
  // in case of global object holders. The scratch and holder
  // registers are always clobbered, but the object register is only
  // clobbered if it is the same as the holder register. The function
  // returns a register containing the holder - either object_reg or
  // holder_reg.
  // The function can optionally (when save_at_depth !=
  // kInvalidProtoDepth) save the object at the given depth by moving
  // it to [sp].
  Register CheckMaps(JSObject* object, Register object_reg,
                     JSObject* holder, Register holder_reg,
                     Register scratch,
                     int save_at_depth,
                     Label* miss);


  // Generate code for checking access rights - used for security checks
  // on access to global objects across environments. The holder register
  // is left untouched, whereas both scratch registers are clobbered.
  void CheckAccessGlobalProxy(Register holder_reg,
                              Register scratch,
                              Label* miss);


  // ---------------------------------------------------------------------------
  // Allocation support

  // Allocate an object in new space. The object_size is specified in words
  // (not bytes). If the new space is exhausted control continues at the
  // gc_required label. The allocated object is returned in result. If the
  // TAG_OBJECT flag is set the result is tagged as a heap object. All
  // registers are clobbered also when control continues at the gc_required
  // label.
  void AllocateInNewSpace(int object_size,
                          Register result,
                          Register scratch1,
                          Register scratch2,
                          Label* gc_required,
                          AllocationFlags flags);
  void AllocateInNewSpace(Register object_size,
                          Register result,
                          Register scratch1,
                          Register scratch2,
                          Label* gc_required,
                          AllocationFlags flags);

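  // Illustrative sketch (not part of the original header): allocate a
  // heap-number-sized object tagged as a heap object, bailing out to a
  // deferred slow path when new space is full. The size, registers, and
  // label are assumptions of this sketch; see the AllocationFlags above for
  // the size-unit convention.
  //
  //   masm->AllocateInNewSpace(
  //       HeapNumber::kSize / kPointerSize,
  //       r0, r1, r2, &gc_required,
  //       static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS));
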
  // Undo allocation in new space. The object passed and objects allocated after
  // it will no longer be allocated. The caller must make sure that no pointers
  // are left to the object(s) no longer allocated as they would be invalid when
  // allocation is undone.
  void UndoAllocationInNewSpace(Register object, Register scratch);


  void AllocateTwoByteString(Register result,
                             Register length,
                             Register scratch1,
                             Register scratch2,
                             Register scratch3,
                             Label* gc_required);
  void AllocateAsciiString(Register result,
                           Register length,
                           Register scratch1,
                           Register scratch2,
                           Register scratch3,
                           Label* gc_required);
  void AllocateTwoByteConsString(Register result,
                                 Register length,
                                 Register scratch1,
                                 Register scratch2,
                                 Label* gc_required);
  void AllocateAsciiConsString(Register result,
                               Register length,
                               Register scratch1,
                               Register scratch2,
                               Label* gc_required);

  // Allocates a heap number or jumps to the gc_required label if the young
  // space is full and a scavenge is needed. All registers are clobbered also
  // when control continues at the gc_required label.
  void AllocateHeapNumber(Register result,
                          Register scratch1,
                          Register scratch2,
                          Label* gc_required);

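  // Illustrative sketch (not part of the original header): allocate a heap
  // number in r0 and store an untagged double held in the register pair
  // r2/r3 into its value field. The registers and the label are assumptions
  // of this sketch.
  //
  //   masm->AllocateHeapNumber(r0, r4, r5, &gc_required);
  //   masm->Strd(r2, r3, FieldMemOperand(r0, HeapNumber::kValueOffset));
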
  // ---------------------------------------------------------------------------
  // Support functions.

  // Try to get the function prototype of a function and put the value in
  // the result register. Checks that the function really is a
  // function and jumps to the miss label if the fast checks fail. The
  // function register will be untouched; the other registers may be
  // clobbered.
  void TryGetFunctionPrototype(Register function,
                               Register result,
                               Register scratch,
                               Label* miss);

  // Compare object type for heap object. heap_object contains a non-Smi
  // whose object type should be compared with the given type. This both
  // sets the flags and leaves the object type in the type_reg register.
  // It leaves the map in the map register (unless the type_reg and map register
  // are the same register). It leaves the heap object in the heap_object
  // register unless the heap_object register is the same register as one of the
  // other registers.
  void CompareObjectType(Register heap_object,
                         Register map,
                         Register type_reg,
                         InstanceType type);

  // Compare instance type in a map. map contains a valid map object whose
  // object type should be compared with the given type. This both
  // sets the flags and leaves the object type in the type_reg register. It
  // leaves the heap object in the heap_object register unless the heap_object
  // register is the same register as type_reg.
  void CompareInstanceType(Register map,
                           Register type_reg,
                           InstanceType type);


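  // Illustrative sketch (not part of the original header): branch to a miss
  // label unless the heap object in r0 is a JSFunction; JS_FUNCTION_TYPE is
  // one of the InstanceType values from objects.h.
  //
  //   masm->CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE);
  //   masm->b(ne, &miss);
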
  // Check if the map of an object is equal to a specified map and
  // branch to label if not. Skip the smi check if not required
  // (object is known to be a heap object).
  void CheckMap(Register obj,
                Register scratch,
                Handle<Map> map,
                Label* fail,
                bool is_heap_object);

  // Load and check the instance type of an object for being a string.
  // Loads the type into the second argument register.
  // Returns a condition that will be enabled if the object was a string.
  Condition IsObjectStringType(Register obj,
                               Register type) {
    ldr(type, FieldMemOperand(obj, HeapObject::kMapOffset));
    ldrb(type, FieldMemOperand(type, Map::kInstanceTypeOffset));
    tst(type, Operand(kIsNotStringMask));
    ASSERT_EQ(0, kStringTag);
    // String instance types have a clear kIsNotStringMask bit, so eq is set
    // exactly when the object is a string.
    return eq;
  }


  // Jump to the label if the value is a smi (i.e. the smi tag bit is clear).
  inline void BranchOnSmi(Register value, Label* smi_label) {
    tst(value, Operand(kSmiTagMask));
    b(eq, smi_label);
  }

  // Jump to the label if the value is not a smi.
  inline void BranchOnNotSmi(Register value, Label* not_smi_label) {
    tst(value, Operand(kSmiTagMask));
    b(ne, not_smi_label);
  }

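  // Illustrative sketch (not part of the original header): a typical string
  // fast path rules out smis first and then tests the instance type; the
  // registers and the slow label are assumptions of this sketch.
  //
  //   masm->BranchOnSmi(r0, &slow);
  //   Condition is_string = masm->IsObjectStringType(r0, r1);
  //   masm->b(NegateCondition(is_string), &slow);
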
  // Generates code for reporting that an illegal operation has
  // occurred.
  void IllegalOperation(int num_arguments);

  // Get the number of least significant bits from a register.
  void GetLeastBitsFromSmi(Register dst, Register src, int num_least_bits);

  // Uses VFP instructions to convert a Smi to a double.
  void IntegerToDoubleConversionWithVFP3(Register inReg,
                                         Register outHighReg,
                                         Register outLowReg);

  // Count leading zeros in a 32 bit word. On ARMv5 and later it uses the clz
  // instruction. On pre-ARMv5 hardware this routine gives the wrong answer
  // for 0 (31 instead of 32).
  void CountLeadingZeros(Register source,
                         Register scratch,
                         Register zeros);

  // ---------------------------------------------------------------------------
  // Runtime calls

  // Call a code stub.
  void CallStub(CodeStub* stub, Condition cond = al);

  // Tail call a code stub (jump).
  void TailCallStub(CodeStub* stub, Condition cond = al);

  // Return from a code stub after popping its arguments.
  void StubReturn(int argc);

  // Call a runtime routine.
  void CallRuntime(Runtime::Function* f, int num_arguments);

  // Convenience function: Same as above, but takes the fid instead.
  void CallRuntime(Runtime::FunctionId fid, int num_arguments);

  // Convenience function: call an external reference.
  void CallExternalReference(const ExternalReference& ext,
                             int num_arguments);

  // Tail call of a runtime routine (jump).
  // Like JumpToExternalReference, but also takes care of passing the number
  // of parameters.
  void TailCallExternalReference(const ExternalReference& ext,
                                 int num_arguments,
                                 int result_size);

  // Convenience function: tail call a runtime routine (jump).
  void TailCallRuntime(Runtime::FunctionId fid,
                       int num_arguments,
                       int result_size);

  // Before calling a C-function from generated code, align arguments on the
  // stack. After aligning the frame, non-register arguments must be stored in
  // sp[0], sp[4], etc., not pushed. The argument count assumes all arguments
  // are word sized.
  // Some compilers/platforms require the stack to be aligned when calling
  // C++ code.
  // Needs a scratch register to do some arithmetic. This register will be
  // trashed.
  void PrepareCallCFunction(int num_arguments, Register scratch);

  // Calls a C function and cleans up the space for arguments allocated
  // by PrepareCallCFunction. The called function is not allowed to trigger a
  // garbage collection, since that might move the code and invalidate the
  // return address (unless this is somehow accounted for by the called
  // function).
  void CallCFunction(ExternalReference function, int num_arguments);
  void CallCFunction(Register function, int num_arguments);

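  // Illustrative sketch (not part of the original header): call a
  // two-argument C function. The argument values are arbitrary and ext_ref
  // stands for an ExternalReference to the C function (hypothetical in this
  // sketch); r5 is only the scratch register PrepareCallCFunction needs.
  //
  //   masm->PrepareCallCFunction(2, r5);
  //   masm->mov(r0, Operand(4));        // First argument in r0.
  //   masm->mov(r1, Operand(2));        // Second argument in r1.
  //   masm->CallCFunction(ext_ref, 2);
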
  // Jump to a runtime routine.
  void JumpToExternalReference(const ExternalReference& builtin);

  // Invoke specified builtin JavaScript function. Adds an entry to
  // the unresolved list if the name does not resolve.
  void InvokeBuiltin(Builtins::JavaScript id, InvokeJSFlags flags);

  // Store the code object for the given builtin in the target register and
  // set up the function in r1.
  void GetBuiltinEntry(Register target, Builtins::JavaScript id);

  Handle<Object> CodeObject() { return code_object_; }


  // ---------------------------------------------------------------------------
  // StatsCounter support

  void SetCounter(StatsCounter* counter, int value,
                  Register scratch1, Register scratch2);
  void IncrementCounter(StatsCounter* counter, int value,
                        Register scratch1, Register scratch2);
  void DecrementCounter(StatsCounter* counter, int value,
                        Register scratch1, Register scratch2);


  // ---------------------------------------------------------------------------
  // Debugging

  // Calls Abort(msg) if the condition cc is not satisfied.
  // Use --debug_code to enable.
  void Assert(Condition cc, const char* msg);

  // Like Assert(), but always enabled.
  void Check(Condition cc, const char* msg);

  // Print a message to stdout and abort execution.
  void Abort(const char* msg);

  // Verify restrictions about code generated in stubs.
  void set_generating_stub(bool value) { generating_stub_ = value; }
  bool generating_stub() { return generating_stub_; }
  void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
  bool allow_stub_calls() { return allow_stub_calls_; }

  // ---------------------------------------------------------------------------
  // Smi utilities

  // Jump if either of the registers contains a non-smi.
  void JumpIfNotBothSmi(Register reg1, Register reg2, Label* on_not_both_smi);
  // Jump if either of the registers contains a smi.
  void JumpIfEitherSmi(Register reg1, Register reg2, Label* on_either_smi);

  // ---------------------------------------------------------------------------
  // String utilities

  // Checks if both objects are sequential ASCII strings and jumps to label
  // if either is not. Assumes that neither object is a smi.
  void JumpIfNonSmisNotBothSequentialAsciiStrings(Register object1,
                                                  Register object2,
                                                  Register scratch1,
                                                  Register scratch2,
                                                  Label* failure);

  // Checks if both objects are sequential ASCII strings and jumps to label
  // if either is not.
  void JumpIfNotBothSequentialAsciiStrings(Register first,
                                           Register second,
                                           Register scratch1,
                                           Register scratch2,
                                           Label* not_flat_ascii_strings);

  // Checks if both instance types are sequential ASCII strings and jumps to
  // label if either is not.
  void JumpIfBothInstanceTypesAreNotSequentialAscii(
      Register first_object_instance_type,
      Register second_object_instance_type,
      Register scratch1,
      Register scratch2,
      Label* failure);

  // Check if instance type is sequential ASCII string and jump to label if
  // it is not.
  void JumpIfInstanceTypeIsNotSequentialAscii(Register type,
                                              Register scratch,
                                              Label* failure);


 private:
  void Jump(intptr_t target, RelocInfo::Mode rmode, Condition cond = al);
  void Call(intptr_t target, RelocInfo::Mode rmode, Condition cond = al);

  // Helper functions for generating invokes.
  void InvokePrologue(const ParameterCount& expected,
                      const ParameterCount& actual,
                      Handle<Code> code_constant,
                      Register code_reg,
                      Label* done,
                      InvokeFlag flag);

  // Activation support.
  void EnterFrame(StackFrame::Type type);
  void LeaveFrame(StackFrame::Type type);

  void InitializeNewString(Register string,
                           Register length,
                           Heap::RootListIndex map_index,
                           Register scratch1,
                           Register scratch2);

  bool generating_stub_;
  bool allow_stub_calls_;
  // This handle will be patched with the code object on installation.
  Handle<Object> code_object_;
};


#ifdef ENABLE_DEBUGGER_SUPPORT
// The code patcher is used to patch (typically) small parts of code e.g. for
// debugging and other types of instrumentation. When using the code patcher
// the exact number of bytes specified must be emitted. It is not legal to emit
// relocation information. If any of these constraints are violated it causes
// an assertion to fail.
class CodePatcher {
 public:
  CodePatcher(byte* address, int instructions);
  virtual ~CodePatcher();

  // Macro assembler to emit code.
  MacroAssembler* masm() { return &masm_; }

  // Emit an instruction directly.
  void Emit(Instr x);

  // Emit an address directly.
  void Emit(Address addr);

 private:
  byte* address_;        // The address of the code being patched.
  int instructions_;     // Number of instructions of the expected patch size.
  int size_;             // Number of bytes of the expected patch size.
  MacroAssembler masm_;  // Macro assembler used to generate the code.
};
#endif  // ENABLE_DEBUGGER_SUPPORT

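// Illustrative sketch (not part of the original header): overwrite exactly
// two instructions at a known code address, e.g. from the debugger. The
// address and instruction count are assumptions of this sketch.
//
//   CodePatcher patcher(pc_address, 2);
//   patcher.masm()->mov(r0, Operand(r0));  // First patched instruction (a nop).
//   patcher.masm()->mov(r0, Operand(r0));  // Second patched instruction.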

// -----------------------------------------------------------------------------
// Static helper functions.

#ifdef GENERATED_CODE_COVERAGE
#define CODE_COVERAGE_STRINGIFY(x) #x
#define CODE_COVERAGE_TOSTRING(x) CODE_COVERAGE_STRINGIFY(x)
#define __FILE_LINE__ __FILE__ ":" CODE_COVERAGE_TOSTRING(__LINE__)
#define ACCESS_MASM(masm) masm->stop(__FILE_LINE__); masm->
#else
#define ACCESS_MASM(masm) masm->
#endif


} }  // namespace v8::internal

#endif  // V8_ARM_MACRO_ASSEMBLER_ARM_H_