// Copyright 2006-2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_ARM_MACRO_ASSEMBLER_ARM_H_
#define V8_ARM_MACRO_ASSEMBLER_ARM_H_

#include "assembler.h"

namespace v8 {
namespace internal {

// ----------------------------------------------------------------------------
// Static helper functions

// Generate a MemOperand for loading a field from an object.
static inline MemOperand FieldMemOperand(Register object, int offset) {
  return MemOperand(object, offset - kHeapObjectTag);
}

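// Heap object pointers in V8 are tagged: kHeapObjectTag is added to the raw
// address, so the true address of a field is object - kHeapObjectTag + offset.
// FieldMemOperand folds the untagging into the memory operand. Illustrative
// use (register choice is only an example):
//
//   __ ldr(r0, FieldMemOperand(r1, HeapObject::kMapOffset));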

// Give alias names to registers
const Register cp = { 8 };  // JavaScript context pointer
const Register roots = { 10 };  // Roots array pointer.

enum InvokeJSFlags {
  CALL_JS,
  JUMP_JS
};


// Flags used for the AllocateInNewSpace functions.
enum AllocationFlags {
  // No special flags.
  NO_ALLOCATION_FLAGS = 0,
  // Return the pointer to allocated memory already tagged as a heap object.
  TAG_OBJECT = 1 << 0,
  // The content of the result register already contains the allocation top in
  // new space.
  RESULT_CONTAINS_TOP = 1 << 1,
  // Specify that the requested size of the space to allocate is specified in
  // words instead of bytes.
  SIZE_IN_WORDS = 1 << 2
};
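
// The allocation flags are bit values and may be combined. Since bitwise-or
// on enum values yields an int, call sites need a cast back to
// AllocationFlags (illustrative):
//
//   static_cast<AllocationFlags>(TAG_OBJECT | RESULT_CONTAINS_TOP)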


// MacroAssembler implements a collection of frequently used macros.
class MacroAssembler: public Assembler {
 public:
  MacroAssembler(void* buffer, int size);

  // Jump, Call, and Ret pseudo instructions implementing inter-working.
  void Jump(Register target, Condition cond = al);
  void Jump(byte* target, RelocInfo::Mode rmode, Condition cond = al);
  void Jump(Handle<Code> code, RelocInfo::Mode rmode, Condition cond = al);
  void Call(Register target, Condition cond = al);
  void Call(byte* target, RelocInfo::Mode rmode, Condition cond = al);
  void Call(Handle<Code> code, RelocInfo::Mode rmode, Condition cond = al);
  void Ret(Condition cond = al);

  // Emit code to discard a non-negative number of pointer-sized elements
  // from the stack, clobbering only the sp register.
  void Drop(int count, Condition cond = al);


  // Swap two registers. If the scratch register is omitted then a slightly
  // less efficient form using xor instead of mov is emitted.
  void Swap(Register reg1, Register reg2, Register scratch = no_reg);

  void Call(Label* target);
  void Move(Register dst, Handle<Object> value);
  // May do nothing if the registers are identical.
  void Move(Register dst, Register src);
  // Jumps to the label at the index given by the Smi in "index".
  void SmiJumpTable(Register index, Vector<Label*> targets);
  // Load an object from the root table.
  void LoadRoot(Register destination,
                Heap::RootListIndex index,
                Condition cond = al);
  // Store an object to the root table.
  void StoreRoot(Register source,
                 Heap::RootListIndex index,
                 Condition cond = al);

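  // Illustrative use (the root index is one of Heap's RootListIndex values;
  // the label is hypothetical):
  //
  //   __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  //   __ cmp(r0, Operand(ip));
  //   __ b(eq, &is_undefined);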

  // Check if object is in new space.
  // scratch can be object itself, but it will be clobbered.
  void InNewSpace(Register object,
                  Register scratch,
                  Condition cc,  // eq for new space, ne otherwise
                  Label* branch);


  // Set the remembered set bit for an offset into an
  // object. RecordWriteHelper only works if the object is not in new
  // space.
  void RecordWriteHelper(Register object, Register offset, Register scratch);

  // Sets the remembered set bit for [address+offset], where address is the
  // address of the heap object 'object'. The address must be in the first 8K
  // of an allocated page. The 'scratch' register is used in the
  // implementation and all 3 registers are clobbered by the operation, as
  // well as the ip register.
  void RecordWrite(Register object, Register offset, Register scratch);
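  // This is the write barrier used by generated code: after storing a pointer
  // into a heap object, recording the write keeps the remembered set up to
  // date so the garbage collector does not miss the reference.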

  // Push two registers. Pushes leftmost register first (to highest address).
  void Push(Register src1, Register src2, Condition cond = al) {
    ASSERT(!src1.is(src2));
    if (src1.code() > src2.code()) {
      stm(db_w, sp, src1.bit() | src2.bit(), cond);
    } else {
      str(src1, MemOperand(sp, 4, NegPreIndex), cond);
      str(src2, MemOperand(sp, 4, NegPreIndex), cond);
    }
  }

  // Push three registers. Pushes leftmost register first (to highest address).
  void Push(Register src1, Register src2, Register src3, Condition cond = al) {
    ASSERT(!src1.is(src2));
    ASSERT(!src2.is(src3));
    ASSERT(!src1.is(src3));
    if (src1.code() > src2.code()) {
      if (src2.code() > src3.code()) {
        stm(db_w, sp, src1.bit() | src2.bit() | src3.bit(), cond);
      } else {
        stm(db_w, sp, src1.bit() | src2.bit(), cond);
        str(src3, MemOperand(sp, 4, NegPreIndex), cond);
      }
    } else {
      str(src1, MemOperand(sp, 4, NegPreIndex), cond);
      Push(src2, src3, cond);
    }
  }

  // Push four registers. Pushes leftmost register first (to highest address).
  void Push(Register src1, Register src2,
            Register src3, Register src4, Condition cond = al) {
    ASSERT(!src1.is(src2));
    ASSERT(!src2.is(src3));
    ASSERT(!src1.is(src3));
    ASSERT(!src1.is(src4));
    ASSERT(!src2.is(src4));
    ASSERT(!src3.is(src4));
    if (src1.code() > src2.code()) {
      if (src2.code() > src3.code()) {
        if (src3.code() > src4.code()) {
          stm(db_w,
              sp,
              src1.bit() | src2.bit() | src3.bit() | src4.bit(),
              cond);
        } else {
          stm(db_w, sp, src1.bit() | src2.bit() | src3.bit(), cond);
          str(src4, MemOperand(sp, 4, NegPreIndex), cond);
        }
      } else {
        stm(db_w, sp, src1.bit() | src2.bit(), cond);
        Push(src3, src4, cond);
      }
    } else {
      str(src1, MemOperand(sp, 4, NegPreIndex), cond);
      Push(src2, src3, src4, cond);
    }
  }
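
  // With these Push overloads, Push(r1, r2, r3) leaves r3 at the lowest
  // address (the new sp) and r1 at the highest, i.e. the same layout as three
  // successive single-register pushes of r1, r2 and r3.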

  // ---------------------------------------------------------------------------
  // Stack limit support

  void StackLimitCheck(Label* on_stack_limit_hit);

  // ---------------------------------------------------------------------------
  // Activation frames

  void EnterInternalFrame() { EnterFrame(StackFrame::INTERNAL); }
  void LeaveInternalFrame() { LeaveFrame(StackFrame::INTERNAL); }

  void EnterConstructFrame() { EnterFrame(StackFrame::CONSTRUCT); }
  void LeaveConstructFrame() { LeaveFrame(StackFrame::CONSTRUCT); }

  // Enter specific kind of exit frame; either normal or debug mode.
  // Expects the number of arguments in register r0 and
  // the builtin function to call in register r1. Exits with argc in
  // r4, argv in r6, and the builtin function to call in r5.
  void EnterExitFrame(ExitFrame::Mode mode);

  // Leave the current exit frame. Expects the return value in r0.
  void LeaveExitFrame(ExitFrame::Mode mode);

  // Get the actual activation frame alignment for target environment.
  static int ActivationFrameAlignment();

  void LoadContext(Register dst, int context_chain_length);

  // ---------------------------------------------------------------------------
  // JavaScript invokes

  // Invoke the JavaScript function code by either calling or jumping.
  void InvokeCode(Register code,
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  InvokeFlag flag);

  void InvokeCode(Handle<Code> code,
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  RelocInfo::Mode rmode,
                  InvokeFlag flag);

  // Invoke the JavaScript function in the given register. Changes the
  // current context to the context in the function before invoking.
  void InvokeFunction(Register function,
                      const ParameterCount& actual,
                      InvokeFlag flag);

  void InvokeFunction(JSFunction* function,
                      const ParameterCount& actual,
                      InvokeFlag flag);


#ifdef ENABLE_DEBUGGER_SUPPORT
  // ---------------------------------------------------------------------------
  // Debugger Support

  void SaveRegistersToMemory(RegList regs);
  void RestoreRegistersFromMemory(RegList regs);
  void CopyRegistersFromMemoryToStack(Register base, RegList regs);
  void CopyRegistersFromStackToMemory(Register base,
                                      Register scratch,
                                      RegList regs);
  void DebugBreak();
#endif

  // ---------------------------------------------------------------------------
  // Exception handling

  // Push a new try handler and link into try handler chain.
  // The return address must be passed in register lr.
  // On exit, r0 contains TOS (code slot).
  void PushTryHandler(CodeLocation try_location, HandlerType type);

  // Unlink the stack handler on top of the stack from the try handler chain.
  // Must preserve the result register.
  void PopTryHandler();

  // ---------------------------------------------------------------------------
  // Inline caching support

  // Generates code that verifies that the maps of objects in the
  // prototype chain of object haven't changed since the code was
  // generated and branches to the miss label if any map has. If
  // necessary the function also generates code for a security check
  // in case of global object holders. The scratch and holder
  // registers are always clobbered, but the object register is only
  // clobbered if it is the same as the holder register. The function
  // returns a register containing the holder - either object_reg or
  // holder_reg.
  // The function can optionally (when save_at_depth !=
  // kInvalidProtoDepth) save the object at the given depth by moving
  // it to [sp].
  Register CheckMaps(JSObject* object, Register object_reg,
                     JSObject* holder, Register holder_reg,
                     Register scratch,
                     int save_at_depth,
                     Label* miss);

  // Generate code for checking access rights - used for security checks
  // on access to global objects across environments. The holder register
  // is left untouched, whereas both scratch registers are clobbered.
  void CheckAccessGlobalProxy(Register holder_reg,
                              Register scratch,
                              Label* miss);


  // ---------------------------------------------------------------------------
  // Allocation support

  // Allocate an object in new space. The object_size is specified in words (not
  // bytes). If the new space is exhausted control continues at the gc_required
  // label. The allocated object is returned in result. If the flag
  // TAG_OBJECT is set the result is tagged as a heap object. All
  // registers are clobbered also when control continues at the gc_required
  // label.
  void AllocateInNewSpace(int object_size,
                          Register result,
                          Register scratch1,
                          Register scratch2,
                          Label* gc_required,
                          AllocationFlags flags);
  void AllocateInNewSpace(Register object_size,
                          Register result,
                          Register scratch1,
                          Register scratch2,
                          Label* gc_required,
                          AllocationFlags flags);
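
  // Illustrative fixed-size allocation (the size expression, registers and
  // label are only an example):
  //
  //   __ AllocateInNewSpace(HeapNumber::kSize / kPointerSize,
  //                         r0, r1, r2, &gc_required, TAG_OBJECT);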

  // Undo allocation in new space. The object passed and objects allocated after
  // it will no longer be allocated. The caller must make sure that no pointers
  // are left to the object(s) no longer allocated as they would be invalid when
  // allocation is undone.
  void UndoAllocationInNewSpace(Register object, Register scratch);


  void AllocateTwoByteString(Register result,
                             Register length,
                             Register scratch1,
                             Register scratch2,
                             Register scratch3,
                             Label* gc_required);
  void AllocateAsciiString(Register result,
                           Register length,
                           Register scratch1,
                           Register scratch2,
                           Register scratch3,
                           Label* gc_required);
  void AllocateTwoByteConsString(Register result,
                                 Register length,
                                 Register scratch1,
                                 Register scratch2,
                                 Label* gc_required);
  void AllocateAsciiConsString(Register result,
                               Register length,
                               Register scratch1,
                               Register scratch2,
                               Label* gc_required);

  // Allocates a heap number or jumps to the gc_required label if the young
  // space is full and a scavenge is needed. All registers are clobbered also
  // when control continues at the gc_required label.
  void AllocateHeapNumber(Register result,
                          Register scratch1,
                          Register scratch2,
                          Label* gc_required);

  // ---------------------------------------------------------------------------
  // Support functions.

  // Try to get the function prototype of a function and put the value in
  // the result register. Checks that the function really is a
  // function and jumps to the miss label if the fast checks fail. The
  // function register will be untouched; the other registers may be
  // clobbered.
  void TryGetFunctionPrototype(Register function,
                               Register result,
                               Register scratch,
                               Label* miss);

  // Compare object type for heap object. heap_object contains a non-Smi
  // whose object type should be compared with the given type. This both
  // sets the flags and leaves the object type in the type_reg register.
  // It leaves the map in the map register (unless the type_reg and map register
  // are the same register). It leaves the heap object in the heap_object
  // register unless the heap_object register is the same register as one of the
  // other registers.
  void CompareObjectType(Register heap_object,
                         Register map,
                         Register type_reg,
                         InstanceType type);
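  // Illustrative use (the instance type constant and label are examples):
  //
  //   __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE);
  //   __ b(ne, &not_a_function);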

  // Compare instance type in a map. map contains a valid map object whose
  // object type should be compared with the given type. This both
  // sets the flags and leaves the object type in the type_reg register. It
  // leaves the heap object in the heap_object register unless the heap_object
  // register is the same register as type_reg.
  void CompareInstanceType(Register map,
                           Register type_reg,
                           InstanceType type);


  // Check if the map of an object is equal to a specified map and
  // branch to label if not. Skip the smi check if not required
  // (object is known to be a heap object)
  void CheckMap(Register obj,
                Register scratch,
                Handle<Map> map,
                Label* fail,
                bool is_heap_object);

  // Load and check the instance type of an object for being a string.
  // Loads the type into the second argument register.
  // Returns a condition that will be enabled if the object was a string.
  Condition IsObjectStringType(Register obj,
                               Register type) {
    ldr(type, FieldMemOperand(obj, HeapObject::kMapOffset));
    ldrb(type, FieldMemOperand(type, Map::kInstanceTypeOffset));
    tst(type, Operand(kIsNotStringMask));
    ASSERT_EQ(0, kStringTag);
    return eq;
  }
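
  // Illustrative use of the returned condition (the label is an example):
  //
  //   Condition is_string = IsObjectStringType(r0, r1);
  //   b(is_string, &handle_string);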


  inline void BranchOnSmi(Register value, Label* smi_label) {
    tst(value, Operand(kSmiTagMask));
    b(eq, smi_label);
  }

  inline void BranchOnNotSmi(Register value, Label* not_smi_label) {
    tst(value, Operand(kSmiTagMask));
    b(ne, not_smi_label);
  }
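
  // The two branches above rely on V8's pointer tagging: a smi has its low
  // tag bit clear (kSmiTag is 0) while a heap object pointer has it set, so
  // tst against kSmiTagMask sets the Z flag exactly for smis.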

  // Generates code for reporting that an illegal operation has
  // occurred.
  void IllegalOperation(int num_arguments);

  // Get the number of least significant bits from a register
  void GetLeastBitsFromSmi(Register dst, Register src, int num_least_bits);

  // Uses VFP instructions to convert a Smi to a double.
  void IntegerToDoubleConversionWithVFP3(Register inReg,
                                         Register outHighReg,
                                         Register outLowReg);

  // Count leading zeros in a 32 bit word. On ARMv5 and later it uses the clz
  // instruction. On pre-ARMv5 hardware this routine gives the wrong answer
  // for 0 (31 instead of 32).
  void CountLeadingZeros(Register source,
                         Register scratch,
                         Register zeros);

  // ---------------------------------------------------------------------------
  // Runtime calls

  // Call a code stub.
  void CallStub(CodeStub* stub, Condition cond = al);

  // Tail call a code stub (jump).
  void TailCallStub(CodeStub* stub, Condition cond = al);

  // Return from a code stub after popping its arguments.
  void StubReturn(int argc);

  // Call a runtime routine.
  void CallRuntime(Runtime::Function* f, int num_arguments);

  // Convenience function: Same as above, but takes the fid instead.
  void CallRuntime(Runtime::FunctionId fid, int num_arguments);

  // Convenience function: call an external reference.
  void CallExternalReference(const ExternalReference& ext,
                             int num_arguments);

  // Tail call of a runtime routine (jump).
  // Like JumpToExternalReference, but also takes care of passing the number
  // of parameters.
  void TailCallExternalReference(const ExternalReference& ext,
                                 int num_arguments,
                                 int result_size);

  // Convenience function: tail call a runtime routine (jump).
  void TailCallRuntime(Runtime::FunctionId fid,
                       int num_arguments,
                       int result_size);

  // Before calling a C-function from generated code, align arguments on stack.
  // After aligning the frame, non-register arguments must be stored in
  // sp[0], sp[4], etc., not pushed. The argument count assumes all arguments
  // are word sized.
  // Some compilers/platforms require the stack to be aligned when calling
  // C++ code.
  // Needs a scratch register to do some arithmetic. This register will be
  // trashed.
  void PrepareCallCFunction(int num_arguments, Register scratch);

  // Calls a C function and cleans up the space for arguments allocated
  // by PrepareCallCFunction. The called function is not allowed to trigger a
  // garbage collection, since that might move the code and invalidate the
  // return address (unless this is somehow accounted for by the called
  // function).
  void CallCFunction(ExternalReference function, int num_arguments);
  void CallCFunction(Register function, int num_arguments);
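
  // Illustrative calling sequence (the external reference, argument values
  // and register choices are hypothetical):
  //
  //   __ PrepareCallCFunction(2, r5);
  //   __ mov(r0, Operand(first_arg));
  //   __ mov(r1, Operand(second_arg));
  //   __ CallCFunction(ExternalReference::some_function(), 2);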
493
Steve Blocka7e24c12009-10-30 11:49:00 +0000494 // Jump to a runtime routine.
Steve Block6ded16b2010-05-10 14:33:55 +0100495 void JumpToExternalReference(const ExternalReference& builtin);
Steve Blocka7e24c12009-10-30 11:49:00 +0000496
497 // Invoke specified builtin JavaScript function. Adds an entry to
498 // the unresolved list if the name does not resolve.
499 void InvokeBuiltin(Builtins::JavaScript id, InvokeJSFlags flags);
500
501 // Store the code object for the given builtin in the target register and
  // set up the function in r1.
  void GetBuiltinEntry(Register target, Builtins::JavaScript id);

  Handle<Object> CodeObject() { return code_object_; }


  // ---------------------------------------------------------------------------
  // StatsCounter support

  void SetCounter(StatsCounter* counter, int value,
                  Register scratch1, Register scratch2);
  void IncrementCounter(StatsCounter* counter, int value,
                        Register scratch1, Register scratch2);
  void DecrementCounter(StatsCounter* counter, int value,
                        Register scratch1, Register scratch2);


  // ---------------------------------------------------------------------------
  // Debugging

  // Calls Abort(msg) if the condition cc is not satisfied.
  // Use --debug_code to enable.
  void Assert(Condition cc, const char* msg);

  // Like Assert(), but always enabled.
  void Check(Condition cc, const char* msg);

  // Print a message to stdout and abort execution.
  void Abort(const char* msg);

  // Verify restrictions about code generated in stubs.
  void set_generating_stub(bool value) { generating_stub_ = value; }
  bool generating_stub() { return generating_stub_; }
  void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
  bool allow_stub_calls() { return allow_stub_calls_; }

  // ---------------------------------------------------------------------------
  // Smi utilities

  // Jump if either of the registers contains a non-smi.
  void JumpIfNotBothSmi(Register reg1, Register reg2, Label* on_not_both_smi);
  // Jump if either of the registers contains a smi.
  void JumpIfEitherSmi(Register reg1, Register reg2, Label* on_either_smi);

  // ---------------------------------------------------------------------------
  // String utilities

  // Checks if both objects are sequential ASCII strings and jumps to label
  // if either is not. Assumes that neither object is a smi.
  void JumpIfNonSmisNotBothSequentialAsciiStrings(Register object1,
                                                  Register object2,
                                                  Register scratch1,
                                                  Register scratch2,
                                                  Label* failure);

  // Checks if both objects are sequential ASCII strings and jumps to label
  // if either is not.
  void JumpIfNotBothSequentialAsciiStrings(Register first,
                                           Register second,
                                           Register scratch1,
                                           Register scratch2,
                                           Label* not_flat_ascii_strings);

  // Checks if both instance types are sequential ASCII strings and jumps to
  // label if either is not.
  void JumpIfBothInstanceTypesAreNotSequentialAscii(
      Register first_object_instance_type,
      Register second_object_instance_type,
      Register scratch1,
      Register scratch2,
      Label* failure);

  // Check if instance type is sequential ASCII string and jump to label if
  // it is not.
  void JumpIfInstanceTypeIsNotSequentialAscii(Register type,
                                              Register scratch,
                                              Label* failure);


 private:
  void Jump(intptr_t target, RelocInfo::Mode rmode, Condition cond = al);
  void Call(intptr_t target, RelocInfo::Mode rmode, Condition cond = al);

  // Helper functions for generating invokes.
  void InvokePrologue(const ParameterCount& expected,
                      const ParameterCount& actual,
                      Handle<Code> code_constant,
                      Register code_reg,
                      Label* done,
                      InvokeFlag flag);

  // Activation support.
  void EnterFrame(StackFrame::Type type);
  void LeaveFrame(StackFrame::Type type);

  void InitializeNewString(Register string,
                           Register length,
                           Heap::RootListIndex map_index,
                           Register scratch1,
                           Register scratch2);

  bool generating_stub_;
  bool allow_stub_calls_;
  // This handle will be patched with the code object on installation.
  Handle<Object> code_object_;
};


#ifdef ENABLE_DEBUGGER_SUPPORT
// The code patcher is used to patch (typically) small parts of code e.g. for
// debugging and other types of instrumentation. When using the code patcher
// the exact number of bytes specified must be emitted. It is not legal to emit
// relocation information. If any of these constraints are violated it causes
// an assertion to fail.
class CodePatcher {
 public:
  CodePatcher(byte* address, int instructions);
  virtual ~CodePatcher();

  // Macro assembler to emit code.
  MacroAssembler* masm() { return &masm_; }

  // Emit an instruction directly.
  void Emit(Instr x);

  // Emit an address directly.
  void Emit(Address addr);

 private:
  byte* address_;  // The address of the code being patched.
  int instructions_;  // Number of instructions of the expected patch size.
  int size_;  // Number of bytes of the expected patch size.
  MacroAssembler masm_;  // Macro assembler used to generate the code.
};
#endif  // ENABLE_DEBUGGER_SUPPORT


// -----------------------------------------------------------------------------
// Static helper functions.

#ifdef GENERATED_CODE_COVERAGE
#define CODE_COVERAGE_STRINGIFY(x) #x
#define CODE_COVERAGE_TOSTRING(x) CODE_COVERAGE_STRINGIFY(x)
#define __FILE_LINE__ __FILE__ ":" CODE_COVERAGE_TOSTRING(__LINE__)
#define ACCESS_MASM(masm) masm->stop(__FILE_LINE__); masm->
#else
#define ACCESS_MASM(masm) masm->
#endif


} }  // namespace v8::internal

#endif  // V8_ARM_MACRO_ASSEMBLER_ARM_H_