// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_ARM_MACRO_ASSEMBLER_ARM_H_
#define V8_ARM_MACRO_ASSEMBLER_ARM_H_

#include "assembler.h"
#include "v8globals.h"

namespace v8 {
namespace internal {

// ----------------------------------------------------------------------------
// Static helper functions

// Generate a MemOperand for loading a field from an object.
static inline MemOperand FieldMemOperand(Register object, int offset) {
  return MemOperand(object, offset - kHeapObjectTag);
}
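
// For example (an illustrative sketch only, assuming the usual V8 layout in
// which heap object pointers carry a kHeapObjectTag of 1):
//
//   ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
//
// loads the map of the object held in r0, compensating for the tag so the
// effective address is (r0 - 1) + HeapObject::kMapOffset.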


static inline Operand SmiUntagOperand(Register object) {
  return Operand(object, ASR, kSmiTagSize);
}



// Give alias names to registers
const Register cp = { 8 };  // JavaScript context pointer
const Register kRootRegister = { 10 };  // Roots array pointer.

// Flags used for the AllocateInNewSpace functions.
enum AllocationFlags {
  // No special flags.
  NO_ALLOCATION_FLAGS = 0,
  // Return the pointer to the allocated object, already tagged as a heap
  // object.
  TAG_OBJECT = 1 << 0,
  // The content of the result register already contains the allocation top in
  // new space.
  RESULT_CONTAINS_TOP = 1 << 1,
  // Specify that the requested size of the space to allocate is specified in
  // words instead of bytes.
  SIZE_IN_WORDS = 1 << 2
};
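
// The flags can be combined; for instance (an illustrative sketch only):
//
//   AllocationFlags flags =
//       static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS);
//
// requests an allocation whose size is given in words and whose result is
// returned with the heap object tag already applied.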


// Flags used for the ObjectToDoubleVFPRegister function.
enum ObjectToDoubleFlags {
  // No special flags.
  NO_OBJECT_TO_DOUBLE_FLAGS = 0,
  // Object is known to be a non-smi.
  OBJECT_NOT_SMI = 1 << 0,
  // Don't load NaNs or infinities; branch to the non-number case instead.
  AVOID_NANS_AND_INFINITIES = 1 << 1
};


// MacroAssembler implements a collection of frequently used macros.
class MacroAssembler: public Assembler {
 public:
  // The isolate parameter can be NULL if the macro assembler should
  // not use isolate-dependent functionality. In this case, it's the
  // responsibility of the caller to never invoke such functions on the
  // macro assembler.
  MacroAssembler(Isolate* isolate, void* buffer, int size);

  // Jump, Call, and Ret pseudo instructions implementing inter-working.
  void Jump(Register target, Condition cond = al);
  void Jump(Address target, RelocInfo::Mode rmode, Condition cond = al);
  void Jump(Handle<Code> code, RelocInfo::Mode rmode, Condition cond = al);
  static int CallSize(Register target, Condition cond = al);
  void Call(Register target, Condition cond = al);
  static int CallSize(Address target,
                      RelocInfo::Mode rmode,
                      Condition cond = al);
  void Call(Address target, RelocInfo::Mode rmode, Condition cond = al);
  static int CallSize(Handle<Code> code,
                      RelocInfo::Mode rmode = RelocInfo::CODE_TARGET,
                      unsigned ast_id = kNoASTId,
                      Condition cond = al);
  void Call(Handle<Code> code,
            RelocInfo::Mode rmode = RelocInfo::CODE_TARGET,
            unsigned ast_id = kNoASTId,
            Condition cond = al);
  void Ret(Condition cond = al);

  // Emit code to discard a non-negative number of pointer-sized elements
  // from the stack, clobbering only the sp register.
  void Drop(int count, Condition cond = al);

  void Ret(int drop, Condition cond = al);

  // Swap two registers. If the scratch register is omitted then a slightly
  // less efficient form using xor instead of mov is emitted.
  void Swap(Register reg1,
            Register reg2,
            Register scratch = no_reg,
            Condition cond = al);


  void And(Register dst, Register src1, const Operand& src2,
           Condition cond = al);
  void Ubfx(Register dst, Register src, int lsb, int width,
            Condition cond = al);
  void Sbfx(Register dst, Register src, int lsb, int width,
            Condition cond = al);
  // The scratch register is not used for ARMv7.
  // scratch can be the same register as src (in which case it is trashed), but
  // not the same as dst.
  void Bfi(Register dst,
           Register src,
           Register scratch,
           int lsb,
           int width,
           Condition cond = al);
  void Bfc(Register dst, int lsb, int width, Condition cond = al);
  void Usat(Register dst, int satpos, const Operand& src,
            Condition cond = al);

  void Call(Label* target);

  // Register move. May do nothing if the registers are identical.
  void Move(Register dst, Handle<Object> value);
  void Move(Register dst, Register src, Condition cond = al);
  void Move(DoubleRegister dst, DoubleRegister src);

  // Load an object from the root table.
  void LoadRoot(Register destination,
                Heap::RootListIndex index,
                Condition cond = al);
  // Store an object to the root table.
  void StoreRoot(Register source,
                 Heap::RootListIndex index,
                 Condition cond = al);


  // Check if object is in new space.
  // scratch can be object itself, but it will be clobbered.
  void InNewSpace(Register object,
                  Register scratch,
                  Condition cond,  // eq for new space, ne otherwise
                  Label* branch);


  // For the page containing |object| mark the region covering [address]
  // dirty. The object address must be in the first 8K of an allocated page.
  void RecordWriteHelper(Register object,
                         Register address,
                         Register scratch);

  // For the page containing |object| mark the region covering
  // [object+offset] dirty. The object address must be in the first 8K
  // of an allocated page. The 'scratch' registers are used in the
  // implementation and all 3 registers are clobbered by the
  // operation, as well as the ip register. RecordWrite updates the
  // write barrier even when storing smis.
  void RecordWrite(Register object,
                   Operand offset,
                   Register scratch0,
                   Register scratch1);

  // For the page containing |object| mark the region covering
  // [address] dirty. The object address must be in the first 8K of an
  // allocated page. All 3 registers are clobbered by the operation,
  // as well as the ip register. RecordWrite updates the write barrier
  // even when storing smis.
  void RecordWrite(Register object,
                   Register address,
                   Register scratch);

  // Push a handle.
  void Push(Handle<Object> handle);

  // Push two registers. Pushes leftmost register first (to highest address).
  void Push(Register src1, Register src2, Condition cond = al) {
    ASSERT(!src1.is(src2));
    if (src1.code() > src2.code()) {
      stm(db_w, sp, src1.bit() | src2.bit(), cond);
    } else {
      str(src1, MemOperand(sp, 4, NegPreIndex), cond);
      str(src2, MemOperand(sp, 4, NegPreIndex), cond);
    }
  }

  // Push three registers. Pushes leftmost register first (to highest address).
  void Push(Register src1, Register src2, Register src3, Condition cond = al) {
    ASSERT(!src1.is(src2));
    ASSERT(!src2.is(src3));
    ASSERT(!src1.is(src3));
    if (src1.code() > src2.code()) {
      if (src2.code() > src3.code()) {
        stm(db_w, sp, src1.bit() | src2.bit() | src3.bit(), cond);
      } else {
        stm(db_w, sp, src1.bit() | src2.bit(), cond);
        str(src3, MemOperand(sp, 4, NegPreIndex), cond);
      }
    } else {
      str(src1, MemOperand(sp, 4, NegPreIndex), cond);
      Push(src2, src3, cond);
    }
  }

  // Push four registers. Pushes leftmost register first (to highest address).
  void Push(Register src1, Register src2,
            Register src3, Register src4, Condition cond = al) {
    ASSERT(!src1.is(src2));
    ASSERT(!src2.is(src3));
    ASSERT(!src1.is(src3));
    ASSERT(!src1.is(src4));
    ASSERT(!src2.is(src4));
    ASSERT(!src3.is(src4));
    if (src1.code() > src2.code()) {
      if (src2.code() > src3.code()) {
        if (src3.code() > src4.code()) {
          stm(db_w,
              sp,
              src1.bit() | src2.bit() | src3.bit() | src4.bit(),
              cond);
        } else {
          stm(db_w, sp, src1.bit() | src2.bit() | src3.bit(), cond);
          str(src4, MemOperand(sp, 4, NegPreIndex), cond);
        }
      } else {
        stm(db_w, sp, src1.bit() | src2.bit(), cond);
        Push(src3, src4, cond);
      }
    } else {
      str(src1, MemOperand(sp, 4, NegPreIndex), cond);
      Push(src2, src3, src4, cond);
    }
  }

  // Pop two registers. Pops rightmost register first (from lower address).
  void Pop(Register src1, Register src2, Condition cond = al) {
    ASSERT(!src1.is(src2));
    if (src1.code() > src2.code()) {
      ldm(ia_w, sp, src1.bit() | src2.bit(), cond);
    } else {
      ldr(src2, MemOperand(sp, 4, PostIndex), cond);
      ldr(src1, MemOperand(sp, 4, PostIndex), cond);
    }
  }
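
  // Stack layout sketch (illustrative only): after
  //
  //   Push(r1, r2);   // r1 ends up at the higher address, r2 at the lower
  //   ...
  //   Pop(r1, r2);    // r2 is popped first, from the lower address
  //
  // sp is back where it started and r1/r2 hold their original values,
  // assuming nothing else touched the stack in between.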

  // Push and pop the registers that can hold pointers, as defined by the
  // RegList constant kSafepointSavedRegisters.
  void PushSafepointRegisters();
  void PopSafepointRegisters();
  void PushSafepointRegistersAndDoubles();
  void PopSafepointRegistersAndDoubles();
  // Store value in register src in the safepoint stack slot for
  // register dst.
  void StoreToSafepointRegisterSlot(Register src, Register dst);
  void StoreToSafepointRegistersAndDoublesSlot(Register src, Register dst);
  // Load the value of the src register from its safepoint stack slot
  // into register dst.
  void LoadFromSafepointRegisterSlot(Register dst, Register src);

  // Load two consecutive registers with two consecutive memory locations.
  void Ldrd(Register dst1,
            Register dst2,
            const MemOperand& src,
            Condition cond = al);

  // Store two consecutive registers to two consecutive memory locations.
  void Strd(Register src1,
            Register src2,
            const MemOperand& dst,
            Condition cond = al);

  // Clear specified FPSCR bits.
  void ClearFPSCRBits(const uint32_t bits_to_clear,
                      const Register scratch,
                      const Condition cond = al);

  // Compare double values and move the result to the normal condition flags.
  void VFPCompareAndSetFlags(const DwVfpRegister src1,
                             const DwVfpRegister src2,
                             const Condition cond = al);
  void VFPCompareAndSetFlags(const DwVfpRegister src1,
                             const double src2,
                             const Condition cond = al);

  // Compare double values and then load the fpscr flags to a register.
  void VFPCompareAndLoadFlags(const DwVfpRegister src1,
                              const DwVfpRegister src2,
                              const Register fpscr_flags,
                              const Condition cond = al);
  void VFPCompareAndLoadFlags(const DwVfpRegister src1,
                              const double src2,
                              const Register fpscr_flags,
                              const Condition cond = al);

  void Vmov(const DwVfpRegister dst,
            const double imm,
            const Condition cond = al);


  // ---------------------------------------------------------------------------
  // Activation frames

  void EnterInternalFrame() { EnterFrame(StackFrame::INTERNAL); }
  void LeaveInternalFrame() { LeaveFrame(StackFrame::INTERNAL); }

  void EnterConstructFrame() { EnterFrame(StackFrame::CONSTRUCT); }
  void LeaveConstructFrame() { LeaveFrame(StackFrame::CONSTRUCT); }

  // Enter exit frame.
  // stack_space - extra stack space, used for alignment before call to C.
  void EnterExitFrame(bool save_doubles, int stack_space = 0);

  // Leave the current exit frame. Expects the return value in r0.
  // Expects the number of values to remove (those pushed prior to the exit
  // frame) in a register, or no_reg if there is nothing to remove.
  void LeaveExitFrame(bool save_doubles, Register argument_count);

  // Get the actual activation frame alignment for target environment.
  static int ActivationFrameAlignment();

  void LoadContext(Register dst, int context_chain_length);

  void LoadGlobalFunction(int index, Register function);

  // Load the initial map from the global function. The registers
  // function and map can be the same, function is then overwritten.
  void LoadGlobalFunctionInitialMap(Register function,
                                    Register map,
                                    Register scratch);

  void InitializeRootRegister() {
    ExternalReference roots_address =
        ExternalReference::roots_address(isolate());
    mov(kRootRegister, Operand(roots_address));
  }

  // ---------------------------------------------------------------------------
  // JavaScript invokes

  // Set up call kind marking in the dst register. The method takes dst as an
  // explicit first parameter to make the code more readable at the
  // call sites.
  void SetCallKind(Register dst, CallKind kind);

  // Invoke the JavaScript function code by either calling or jumping.
  void InvokeCode(Register code,
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  InvokeFlag flag,
                  const CallWrapper& call_wrapper,
                  CallKind call_kind);

  void InvokeCode(Handle<Code> code,
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  RelocInfo::Mode rmode,
                  InvokeFlag flag,
                  CallKind call_kind);

  // Invoke the JavaScript function in the given register. Changes the
  // current context to the context in the function before invoking.
  void InvokeFunction(Register function,
                      const ParameterCount& actual,
                      InvokeFlag flag,
                      const CallWrapper& call_wrapper,
                      CallKind call_kind);

  void InvokeFunction(JSFunction* function,
                      const ParameterCount& actual,
                      InvokeFlag flag,
                      CallKind call_kind);

  void IsObjectJSObjectType(Register heap_object,
                            Register map,
                            Register scratch,
                            Label* fail);

  void IsInstanceJSObjectType(Register map,
                              Register scratch,
                              Label* fail);

  void IsObjectJSStringType(Register object,
                            Register scratch,
                            Label* fail);

#ifdef ENABLE_DEBUGGER_SUPPORT
  // ---------------------------------------------------------------------------
  // Debugger Support

  void DebugBreak();
#endif

  // ---------------------------------------------------------------------------
  // Exception handling

  // Push a new try handler and link into try handler chain.
  // The return address must be passed in register lr.
  // On exit, r0 contains TOS (code slot).
  void PushTryHandler(CodeLocation try_location, HandlerType type);

  // Unlink the stack handler on top of the stack from the try handler chain.
  // Must preserve the result register.
  void PopTryHandler();

  // Passes thrown value (in r0) to the handler on top of the try handler
  // chain.
  void Throw(Register value);

  // Propagates an uncatchable exception to the top of the current JS stack's
  // handler chain.
  void ThrowUncatchable(UncatchableExceptionType type, Register value);

  // ---------------------------------------------------------------------------
  // Inline caching support

  // Generate code for checking access rights - used for security checks
  // on access to global objects across environments. The holder register
  // is left untouched, whereas both scratch registers are clobbered.
  void CheckAccessGlobalProxy(Register holder_reg,
                              Register scratch,
                              Label* miss);


  void LoadFromNumberDictionary(Label* miss,
                                Register elements,
                                Register key,
                                Register result,
                                Register t0,
                                Register t1,
                                Register t2);


  inline void MarkCode(NopMarkerTypes type) {
    nop(type);
  }

  // Check if the given instruction is a 'type' marker,
  // i.e. check if it is a mov r<type>, r<type> (referenced as nop(type)).
  // These instructions are generated to mark special locations in the code,
  // like some special IC code.
  static inline bool IsMarkedCode(Instr instr, int type) {
    ASSERT((FIRST_IC_MARKER <= type) && (type < LAST_CODE_MARKER));
    return IsNop(instr, type);
  }


  static inline int GetCodeMarker(Instr instr) {
    int dst_reg_offset = 12;
    int dst_mask = 0xf << dst_reg_offset;
    int src_mask = 0xf;
    int dst_reg = (instr & dst_mask) >> dst_reg_offset;
    int src_reg = instr & src_mask;
    uint32_t non_register_mask = ~(dst_mask | src_mask);
    uint32_t mov_mask = al | 13 << 21;

    // Return <n> if we have a mov rn rn, else return -1.
    int type = ((instr & non_register_mask) == mov_mask) &&
               (dst_reg == src_reg) &&
               (FIRST_IC_MARKER <= dst_reg) && (dst_reg < LAST_CODE_MARKER)
                   ? src_reg
                   : -1;
    ASSERT((type == -1) ||
           ((FIRST_IC_MARKER <= type) && (type < LAST_CODE_MARKER)));
    return type;
  }
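
  // Illustrative sketch only: MarkCode(type) emits the nop-like instruction
  // mov r<type>, r<type>, so GetCodeMarker() applied to that instruction
  // word returns 'type', while for ordinary instructions it returns -1 and
  // IsMarkedCode(instr, type) returns false.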


  // ---------------------------------------------------------------------------
  // Allocation support

  // Allocate an object in new space. The object_size is specified
  // either in bytes or in words if the allocation flag SIZE_IN_WORDS
  // is passed. If the new space is exhausted control continues at the
  // gc_required label. The allocated object is returned in result. If
  // the TAG_OBJECT flag is passed the result is tagged as
  // a heap object. All registers are clobbered also when control
  // continues at the gc_required label.
  void AllocateInNewSpace(int object_size,
                          Register result,
                          Register scratch1,
                          Register scratch2,
                          Label* gc_required,
                          AllocationFlags flags);
  void AllocateInNewSpace(Register object_size,
                          Register result,
                          Register scratch1,
                          Register scratch2,
                          Label* gc_required,
                          AllocationFlags flags);

  // Undo allocation in new space. The object passed and objects allocated after
  // it will no longer be allocated. The caller must make sure that no pointers
  // are left to the object(s) no longer allocated as they would be invalid when
  // allocation is undone.
  void UndoAllocationInNewSpace(Register object, Register scratch);


  void AllocateTwoByteString(Register result,
                             Register length,
                             Register scratch1,
                             Register scratch2,
                             Register scratch3,
                             Label* gc_required);
  void AllocateAsciiString(Register result,
                           Register length,
                           Register scratch1,
                           Register scratch2,
                           Register scratch3,
                           Label* gc_required);
  void AllocateTwoByteConsString(Register result,
                                 Register length,
                                 Register scratch1,
                                 Register scratch2,
                                 Label* gc_required);
  void AllocateAsciiConsString(Register result,
                               Register length,
                               Register scratch1,
                               Register scratch2,
                               Label* gc_required);
  void AllocateTwoByteSlicedString(Register result,
                                   Register length,
                                   Register scratch1,
                                   Register scratch2,
                                   Label* gc_required);
  void AllocateAsciiSlicedString(Register result,
                                 Register length,
                                 Register scratch1,
                                 Register scratch2,
                                 Label* gc_required);

  // Allocates a heap number or jumps to the gc_required label if the young
  // space is full and a scavenge is needed. All registers are clobbered also
  // when control continues at the gc_required label.
  void AllocateHeapNumber(Register result,
                          Register scratch1,
                          Register scratch2,
                          Register heap_number_map,
                          Label* gc_required);
  void AllocateHeapNumberWithValue(Register result,
                                   DwVfpRegister value,
                                   Register scratch1,
                                   Register scratch2,
                                   Register heap_number_map,
                                   Label* gc_required);

  // Copies a fixed number of fields of heap objects from src to dst.
  void CopyFields(Register dst, Register src, RegList temps, int field_count);

  // Copies a number of bytes from src to dst. All registers are clobbered. On
  // exit src and dst will point to the place just after where the last byte was
  // read or written and length will be zero.
  void CopyBytes(Register src,
                 Register dst,
                 Register length,
                 Register scratch);

  // ---------------------------------------------------------------------------
  // Support functions.

  // Try to get the function prototype of a function and put the value in
  // the result register. Checks that the function really is a
  // function and jumps to the miss label if the fast checks fail. The
  // function register will be untouched; the other registers may be
  // clobbered.
  void TryGetFunctionPrototype(Register function,
                               Register result,
                               Register scratch,
                               Label* miss);

  // Compare object type for heap object. heap_object contains a non-Smi
  // whose object type should be compared with the given type. This both
  // sets the flags and leaves the object type in the type_reg register.
  // It leaves the map in the map register (unless the type_reg and map register
  // are the same register). It leaves the heap object in the heap_object
  // register unless the heap_object register is the same register as one of the
  // other registers.
  void CompareObjectType(Register heap_object,
                         Register map,
                         Register type_reg,
                         InstanceType type);

  // Compare instance type in a map. map contains a valid map object whose
  // object type should be compared with the given type. This both
  // sets the flags and leaves the object type in the type_reg register.
  void CompareInstanceType(Register map,
                           Register type_reg,
                           InstanceType type);


  // Check if a map for a JSObject indicates that the object has fast elements.
  // Jump to the specified label if it does not.
  void CheckFastElements(Register map,
                         Register scratch,
                         Label* fail);

  // Check if the map of an object is equal to a specified map (either
  // given directly or as an index into the root list) and branch to
  // label if not. Skip the smi check if not required (object is known
  // to be a heap object).
  void CheckMap(Register obj,
                Register scratch,
                Handle<Map> map,
                Label* fail,
                SmiCheckType smi_check_type);


  void CheckMap(Register obj,
                Register scratch,
                Heap::RootListIndex index,
                Label* fail,
                SmiCheckType smi_check_type);


  // Check if the map of an object is equal to a specified map and branch to a
  // specified target if equal. Skip the smi check if not required (object is
  // known to be a heap object).
  void DispatchMap(Register obj,
                   Register scratch,
                   Handle<Map> map,
                   Handle<Code> success,
                   SmiCheckType smi_check_type);


  // Compare the object in a register to a value from the root list.
  // Uses the ip register as scratch.
  void CompareRoot(Register obj, Heap::RootListIndex index);


  // Load and check the instance type of an object for being a string.
  // Loads the type into the second argument register.
  // Returns a condition that will be enabled if the object was a string.
  Condition IsObjectStringType(Register obj,
                               Register type) {
    ldr(type, FieldMemOperand(obj, HeapObject::kMapOffset));
    ldrb(type, FieldMemOperand(type, Map::kInstanceTypeOffset));
    tst(type, Operand(kIsNotStringMask));
    ASSERT_EQ(0, kStringTag);
    return eq;
  }
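
  // Typical use, as an illustrative sketch only (register choice is
  // hypothetical and the object must already be known to be a heap object):
  //
  //   Label is_string;
  //   Condition cond = IsObjectStringType(r0, r1);
  //   b(cond, &is_string);  // taken when r0 holds a string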


  // Generates code for reporting that an illegal operation has
  // occurred.
  void IllegalOperation(int num_arguments);

  // Picks out an array index from the hash field.
  // Register use:
  //   hash - holds the index's hash. Clobbered.
  //   index - holds the overwritten index on exit.
  void IndexFromHash(Register hash, Register index);

  // Get the number of least significant bits from a register.
  void GetLeastBitsFromSmi(Register dst, Register src, int num_least_bits);
  void GetLeastBitsFromInt32(Register dst, Register src, int num_least_bits);

  // Uses VFP instructions to convert a Smi to a double.
  void IntegerToDoubleConversionWithVFP3(Register inReg,
                                         Register outHighReg,
                                         Register outLowReg);

  // Load the value of a number object into a VFP double register. If the object
  // is not a number a jump to the label not_number is performed and the VFP
  // double register is unchanged.
  void ObjectToDoubleVFPRegister(
      Register object,
      DwVfpRegister value,
      Register scratch1,
      Register scratch2,
      Register heap_number_map,
      SwVfpRegister scratch3,
      Label* not_number,
      ObjectToDoubleFlags flags = NO_OBJECT_TO_DOUBLE_FLAGS);

  // Load the value of a smi object into a VFP double register. The register
  // scratch1 can be the same register as smi in which case smi will hold the
  // untagged value afterwards.
  void SmiToDoubleVFPRegister(Register smi,
                              DwVfpRegister value,
                              Register scratch1,
                              SwVfpRegister scratch2);

  // Convert the HeapNumber pointed to by source to a 32-bit signed integer
  // dest. If the HeapNumber does not fit into a 32-bit signed integer branch
  // to the not_int32 label. If VFP3 is available double_scratch is used but not
  // scratch2.
  void ConvertToInt32(Register source,
                      Register dest,
                      Register scratch,
                      Register scratch2,
                      DwVfpRegister double_scratch,
                      Label* not_int32);

  // Truncates a double using a specific rounding mode.
  // Clears the z flag (ne condition) if an overflow occurs.
  // If exact_conversion is true, the z flag is also cleared if the conversion
  // was inexact, i.e. if the double value could not be converted exactly
  // to a 32-bit integer.
  void EmitVFPTruncate(VFPRoundingMode rounding_mode,
                       SwVfpRegister result,
                       DwVfpRegister double_input,
                       Register scratch1,
                       Register scratch2,
                       CheckForInexactConversion check
                           = kDontCheckForInexactConversion);

  // Helper for EmitECMATruncate.
  // This will truncate a floating-point value outside of the signed 32-bit
  // integer range to a 32-bit signed integer.
  // Expects the double value loaded in input_high and input_low.
  // Exits with the answer in 'result'.
  // Note that this code does not work for values in the 32-bit range!
  void EmitOutOfInt32RangeTruncate(Register result,
                                   Register input_high,
                                   Register input_low,
                                   Register scratch);

  // Performs a truncating conversion of a floating point number as used by
  // the JS bitwise operations. See ECMA-262 9.5: ToInt32.
  // Exits with 'result' holding the answer and all other registers clobbered.
  void EmitECMATruncate(Register result,
                        DwVfpRegister double_input,
                        SwVfpRegister single_scratch,
                        Register scratch,
                        Register scratch2,
                        Register scratch3);

  // Count leading zeros in a 32 bit word. On ARM5 and later it uses the clz
  // instruction. On pre-ARM5 hardware this routine gives the wrong answer
  // for 0 (31 instead of 32). Source and scratch can be the same in which case
  // the source is clobbered. Source and zeros can also be the same in which
  // case scratch should be a different register.
  void CountLeadingZeros(Register zeros,
                         Register source,
                         Register scratch);

  // ---------------------------------------------------------------------------
  // Runtime calls

  // Call a code stub.
  void CallStub(CodeStub* stub, Condition cond = al);

  // Call a code stub and return the code object called. Try to generate
  // the code if necessary. Do not perform a GC but instead return a retry
  // after GC failure.
  MUST_USE_RESULT MaybeObject* TryCallStub(CodeStub* stub, Condition cond = al);

  // Tail call a code stub (jump).
  void TailCallStub(CodeStub* stub, Condition cond = al);

  // Tail call a code stub (jump) and return the code object called. Try to
  // generate the code if necessary. Do not perform a GC but instead return
  // a retry after GC failure.
  MUST_USE_RESULT MaybeObject* TryTailCallStub(CodeStub* stub,
                                               Condition cond = al);

  // Call a runtime routine.
  void CallRuntime(const Runtime::Function* f, int num_arguments);
  void CallRuntimeSaveDoubles(Runtime::FunctionId id);

  // Convenience function: Same as above, but takes the fid instead.
  void CallRuntime(Runtime::FunctionId fid, int num_arguments);

  // Convenience function: call an external reference.
  void CallExternalReference(const ExternalReference& ext,
                             int num_arguments);

  // Tail call of a runtime routine (jump).
  // Like JumpToExternalReference, but also takes care of passing the number
  // of parameters.
  void TailCallExternalReference(const ExternalReference& ext,
                                 int num_arguments,
                                 int result_size);

  // Tail call of a runtime routine (jump). Try to generate the code if
  // necessary. Do not perform a GC but instead return a retry after GC
  // failure.
  MUST_USE_RESULT MaybeObject* TryTailCallExternalReference(
      const ExternalReference& ext, int num_arguments, int result_size);

  // Convenience function: tail call a runtime routine (jump).
  void TailCallRuntime(Runtime::FunctionId fid,
                       int num_arguments,
                       int result_size);

  int CalculateStackPassedWords(int num_reg_arguments,
                                int num_double_arguments);

  // Before calling a C-function from generated code, align arguments on stack.
  // After aligning the frame, non-register arguments must be stored in
  // sp[0], sp[4], etc., not pushed. The argument count assumes all arguments
  // are word sized. If double arguments are used, this function assumes that
  // all double arguments are stored before core registers; otherwise the
  // correct alignment of the double values is not guaranteed.
  // Some compilers/platforms require the stack to be aligned when calling
  // C++ code.
  // Needs a scratch register to do some arithmetic. This register will be
  // trashed.
  void PrepareCallCFunction(int num_reg_arguments,
                            int num_double_registers,
                            Register scratch);
  void PrepareCallCFunction(int num_reg_arguments,
                            Register scratch);

  // There are two ways of passing double arguments on ARM, depending on
  // whether soft or hard floating point ABI is used. These functions
  // abstract parameter passing for the three different ways we call
  // C functions from generated code.
  void SetCallCDoubleArguments(DoubleRegister dreg);
  void SetCallCDoubleArguments(DoubleRegister dreg1, DoubleRegister dreg2);
  void SetCallCDoubleArguments(DoubleRegister dreg, Register reg);

  // Calls a C function and cleans up the space for arguments allocated
  // by PrepareCallCFunction. The called function is not allowed to trigger a
  // garbage collection, since that might move the code and invalidate the
  // return address (unless this is somehow accounted for by the called
  // function).
  void CallCFunction(ExternalReference function, int num_arguments);
  void CallCFunction(Register function, Register scratch, int num_arguments);
  void CallCFunction(ExternalReference function,
                     int num_reg_arguments,
                     int num_double_arguments);
  void CallCFunction(Register function, Register scratch,
                     int num_reg_arguments,
                     int num_double_arguments);
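
  // A typical call sequence, as an illustrative sketch only (the external
  // reference 'ext_ref' and the argument value are hypothetical):
  //
  //   PrepareCallCFunction(1, scratch);  // one word-sized argument
  //   mov(r0, Operand(argument));        // integer arguments go in r0..r3
  //   CallCFunction(ext_ref, 1);         // calls and restores sp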

  void GetCFunctionDoubleResult(const DoubleRegister dst);

  // Calls an API function. Allocates HandleScope, extracts returned value
  // from handle and propagates exceptions. Restores context.
  // stack_space - space to be unwound on exit (includes the call js
  // arguments space and the additional space allocated for the fast call).
  MaybeObject* TryCallApiFunctionAndReturn(ExternalReference function,
                                           int stack_space);

  // Jump to a runtime routine.
  void JumpToExternalReference(const ExternalReference& builtin);

  MaybeObject* TryJumpToExternalReference(const ExternalReference& ext);

  // Invoke specified builtin JavaScript function. Adds an entry to
  // the unresolved list if the name does not resolve.
  void InvokeBuiltin(Builtins::JavaScript id,
                     InvokeFlag flag,
                     const CallWrapper& call_wrapper = NullCallWrapper());

  // Store the code object for the given builtin in the target register and
  // set up the function in r1.
  void GetBuiltinEntry(Register target, Builtins::JavaScript id);

  // Store the function for the given builtin in the target register.
  void GetBuiltinFunction(Register target, Builtins::JavaScript id);

  Handle<Object> CodeObject() {
    ASSERT(!code_object_.is_null());
    return code_object_;
  }


  // ---------------------------------------------------------------------------
  // StatsCounter support

  void SetCounter(StatsCounter* counter, int value,
                  Register scratch1, Register scratch2);
  void IncrementCounter(StatsCounter* counter, int value,
                        Register scratch1, Register scratch2);
  void DecrementCounter(StatsCounter* counter, int value,
                        Register scratch1, Register scratch2);


  // ---------------------------------------------------------------------------
  // Debugging

  // Calls Abort(msg) if the condition cond is not satisfied.
  // Use --debug_code to enable.
  void Assert(Condition cond, const char* msg);
  void AssertRegisterIsRoot(Register reg, Heap::RootListIndex index);
  void AssertFastElements(Register elements);

  // Like Assert(), but always enabled.
  void Check(Condition cond, const char* msg);

  // Print a message to stdout and abort execution.
  void Abort(const char* msg);

  // Verify restrictions about code generated in stubs.
  void set_generating_stub(bool value) { generating_stub_ = value; }
  bool generating_stub() { return generating_stub_; }
  void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
  bool allow_stub_calls() { return allow_stub_calls_; }

  // EABI variant for double arguments in use.
  bool use_eabi_hardfloat() {
#if USE_EABI_HARDFLOAT
    return true;
#else
    return false;
#endif
  }

  // ---------------------------------------------------------------------------
  // Number utilities

  // Check whether the value of reg is a power of two and not zero. If not
  // control continues at the label not_power_of_two. If reg is a power of two
  // the register scratch contains the value of (reg - 1) when control falls
  // through.
  void JumpIfNotPowerOfTwoOrZero(Register reg,
                                 Register scratch,
                                 Label* not_power_of_two_or_zero);
  // Check whether the value of reg is a power of two and not zero.
  // Control falls through if it is, with scratch containing the mask
  // value (reg - 1).
  // Otherwise control jumps to the 'zero_and_neg' label if the value of reg is
  // zero or negative, or jumps to the 'not_power_of_two' label if the value is
  // strictly positive but not a power of two.
  void JumpIfNotPowerOfTwoOrZeroAndNeg(Register reg,
                                       Register scratch,
                                       Label* zero_and_neg,
                                       Label* not_power_of_two);

  // ---------------------------------------------------------------------------
  // Smi utilities

  void SmiTag(Register reg, SBit s = LeaveCC) {
    add(reg, reg, Operand(reg), s);
  }
  void SmiTag(Register dst, Register src, SBit s = LeaveCC) {
    add(dst, src, Operand(src), s);
  }

  // Try to convert int32 to smi. If the value is too large, preserve
  // the original value and jump to not_a_smi. Destroys scratch and
  // sets flags.
  void TrySmiTag(Register reg, Label* not_a_smi, Register scratch) {
    mov(scratch, reg);
    SmiTag(scratch, SetCC);
    b(vs, not_a_smi);
    mov(reg, scratch);
  }

  void SmiUntag(Register reg, SBit s = LeaveCC) {
    mov(reg, Operand(reg, ASR, kSmiTagSize), s);
  }
  void SmiUntag(Register dst, Register src, SBit s = LeaveCC) {
    mov(dst, Operand(src, ASR, kSmiTagSize), s);
  }

  // Jump if the register contains a smi.
  inline void JumpIfSmi(Register value, Label* smi_label) {
    tst(value, Operand(kSmiTagMask));
    b(eq, smi_label);
  }
  // Jump if the register contains a non-smi.
  inline void JumpIfNotSmi(Register value, Label* not_smi_label) {
    tst(value, Operand(kSmiTagMask));
    b(ne, not_smi_label);
  }
  // Jump if either of the registers contain a non-smi.
  void JumpIfNotBothSmi(Register reg1, Register reg2, Label* on_not_both_smi);
  // Jump if either of the registers contain a smi.
  void JumpIfEitherSmi(Register reg1, Register reg2, Label* on_either_smi);

  // Abort execution if argument is a smi. Used in debug code.
  void AbortIfSmi(Register object);
  void AbortIfNotSmi(Register object);

  // Abort execution if argument is not a string. Used in debug code.
  void AbortIfNotString(Register object);

  // Abort execution if argument is not the root value with the given index.
  void AbortIfNotRootValue(Register src,
                           Heap::RootListIndex root_value_index,
                           const char* message);

  // ---------------------------------------------------------------------------
  // HeapNumber utilities

  void JumpIfNotHeapNumber(Register object,
                           Register heap_number_map,
                           Register scratch,
                           Label* on_not_heap_number);

  // ---------------------------------------------------------------------------
  // String utilities

  // Checks if both objects are sequential ASCII strings and jumps to label
  // if either is not. Assumes that neither object is a smi.
  void JumpIfNonSmisNotBothSequentialAsciiStrings(Register object1,
                                                  Register object2,
                                                  Register scratch1,
                                                  Register scratch2,
                                                  Label* failure);

  // Checks if both objects are sequential ASCII strings and jumps to label
  // if either is not.
  void JumpIfNotBothSequentialAsciiStrings(Register first,
                                           Register second,
                                           Register scratch1,
                                           Register scratch2,
                                           Label* not_flat_ascii_strings);

  // Checks if both instance types are sequential ASCII strings and jumps to
  // label if either is not.
  void JumpIfBothInstanceTypesAreNotSequentialAscii(
      Register first_object_instance_type,
      Register second_object_instance_type,
      Register scratch1,
      Register scratch2,
      Label* failure);

  // Check if instance type is sequential ASCII string and jump to label if
  // it is not.
  void JumpIfInstanceTypeIsNotSequentialAscii(Register type,
                                              Register scratch,
                                              Label* failure);


  // ---------------------------------------------------------------------------
  // Patching helpers.

  // Get the location of a relocated constant (its address in the constant pool)
  // from its load site.
  void GetRelocatedValueLocation(Register ldr_location,
                                 Register result);


  void ClampUint8(Register output_reg, Register input_reg);

  void ClampDoubleToUint8(Register result_reg,
                          DoubleRegister input_reg,
                          DoubleRegister temp_double_reg);


  void LoadInstanceDescriptors(Register map, Register descriptors);

 private:
  void CallCFunctionHelper(Register function,
                           ExternalReference function_reference,
                           Register scratch,
                           int num_reg_arguments,
                           int num_double_arguments);

  void Jump(intptr_t target, RelocInfo::Mode rmode, Condition cond = al);

  // Helper functions for generating invokes.
  void InvokePrologue(const ParameterCount& expected,
                      const ParameterCount& actual,
                      Handle<Code> code_constant,
                      Register code_reg,
                      Label* done,
                      InvokeFlag flag,
                      const CallWrapper& call_wrapper,
                      CallKind call_kind);

  // Activation support.
  void EnterFrame(StackFrame::Type type);
  void LeaveFrame(StackFrame::Type type);

  void InitializeNewString(Register string,
                           Register length,
                           Heap::RootListIndex map_index,
                           Register scratch1,
                           Register scratch2);

  // Compute memory operands for safepoint stack slots.
  static int SafepointRegisterStackIndex(int reg_code);
  MemOperand SafepointRegisterSlot(Register reg);
  MemOperand SafepointRegistersAndDoublesSlot(Register reg);

  bool generating_stub_;
  bool allow_stub_calls_;
  // This handle will be patched with the code object on installation.
  Handle<Object> code_object_;

  // Needs access to SafepointRegisterStackIndex for optimized frame
  // traversal.
  friend class OptimizedFrame;
};


#ifdef ENABLE_DEBUGGER_SUPPORT
// The code patcher is used to patch (typically) small parts of code e.g. for
// debugging and other types of instrumentation. When using the code patcher
// the exact number of bytes specified must be emitted. It is not legal to emit
// relocation information. If any of these constraints are violated it causes
// an assertion to fail.
class CodePatcher {
 public:
  CodePatcher(byte* address, int instructions);
  virtual ~CodePatcher();

  // Macro assembler to emit code.
  MacroAssembler* masm() { return &masm_; }

  // Emit an instruction directly.
  void Emit(Instr instr);

  // Emit an address directly.
  void Emit(Address addr);

  // Emit the condition part of an instruction leaving the rest of the current
  // instruction unchanged.
  void EmitCondition(Condition cond);

 private:
  byte* address_;        // The address of the code being patched.
  int instructions_;     // Number of instructions of the expected patch size.
  int size_;             // Number of bytes of the expected patch size.
  MacroAssembler masm_;  // Macro assembler used to generate the code.
};
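
// A minimal usage sketch (illustrative only; 'break_address' is a
// hypothetical code address):
//
//   CodePatcher patcher(break_address, 1);  // patch exactly one instruction
//   patcher.masm()->bkpt(0);                // e.g. replace it with a breakpoint
//
// On destruction the patcher checks that exactly the requested number of
// instructions was emitted, as described above.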
#endif  // ENABLE_DEBUGGER_SUPPORT


// -----------------------------------------------------------------------------
// Static helper functions.

static MemOperand ContextOperand(Register context, int index) {
  return MemOperand(context, Context::SlotOffset(index));
}


static inline MemOperand GlobalObjectOperand() {
  return ContextOperand(cp, Context::GLOBAL_INDEX);
}
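
// For example (an illustrative sketch only), the global object of the current
// context, which the cp register points at, can be loaded with
//
//   ldr(r0, GlobalObjectOperand());
//
// which is equivalent to ldr(r0, ContextOperand(cp, Context::GLOBAL_INDEX)).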


#ifdef GENERATED_CODE_COVERAGE
#define CODE_COVERAGE_STRINGIFY(x) #x
#define CODE_COVERAGE_TOSTRING(x) CODE_COVERAGE_STRINGIFY(x)
#define __FILE_LINE__ __FILE__ ":" CODE_COVERAGE_TOSTRING(__LINE__)
#define ACCESS_MASM(masm) masm->stop(__FILE_LINE__); masm->
#else
#define ACCESS_MASM(masm) masm->
#endif
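
// Code generators conventionally wrap this in a local shorthand, e.g. (an
// illustrative sketch only):
//
//   #define __ ACCESS_MASM(masm)
//   __ mov(r0, Operand(Smi::FromInt(0)));
//   __ Ret();
//
// With GENERATED_CODE_COVERAGE defined, every such use additionally emits a
// stop() carrying the file and line of the call site.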


} }  // namespace v8::internal

#endif  // V8_ARM_MACRO_ASSEMBLER_ARM_H_