// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_ARM_MACRO_ASSEMBLER_ARM_H_
#define V8_ARM_MACRO_ASSEMBLER_ARM_H_

#include "assembler.h"
#include "frames.h"
#include "v8globals.h"

namespace v8 {
namespace internal {

// ----------------------------------------------------------------------------
// Static helper functions

// Generate a MemOperand for loading a field from an object.
inline MemOperand FieldMemOperand(Register object, int offset) {
  return MemOperand(object, offset - kHeapObjectTag);
}
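
// Illustrative use (added note, not part of the original header); this is the
// same pattern IsObjectStringType() uses further down in this file:
//   ldr(type, FieldMemOperand(obj, HeapObject::kMapOffset));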


inline Operand SmiUntagOperand(Register object) {
  return Operand(object, ASR, kSmiTagSize);
}



// Give alias names to registers
const Register cp = { 8 };  // JavaScript context pointer
const Register kRootRegister = { 10 };  // Roots array pointer.

// Flags used for the AllocateInNewSpace functions.
enum AllocationFlags {
  // No special flags.
  NO_ALLOCATION_FLAGS = 0,
  // Return the pointer to the allocated object already tagged as a heap
  // object.
  TAG_OBJECT = 1 << 0,
  // The content of the result register already contains the allocation top in
  // new space.
  RESULT_CONTAINS_TOP = 1 << 1,
  // Specify that the requested size of the space to allocate is specified in
  // words instead of bytes.
  SIZE_IN_WORDS = 1 << 2
};
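
// Added note: these are bit flags; callers combine them with a cast such as
// static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS). The combination
// shown here is only an illustration, not taken from this file.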


// Flags used for the ObjectToDoubleVFPRegister function.
enum ObjectToDoubleFlags {
  // No special flags.
  NO_OBJECT_TO_DOUBLE_FLAGS = 0,
  // Object is known to be a non smi.
  OBJECT_NOT_SMI = 1 << 0,
  // Don't load NaNs or infinities, branch to the non number case instead.
  AVOID_NANS_AND_INFINITIES = 1 << 1
};


enum RememberedSetAction { EMIT_REMEMBERED_SET, OMIT_REMEMBERED_SET };
enum SmiCheck { INLINE_SMI_CHECK, OMIT_SMI_CHECK };
enum LinkRegisterStatus { kLRHasNotBeenSaved, kLRHasBeenSaved };


bool AreAliased(Register r1, Register r2, Register r3, Register r4);


// MacroAssembler implements a collection of frequently used macros.
class MacroAssembler: public Assembler {
 public:
  // The isolate parameter can be NULL if the macro assembler should
  // not use isolate-dependent functionality. In this case, it's the
  // responsibility of the caller to never invoke such functions on the
  // macro assembler.
  MacroAssembler(Isolate* isolate, void* buffer, int size);

  // Jump, Call, and Ret pseudo instructions implementing inter-working.
  void Jump(Register target, Condition cond = al);
  void Jump(Address target, RelocInfo::Mode rmode, Condition cond = al);
  void Jump(Handle<Code> code, RelocInfo::Mode rmode, Condition cond = al);
  static int CallSize(Register target, Condition cond = al);
  void Call(Register target, Condition cond = al);
  static int CallSize(Address target,
                      RelocInfo::Mode rmode,
                      Condition cond = al);
  void Call(Address target, RelocInfo::Mode rmode, Condition cond = al);
  static int CallSize(Handle<Code> code,
                      RelocInfo::Mode rmode = RelocInfo::CODE_TARGET,
                      unsigned ast_id = kNoASTId,
                      Condition cond = al);
  void Call(Handle<Code> code,
            RelocInfo::Mode rmode = RelocInfo::CODE_TARGET,
            unsigned ast_id = kNoASTId,
            Condition cond = al);
  void Ret(Condition cond = al);

  // Emit code to discard a non-negative number of pointer-sized elements
  // from the stack, clobbering only the sp register.
  void Drop(int count, Condition cond = al);

  void Ret(int drop, Condition cond = al);

  // Swap two registers. If the scratch register is omitted then a slightly
  // less efficient form using xor instead of mov is emitted.
  void Swap(Register reg1,
            Register reg2,
            Register scratch = no_reg,
            Condition cond = al);


  void And(Register dst, Register src1, const Operand& src2,
           Condition cond = al);
  void Ubfx(Register dst, Register src, int lsb, int width,
            Condition cond = al);
  void Sbfx(Register dst, Register src, int lsb, int width,
            Condition cond = al);
  // The scratch register is not used for ARMv7.
  // scratch can be the same register as src (in which case it is trashed), but
  // not the same as dst.
  void Bfi(Register dst,
           Register src,
           Register scratch,
           int lsb,
           int width,
           Condition cond = al);
  void Bfc(Register dst, int lsb, int width, Condition cond = al);
  void Usat(Register dst, int satpos, const Operand& src,
            Condition cond = al);

  void Call(Label* target);

  // Register move. May do nothing if the registers are identical.
  void Move(Register dst, Handle<Object> value);
  void Move(Register dst, Register src, Condition cond = al);
  void Move(DoubleRegister dst, DoubleRegister src);

  // Load an object from the root table.
  void LoadRoot(Register destination,
                Heap::RootListIndex index,
                Condition cond = al);
  // Store an object to the root table.
  void StoreRoot(Register source,
                 Heap::RootListIndex index,
                 Condition cond = al);

  void LoadHeapObject(Register dst, Handle<HeapObject> object);

  void LoadObject(Register result, Handle<Object> object) {
    if (object->IsHeapObject()) {
      LoadHeapObject(result, Handle<HeapObject>::cast(object));
    } else {
      Move(result, object);
    }
  }
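
  // Added note (assumption, not stated in this header): smis are immediates
  // and can simply be moved into the register, while heap object handles are
  // routed through LoadHeapObject() so the embedded reference can be handled
  // in a GC-safe way.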

  // ---------------------------------------------------------------------------
  // GC Support

  void IncrementalMarkingRecordWriteHelper(Register object,
                                           Register value,
                                           Register address);

  enum RememberedSetFinalAction {
    kReturnAtEnd,
    kFallThroughAtEnd
  };

  // Record in the remembered set the fact that we have a pointer to new space
  // at the address pointed to by the addr register. Only works if addr is not
  // in new space.
  void RememberedSetHelper(Register object,  // Used for debug code.
                           Register addr,
                           Register scratch,
                           SaveFPRegsMode save_fp,
                           RememberedSetFinalAction and_then);

  void CheckPageFlag(Register object,
                     Register scratch,
                     int mask,
                     Condition cc,
                     Label* condition_met);

  // Check if object is in new space. Jumps if the object is not in new space.
  // The register scratch can be object itself, but scratch will be clobbered.
  void JumpIfNotInNewSpace(Register object,
                           Register scratch,
                           Label* branch) {
    InNewSpace(object, scratch, ne, branch);
  }

  // Check if object is in new space. Jumps if the object is in new space.
  // The register scratch can be object itself, but it will be clobbered.
  void JumpIfInNewSpace(Register object,
                        Register scratch,
                        Label* branch) {
    InNewSpace(object, scratch, eq, branch);
  }

  // Check if an object has a given incremental marking color.
  void HasColor(Register object,
                Register scratch0,
                Register scratch1,
                Label* has_color,
                int first_bit,
                int second_bit);

  void JumpIfBlack(Register object,
                   Register scratch0,
                   Register scratch1,
                   Label* on_black);

  // Checks the color of an object. If the object is already grey or black
  // then we just fall through, since it is already live. If it is white and
  // we can determine that it doesn't need to be scanned, then we just mark it
  // black and fall through. For the rest we jump to the label so the
  // incremental marker can fix its assumptions.
  void EnsureNotWhite(Register object,
                      Register scratch1,
                      Register scratch2,
                      Register scratch3,
                      Label* object_is_white_and_not_data);

246 // Detects conservatively whether an object is data-only, i.e. it does need to
247 // be scanned by the garbage collector.
  void JumpIfDataObject(Register value,
                        Register scratch,
                        Label* not_data_object);

  // Notify the garbage collector that we wrote a pointer into an object.
  // |object| is the object being stored into, |value| is the object being
  // stored. value and scratch registers are clobbered by the operation.
  // The offset is the offset from the start of the object, not the offset from
  // the tagged HeapObject pointer. For use with FieldOperand(reg, off).
  void RecordWriteField(
      Register object,
      int offset,
      Register value,
      Register scratch,
      LinkRegisterStatus lr_status,
      SaveFPRegsMode save_fp,
      RememberedSetAction remembered_set_action = EMIT_REMEMBERED_SET,
      SmiCheck smi_check = INLINE_SMI_CHECK);

  // As above, but the offset has the tag presubtracted. For use with
  // MemOperand(reg, off).
  inline void RecordWriteContextSlot(
      Register context,
      int offset,
      Register value,
      Register scratch,
      LinkRegisterStatus lr_status,
      SaveFPRegsMode save_fp,
      RememberedSetAction remembered_set_action = EMIT_REMEMBERED_SET,
      SmiCheck smi_check = INLINE_SMI_CHECK) {
    RecordWriteField(context,
                     offset + kHeapObjectTag,
                     value,
                     scratch,
                     lr_status,
                     save_fp,
                     remembered_set_action,
                     smi_check);
  }
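
  // Illustration (added, not from the original source): the |offset| passed
  // here is a MemOperand-style offset such as Context::SlotOffset(index),
  // which already has the heap-object tag subtracted; the wrapper above adds
  // kHeapObjectTag back before delegating to RecordWriteField().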

  // For a given |object| notify the garbage collector that the slot |address|
  // has been written. |value| is the object being stored. The value and
  // address registers are clobbered by the operation.
  void RecordWrite(
      Register object,
      Register address,
      Register value,
      LinkRegisterStatus lr_status,
      SaveFPRegsMode save_fp,
      RememberedSetAction remembered_set_action = EMIT_REMEMBERED_SET,
      SmiCheck smi_check = INLINE_SMI_CHECK);

  // Push a handle.
  void Push(Handle<Object> handle);

  // Push two registers. Pushes leftmost register first (to highest address).
  void Push(Register src1, Register src2, Condition cond = al) {
    ASSERT(!src1.is(src2));
    if (src1.code() > src2.code()) {
      stm(db_w, sp, src1.bit() | src2.bit(), cond);
    } else {
      str(src1, MemOperand(sp, 4, NegPreIndex), cond);
      str(src2, MemOperand(sp, 4, NegPreIndex), cond);
    }
  }
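
  // Added note: after Push(r1, r2) the stack holds r1 at the higher address
  // and r2 at the lower one, i.e. sp[4] == r1 and sp[0] == r2, exactly as if
  // r1 and r2 had been pushed one after the other.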

  // Push three registers. Pushes leftmost register first (to highest address).
  void Push(Register src1, Register src2, Register src3, Condition cond = al) {
    ASSERT(!src1.is(src2));
    ASSERT(!src2.is(src3));
    ASSERT(!src1.is(src3));
    if (src1.code() > src2.code()) {
      if (src2.code() > src3.code()) {
        stm(db_w, sp, src1.bit() | src2.bit() | src3.bit(), cond);
      } else {
        stm(db_w, sp, src1.bit() | src2.bit(), cond);
        str(src3, MemOperand(sp, 4, NegPreIndex), cond);
      }
    } else {
      str(src1, MemOperand(sp, 4, NegPreIndex), cond);
      Push(src2, src3, cond);
    }
  }

  // Push four registers. Pushes leftmost register first (to highest address).
  void Push(Register src1,
            Register src2,
            Register src3,
            Register src4,
            Condition cond = al) {
    ASSERT(!src1.is(src2));
    ASSERT(!src2.is(src3));
    ASSERT(!src1.is(src3));
    ASSERT(!src1.is(src4));
    ASSERT(!src2.is(src4));
    ASSERT(!src3.is(src4));
    if (src1.code() > src2.code()) {
      if (src2.code() > src3.code()) {
        if (src3.code() > src4.code()) {
          stm(db_w,
              sp,
              src1.bit() | src2.bit() | src3.bit() | src4.bit(),
              cond);
        } else {
          stm(db_w, sp, src1.bit() | src2.bit() | src3.bit(), cond);
          str(src4, MemOperand(sp, 4, NegPreIndex), cond);
        }
      } else {
        stm(db_w, sp, src1.bit() | src2.bit(), cond);
        Push(src3, src4, cond);
      }
    } else {
      str(src1, MemOperand(sp, 4, NegPreIndex), cond);
      Push(src2, src3, src4, cond);
    }
  }

  // Pop two registers. Pops rightmost register first (from lower address).
  void Pop(Register src1, Register src2, Condition cond = al) {
    ASSERT(!src1.is(src2));
    if (src1.code() > src2.code()) {
      ldm(ia_w, sp, src1.bit() | src2.bit(), cond);
    } else {
      ldr(src2, MemOperand(sp, 4, PostIndex), cond);
      ldr(src1, MemOperand(sp, 4, PostIndex), cond);
    }
  }

  // Pop three registers. Pops rightmost register first (from lower address).
  void Pop(Register src1, Register src2, Register src3, Condition cond = al) {
    ASSERT(!src1.is(src2));
    ASSERT(!src2.is(src3));
    ASSERT(!src1.is(src3));
    if (src1.code() > src2.code()) {
      if (src2.code() > src3.code()) {
        ldm(ia_w, sp, src1.bit() | src2.bit() | src3.bit(), cond);
      } else {
        ldr(src3, MemOperand(sp, 4, PostIndex), cond);
        ldm(ia_w, sp, src1.bit() | src2.bit(), cond);
      }
    } else {
      Pop(src2, src3, cond);
      ldr(src1, MemOperand(sp, 4, PostIndex), cond);
    }
  }

  // Pop four registers. Pops rightmost register first (from lower address).
  void Pop(Register src1,
           Register src2,
           Register src3,
           Register src4,
           Condition cond = al) {
    ASSERT(!src1.is(src2));
    ASSERT(!src2.is(src3));
    ASSERT(!src1.is(src3));
    ASSERT(!src1.is(src4));
    ASSERT(!src2.is(src4));
    ASSERT(!src3.is(src4));
    if (src1.code() > src2.code()) {
      if (src2.code() > src3.code()) {
        if (src3.code() > src4.code()) {
          ldm(ia_w,
              sp,
              src1.bit() | src2.bit() | src3.bit() | src4.bit(),
              cond);
        } else {
          ldr(src4, MemOperand(sp, 4, PostIndex), cond);
          ldm(ia_w, sp, src1.bit() | src2.bit() | src3.bit(), cond);
        }
      } else {
        Pop(src3, src4, cond);
        ldm(ia_w, sp, src1.bit() | src2.bit(), cond);
      }
    } else {
      Pop(src2, src3, src4, cond);
      ldr(src1, MemOperand(sp, 4, PostIndex), cond);
    }
  }

  // Push and pop the registers that can hold pointers, as defined by the
  // RegList constant kSafepointSavedRegisters.
  void PushSafepointRegisters();
  void PopSafepointRegisters();
  void PushSafepointRegistersAndDoubles();
  void PopSafepointRegistersAndDoubles();
  // Store value in register src in the safepoint stack slot for
  // register dst.
  void StoreToSafepointRegisterSlot(Register src, Register dst);
  void StoreToSafepointRegistersAndDoublesSlot(Register src, Register dst);
  // Load the value of the src register from its safepoint stack slot
  // into register dst.
  void LoadFromSafepointRegisterSlot(Register dst, Register src);

  // Load two consecutive registers with two consecutive memory locations.
  void Ldrd(Register dst1,
            Register dst2,
            const MemOperand& src,
            Condition cond = al);

  // Store two consecutive registers to two consecutive memory locations.
  void Strd(Register src1,
            Register src2,
            const MemOperand& dst,
            Condition cond = al);

  // Clear specified FPSCR bits.
  void ClearFPSCRBits(const uint32_t bits_to_clear,
                      const Register scratch,
                      const Condition cond = al);

  // Compare double values and move the result to the normal condition flags.
  void VFPCompareAndSetFlags(const DwVfpRegister src1,
                             const DwVfpRegister src2,
                             const Condition cond = al);
  void VFPCompareAndSetFlags(const DwVfpRegister src1,
                             const double src2,
                             const Condition cond = al);

  // Compare double values and then load the fpscr flags to a register.
  void VFPCompareAndLoadFlags(const DwVfpRegister src1,
                              const DwVfpRegister src2,
                              const Register fpscr_flags,
                              const Condition cond = al);
  void VFPCompareAndLoadFlags(const DwVfpRegister src1,
                              const double src2,
                              const Register fpscr_flags,
                              const Condition cond = al);

  void Vmov(const DwVfpRegister dst,
            const double imm,
            const Condition cond = al);

  // Enter exit frame.
  // stack_space - extra stack space, used for alignment before call to C.
  void EnterExitFrame(bool save_doubles, int stack_space = 0);

  // Leave the current exit frame. Expects the return value in r0.
  // Expect the number of values, pushed prior to the exit frame, to
  // remove in a register (or no_reg, if there is nothing to remove).
  void LeaveExitFrame(bool save_doubles, Register argument_count);

  // Get the actual activation frame alignment for target environment.
  static int ActivationFrameAlignment();

  void LoadContext(Register dst, int context_chain_length);

  // Conditionally load the cached Array transitioned map of type
  // transitioned_kind from the global context if the map in register
  // map_in_out is the cached Array map in the global context of
  // expected_kind.
  void LoadTransitionedArrayMapConditional(
      ElementsKind expected_kind,
      ElementsKind transitioned_kind,
      Register map_in_out,
      Register scratch,
      Label* no_map_match);

  // Load the initial map for new Arrays from a JSFunction.
  void LoadInitialArrayMap(Register function_in,
                           Register scratch,
                           Register map_out);

  void LoadGlobalFunction(int index, Register function);

  // Load the initial map from the global function. The registers
  // function and map can be the same, function is then overwritten.
  void LoadGlobalFunctionInitialMap(Register function,
                                    Register map,
                                    Register scratch);

  void InitializeRootRegister() {
    ExternalReference roots_array_start =
        ExternalReference::roots_array_start(isolate());
    mov(kRootRegister, Operand(roots_array_start));
  }

  // ---------------------------------------------------------------------------
  // JavaScript invokes

  // Set up call kind marking in the destination register. The method takes
  // the register as an explicit first parameter to make the code more
  // readable at the call sites.
  void SetCallKind(Register dst, CallKind kind);

  // Invoke the JavaScript function code by either calling or jumping.
  void InvokeCode(Register code,
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  InvokeFlag flag,
                  const CallWrapper& call_wrapper,
                  CallKind call_kind);

  void InvokeCode(Handle<Code> code,
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  RelocInfo::Mode rmode,
                  InvokeFlag flag,
                  CallKind call_kind);

  // Invoke the JavaScript function in the given register. Changes the
  // current context to the context in the function before invoking.
  void InvokeFunction(Register function,
                      const ParameterCount& actual,
                      InvokeFlag flag,
                      const CallWrapper& call_wrapper,
                      CallKind call_kind);

  void InvokeFunction(Handle<JSFunction> function,
                      const ParameterCount& actual,
                      InvokeFlag flag,
                      const CallWrapper& call_wrapper,
                      CallKind call_kind);

  void IsObjectJSObjectType(Register heap_object,
                            Register map,
                            Register scratch,
                            Label* fail);

  void IsInstanceJSObjectType(Register map,
                              Register scratch,
                              Label* fail);

  void IsObjectJSStringType(Register object,
                            Register scratch,
                            Label* fail);

#ifdef ENABLE_DEBUGGER_SUPPORT
  // ---------------------------------------------------------------------------
  // Debugger Support

  void DebugBreak();
#endif

  // ---------------------------------------------------------------------------
  // Exception handling

  // Push a new try handler and link into try handler chain.
  void PushTryHandler(StackHandler::Kind kind, int handler_index);

  // Unlink the stack handler on top of the stack from the try handler chain.
  // Must preserve the result register.
  void PopTryHandler();

  // Passes thrown value to the handler at the top of the try handler chain.
  void Throw(Register value);

  // Propagates an uncatchable exception to the top of the current JS stack's
  // handler chain.
  void ThrowUncatchable(Register value);

  // ---------------------------------------------------------------------------
  // Inline caching support

  // Generate code for checking access rights - used for security checks
  // on access to global objects across environments. The holder register
  // is left untouched, whereas both scratch registers are clobbered.
  void CheckAccessGlobalProxy(Register holder_reg,
                              Register scratch,
                              Label* miss);

  void GetNumberHash(Register t0, Register scratch);

  void LoadFromNumberDictionary(Label* miss,
                                Register elements,
                                Register key,
                                Register result,
                                Register t0,
                                Register t1,
                                Register t2);


  inline void MarkCode(NopMarkerTypes type) {
    nop(type);
  }

  // Check if the given instruction is a 'type' marker.
  // i.e. check if it is a mov r<type>, r<type> (referenced as nop(type)).
  // These instructions are generated to mark special location in the code,
  // like some special IC code.
  static inline bool IsMarkedCode(Instr instr, int type) {
    ASSERT((FIRST_IC_MARKER <= type) && (type < LAST_CODE_MARKER));
    return IsNop(instr, type);
  }


  static inline int GetCodeMarker(Instr instr) {
    int dst_reg_offset = 12;
    int dst_mask = 0xf << dst_reg_offset;
    int src_mask = 0xf;
    int dst_reg = (instr & dst_mask) >> dst_reg_offset;
    int src_reg = instr & src_mask;
    uint32_t non_register_mask = ~(dst_mask | src_mask);
    uint32_t mov_mask = al | 13 << 21;

    // Return <n> if we have a mov rn rn, else return -1.
    int type = ((instr & non_register_mask) == mov_mask) &&
               (dst_reg == src_reg) &&
               (FIRST_IC_MARKER <= dst_reg) && (dst_reg < LAST_CODE_MARKER)
               ? src_reg
               : -1;
    ASSERT((type == -1) ||
           ((FIRST_IC_MARKER <= type) && (type < LAST_CODE_MARKER)));
    return type;
  }
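
  // Added note: MarkCode(type) emits nop(type), which encodes as
  // "mov r<type>, r<type>"; IsMarkedCode() and GetCodeMarker() above simply
  // decode that register number back out of the instruction word.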


  // ---------------------------------------------------------------------------
  // Allocation support

  // Allocate an object in new space. The object_size is specified
  // either in bytes or in words if the allocation flag SIZE_IN_WORDS
  // is passed. If the new space is exhausted control continues at the
  // gc_required label. The allocated object is returned in result. If
  // the flag tag_allocated_object is true the result is tagged as
  // a heap object. All registers are clobbered also when control
  // continues at the gc_required label.
  void AllocateInNewSpace(int object_size,
                          Register result,
                          Register scratch1,
                          Register scratch2,
                          Label* gc_required,
                          AllocationFlags flags);
  void AllocateInNewSpace(Register object_size,
                          Register result,
                          Register scratch1,
                          Register scratch2,
                          Label* gc_required,
                          AllocationFlags flags);

  // Undo allocation in new space. The object passed and objects allocated after
  // it will no longer be allocated. The caller must make sure that no pointers
  // are left to the object(s) no longer allocated as they would be invalid when
  // allocation is undone.
  void UndoAllocationInNewSpace(Register object, Register scratch);


  void AllocateTwoByteString(Register result,
                             Register length,
                             Register scratch1,
                             Register scratch2,
                             Register scratch3,
                             Label* gc_required);
  void AllocateAsciiString(Register result,
                           Register length,
                           Register scratch1,
                           Register scratch2,
                           Register scratch3,
                           Label* gc_required);
  void AllocateTwoByteConsString(Register result,
                                 Register length,
                                 Register scratch1,
                                 Register scratch2,
                                 Label* gc_required);
  void AllocateAsciiConsString(Register result,
                               Register length,
                               Register scratch1,
                               Register scratch2,
                               Label* gc_required);
  void AllocateTwoByteSlicedString(Register result,
                                   Register length,
                                   Register scratch1,
                                   Register scratch2,
                                   Label* gc_required);
  void AllocateAsciiSlicedString(Register result,
                                 Register length,
                                 Register scratch1,
                                 Register scratch2,
                                 Label* gc_required);

  // Allocates a heap number or jumps to the gc_required label if the young
  // space is full and a scavenge is needed. All registers are clobbered also
  // when control continues at the gc_required label.
  void AllocateHeapNumber(Register result,
                          Register scratch1,
                          Register scratch2,
                          Register heap_number_map,
                          Label* gc_required);
  void AllocateHeapNumberWithValue(Register result,
                                   DwVfpRegister value,
                                   Register scratch1,
                                   Register scratch2,
                                   Register heap_number_map,
                                   Label* gc_required);

  // Copies a fixed number of fields of heap objects from src to dst.
  void CopyFields(Register dst, Register src, RegList temps, int field_count);

  // Copies a number of bytes from src to dst. All registers are clobbered. On
  // exit src and dst will point to the place just after where the last byte was
  // read or written and length will be zero.
  void CopyBytes(Register src,
                 Register dst,
                 Register length,
                 Register scratch);

  // Initialize fields with filler values. Fields starting at |start_offset|
  // not including end_offset are overwritten with the value in |filler|. At
  // the end of the loop, |start_offset| takes the value of |end_offset|.
  void InitializeFieldsWithFiller(Register start_offset,
                                  Register end_offset,
                                  Register filler);

  // ---------------------------------------------------------------------------
  // Support functions.

  // Try to get the function prototype of a function and put the value in
  // the result register. Checks that the function really is a
  // function and jumps to the miss label if the fast checks fail. The
  // function register will be untouched; the other registers may be
  // clobbered.
  void TryGetFunctionPrototype(Register function,
                               Register result,
                               Register scratch,
                               Label* miss,
                               bool miss_on_bound_function = false);

  // Compare object type for heap object. heap_object contains a non-Smi
  // whose object type should be compared with the given type. This both
  // sets the flags and leaves the object type in the type_reg register.
  // It leaves the map in the map register (unless the type_reg and map register
  // are the same register). It leaves the heap object in the heap_object
  // register unless the heap_object register is the same register as one of the
  // other registers.
  void CompareObjectType(Register heap_object,
                         Register map,
                         Register type_reg,
                         InstanceType type);

  // Compare instance type in a map. map contains a valid map object whose
  // object type should be compared with the given type. This both
  // sets the flags and leaves the object type in the type_reg register.
  void CompareInstanceType(Register map,
                           Register type_reg,
                           InstanceType type);


  // Check if a map for a JSObject indicates that the object has fast elements.
  // Jump to the specified label if it does not.
  void CheckFastElements(Register map,
                         Register scratch,
                         Label* fail);

  // Check if a map for a JSObject indicates that the object can have both smi
  // and HeapObject elements. Jump to the specified label if it does not.
  void CheckFastObjectElements(Register map,
                               Register scratch,
                               Label* fail);

  // Check if a map for a JSObject indicates that the object has fast smi only
  // elements. Jump to the specified label if it does not.
  void CheckFastSmiOnlyElements(Register map,
                                Register scratch,
                                Label* fail);

  // Check to see if maybe_number can be stored as a double in
  // FastDoubleElements. If it can, store it at the index specified by key in
  // the FastDoubleElements array elements. Otherwise jump to fail, in which
  // case scratch2, scratch3 and scratch4 are unmodified.
  void StoreNumberToDoubleElements(Register value_reg,
                                   Register key_reg,
                                   Register receiver_reg,
                                   Register elements_reg,
                                   Register scratch1,
                                   Register scratch2,
                                   Register scratch3,
                                   Register scratch4,
                                   Label* fail);

  // Compare an object's map with the specified map and its transitioned
  // elements maps if mode is ALLOW_ELEMENT_TRANSITION_MAPS. Condition flags are
  // set with result of map compare. If multiple map compares are required, the
  // compare sequences branches to early_success.
  void CompareMap(Register obj,
                  Register scratch,
                  Handle<Map> map,
                  Label* early_success,
                  CompareMapMode mode = REQUIRE_EXACT_MAP);

  // Check if the map of an object is equal to a specified map and branch to
  // label if not. Skip the smi check if not required (object is known to be a
  // heap object). If mode is ALLOW_ELEMENT_TRANSITION_MAPS, then also match
  // against maps that are ElementsKind transition maps of the specified map.
  void CheckMap(Register obj,
                Register scratch,
                Handle<Map> map,
                Label* fail,
                SmiCheckType smi_check_type,
                CompareMapMode mode = REQUIRE_EXACT_MAP);


  void CheckMap(Register obj,
                Register scratch,
                Heap::RootListIndex index,
                Label* fail,
                SmiCheckType smi_check_type);


  // Check if the map of an object is equal to a specified map and branch to a
  // specified target if equal. Skip the smi check if not required (object is
  // known to be a heap object).
  void DispatchMap(Register obj,
                   Register scratch,
                   Handle<Map> map,
                   Handle<Code> success,
                   SmiCheckType smi_check_type);


  // Compare the object in a register to a value from the root list.
  // Uses the ip register as scratch.
  void CompareRoot(Register obj, Heap::RootListIndex index);


  // Load and check the instance type of an object for being a string.
  // Loads the type into the second argument register.
  // Returns a condition that will be enabled if the object was a string.
  Condition IsObjectStringType(Register obj,
                               Register type) {
    ldr(type, FieldMemOperand(obj, HeapObject::kMapOffset));
    ldrb(type, FieldMemOperand(type, Map::kInstanceTypeOffset));
    tst(type, Operand(kIsNotStringMask));
    ASSERT_EQ(0, kStringTag);
    return eq;
  }
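
  // Illustrative use (hypothetical caller, not from this file):
  //   Condition is_string = IsObjectStringType(obj, type);
  //   b(NegateCondition(is_string), &not_a_string);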


  // Generates code for reporting that an illegal operation has
  // occurred.
  void IllegalOperation(int num_arguments);

  // Picks out an array index from the hash field.
  // Register use:
  //   hash - holds the index's hash. Clobbered.
  //   index - holds the overwritten index on exit.
  void IndexFromHash(Register hash, Register index);

  // Get the number of least significant bits from a register.
  void GetLeastBitsFromSmi(Register dst, Register src, int num_least_bits);
  void GetLeastBitsFromInt32(Register dst, Register src, int num_least_bits);

  // Uses VFP instructions to Convert a Smi to a double.
  void IntegerToDoubleConversionWithVFP3(Register inReg,
                                         Register outHighReg,
                                         Register outLowReg);

  // Load the value of a number object into a VFP double register. If the object
  // is not a number a jump to the label not_number is performed and the VFP
  // double register is unchanged.
  void ObjectToDoubleVFPRegister(
      Register object,
      DwVfpRegister value,
      Register scratch1,
      Register scratch2,
      Register heap_number_map,
      SwVfpRegister scratch3,
      Label* not_number,
      ObjectToDoubleFlags flags = NO_OBJECT_TO_DOUBLE_FLAGS);

  // Load the value of a smi object into a VFP double register. The register
  // scratch1 can be the same register as smi in which case smi will hold the
  // untagged value afterwards.
  void SmiToDoubleVFPRegister(Register smi,
                              DwVfpRegister value,
                              Register scratch1,
                              SwVfpRegister scratch2);

  // Convert the HeapNumber pointed to by source to a 32bits signed integer
  // dest. If the HeapNumber does not fit into a 32bits signed integer branch
  // to not_int32 label. If VFP3 is available double_scratch is used but not
  // scratch2.
  void ConvertToInt32(Register source,
                      Register dest,
                      Register scratch,
                      Register scratch2,
                      DwVfpRegister double_scratch,
                      Label* not_int32);

  // Truncates a double using a specific rounding mode.
  // Clears the z flag (ne condition) if an overflow occurs.
  // If exact_conversion is true, the z flag is also cleared if the conversion
  // was inexact, i.e. if the double value could not be converted exactly
  // to a 32bit integer.
  void EmitVFPTruncate(VFPRoundingMode rounding_mode,
                       SwVfpRegister result,
                       DwVfpRegister double_input,
                       Register scratch1,
                       Register scratch2,
                       CheckForInexactConversion check
                           = kDontCheckForInexactConversion);

  // Helper for EmitECMATruncate.
  // This will truncate a floating-point value outside of the signed 32bit
  // integer range to a 32bit signed integer.
  // Expects the double value loaded in input_high and input_low.
  // Exits with the answer in 'result'.
  // Note that this code does not work for values in the 32bit range!
  void EmitOutOfInt32RangeTruncate(Register result,
                                   Register input_high,
                                   Register input_low,
                                   Register scratch);

  // Performs a truncating conversion of a floating point number as used by
  // the JS bitwise operations. See ECMA-262 9.5: ToInt32.
  // Exits with 'result' holding the answer and all other registers clobbered.
  void EmitECMATruncate(Register result,
                        DwVfpRegister double_input,
                        SwVfpRegister single_scratch,
                        Register scratch,
                        Register scratch2,
                        Register scratch3);

  // Count leading zeros in a 32 bit word. On ARM5 and later it uses the clz
  // instruction. On pre-ARM5 hardware this routine gives the wrong answer
  // for 0 (31 instead of 32). Source and scratch can be the same in which case
  // the source is clobbered. Source and zeros can also be the same in which
  // case scratch should be a different register.
  void CountLeadingZeros(Register zeros,
                         Register source,
                         Register scratch);

  // ---------------------------------------------------------------------------
  // Runtime calls

  // Call a code stub.
  void CallStub(CodeStub* stub, Condition cond = al);

  // Call a code stub.
  void TailCallStub(CodeStub* stub, Condition cond = al);

  // Call a runtime routine.
  void CallRuntime(const Runtime::Function* f, int num_arguments);
  void CallRuntimeSaveDoubles(Runtime::FunctionId id);

  // Convenience function: Same as above, but takes the fid instead.
  void CallRuntime(Runtime::FunctionId fid, int num_arguments);

  // Convenience function: call an external reference.
  void CallExternalReference(const ExternalReference& ext,
                             int num_arguments);

  // Tail call of a runtime routine (jump).
  // Like JumpToExternalReference, but also takes care of passing the number
  // of parameters.
  void TailCallExternalReference(const ExternalReference& ext,
                                 int num_arguments,
                                 int result_size);

  // Convenience function: tail call a runtime routine (jump).
  void TailCallRuntime(Runtime::FunctionId fid,
                       int num_arguments,
                       int result_size);

  int CalculateStackPassedWords(int num_reg_arguments,
                                int num_double_arguments);

  // Before calling a C-function from generated code, align arguments on stack.
  // After aligning the frame, non-register arguments must be stored in
  // sp[0], sp[4], etc., not pushed. The argument count assumes all arguments
  // are word sized. If double arguments are used, this function assumes that
  // all double arguments are stored before core registers; otherwise the
  // correct alignment of the double values is not guaranteed.
  // Some compilers/platforms require the stack to be aligned when calling
  // C++ code.
  // Needs a scratch register to do some arithmetic. This register will be
  // trashed.
  void PrepareCallCFunction(int num_reg_arguments,
                            int num_double_registers,
                            Register scratch);
  void PrepareCallCFunction(int num_reg_arguments,
                            Register scratch);

  // There are two ways of passing double arguments on ARM, depending on
  // whether soft or hard floating point ABI is used. These functions
  // abstract parameter passing for the three different ways we call
  // C functions from generated code.
  void SetCallCDoubleArguments(DoubleRegister dreg);
  void SetCallCDoubleArguments(DoubleRegister dreg1, DoubleRegister dreg2);
  void SetCallCDoubleArguments(DoubleRegister dreg, Register reg);

  // Calls a C function and cleans up the space for arguments allocated
  // by PrepareCallCFunction. The called function is not allowed to trigger a
  // garbage collection, since that might move the code and invalidate the
  // return address (unless this is somehow accounted for by the called
  // function).
  void CallCFunction(ExternalReference function, int num_arguments);
  void CallCFunction(Register function, int num_arguments);
  void CallCFunction(ExternalReference function,
                     int num_reg_arguments,
                     int num_double_arguments);
  void CallCFunction(Register function,
                     int num_reg_arguments,
                     int num_double_arguments);

  void GetCFunctionDoubleResult(const DoubleRegister dst);

  // Calls an API function. Allocates HandleScope, extracts returned value
  // from handle and propagates exceptions. Restores context. stack_space
  // - space to be unwound on exit (includes the call JS arguments space and
  // the additional space allocated for the fast call).
  void CallApiFunctionAndReturn(ExternalReference function, int stack_space);

  // Jump to a runtime routine.
  void JumpToExternalReference(const ExternalReference& builtin);

  // Invoke specified builtin JavaScript function. Adds an entry to
  // the unresolved list if the name does not resolve.
  void InvokeBuiltin(Builtins::JavaScript id,
                     InvokeFlag flag,
                     const CallWrapper& call_wrapper = NullCallWrapper());

  // Store the code object for the given builtin in the target register and
  // setup the function in r1.
  void GetBuiltinEntry(Register target, Builtins::JavaScript id);

  // Store the function for the given builtin in the target register.
  void GetBuiltinFunction(Register target, Builtins::JavaScript id);

  Handle<Object> CodeObject() {
    ASSERT(!code_object_.is_null());
    return code_object_;
  }


  // ---------------------------------------------------------------------------
  // StatsCounter support

  void SetCounter(StatsCounter* counter, int value,
                  Register scratch1, Register scratch2);
  void IncrementCounter(StatsCounter* counter, int value,
                        Register scratch1, Register scratch2);
  void DecrementCounter(StatsCounter* counter, int value,
                        Register scratch1, Register scratch2);


  // ---------------------------------------------------------------------------
  // Debugging

  // Calls Abort(msg) if the condition cond is not satisfied.
  // Use --debug_code to enable.
  void Assert(Condition cond, const char* msg);
  void AssertRegisterIsRoot(Register reg, Heap::RootListIndex index);
  void AssertFastElements(Register elements);

  // Like Assert(), but always enabled.
  void Check(Condition cond, const char* msg);

  // Print a message to stdout and abort execution.
  void Abort(const char* msg);

  // Verify restrictions about code generated in stubs.
  void set_generating_stub(bool value) { generating_stub_ = value; }
  bool generating_stub() { return generating_stub_; }
  void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
  bool allow_stub_calls() { return allow_stub_calls_; }
  void set_has_frame(bool value) { has_frame_ = value; }
  bool has_frame() { return has_frame_; }
  inline bool AllowThisStubCall(CodeStub* stub);

  // EABI variant for double arguments in use.
  bool use_eabi_hardfloat() {
#if USE_EABI_HARDFLOAT
    return true;
#else
    return false;
#endif
  }

  // ---------------------------------------------------------------------------
  // Number utilities

  // Check whether the value of reg is a power of two and not zero. If not
  // control continues at the label not_power_of_two. If reg is a power of two
  // the register scratch contains the value of (reg - 1) when control falls
  // through.
  void JumpIfNotPowerOfTwoOrZero(Register reg,
                                 Register scratch,
                                 Label* not_power_of_two_or_zero);
  // Check whether the value of reg is a power of two and not zero.
  // Control falls through if it is, with scratch containing the mask
  // value (reg - 1).
  // Otherwise control jumps to the 'zero_and_neg' label if the value of reg is
  // zero or negative, or jumps to the 'not_power_of_two' label if the value is
  // strictly positive but not a power of two.
  void JumpIfNotPowerOfTwoOrZeroAndNeg(Register reg,
                                       Register scratch,
                                       Label* zero_and_neg,
                                       Label* not_power_of_two);

  // ---------------------------------------------------------------------------
  // Smi utilities

  void SmiTag(Register reg, SBit s = LeaveCC) {
    add(reg, reg, Operand(reg), s);
  }
  void SmiTag(Register dst, Register src, SBit s = LeaveCC) {
    add(dst, src, Operand(src), s);
  }
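
  // Added note: adding a register to itself doubles the value, which equals a
  // left shift by kSmiTagSize (1), so SmiTag() produces the standard ARM smi
  // encoding of value << 1.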

  // Try to convert int32 to smi. If the value is too large, preserve
  // the original value and jump to not_a_smi. Destroys scratch and
  // sets flags.
  void TrySmiTag(Register reg, Label* not_a_smi, Register scratch) {
    mov(scratch, reg);
    SmiTag(scratch, SetCC);
    b(vs, not_a_smi);
    mov(reg, scratch);
  }
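
  // Added note: the add emitted by SmiTag(scratch, SetCC) sets the overflow
  // flag when the value does not fit in 31 signed bits, which is what the
  // b(vs, not_a_smi) above relies on.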

  void SmiUntag(Register reg, SBit s = LeaveCC) {
    mov(reg, Operand(reg, ASR, kSmiTagSize), s);
  }
  void SmiUntag(Register dst, Register src, SBit s = LeaveCC) {
    mov(dst, Operand(src, ASR, kSmiTagSize), s);
  }

  // Untag the source value into destination and jump if source is a smi.
  // Source and destination can be the same register.
  void UntagAndJumpIfSmi(Register dst, Register src, Label* smi_case);

  // Untag the source value into destination and jump if source is not a smi.
  // Source and destination can be the same register.
  void UntagAndJumpIfNotSmi(Register dst, Register src, Label* non_smi_case);

  // Jump if the register contains a smi.
  inline void JumpIfSmi(Register value, Label* smi_label) {
    tst(value, Operand(kSmiTagMask));
    b(eq, smi_label);
  }
  // Jump if the register contains a non-smi.
  inline void JumpIfNotSmi(Register value, Label* not_smi_label) {
    tst(value, Operand(kSmiTagMask));
    b(ne, not_smi_label);
  }
  // Jump if either of the registers contain a non-smi.
  void JumpIfNotBothSmi(Register reg1, Register reg2, Label* on_not_both_smi);
  // Jump if either of the registers contain a smi.
  void JumpIfEitherSmi(Register reg1, Register reg2, Label* on_either_smi);

  // Abort execution if argument is a smi. Used in debug code.
  void AbortIfSmi(Register object);
  void AbortIfNotSmi(Register object);

  // Abort execution if argument is a string. Used in debug code.
  void AbortIfNotString(Register object);

  // Abort execution if argument is not the root value with the given index.
  void AbortIfNotRootValue(Register src,
                           Heap::RootListIndex root_value_index,
                           const char* message);

  // ---------------------------------------------------------------------------
  // HeapNumber utilities

  void JumpIfNotHeapNumber(Register object,
                           Register heap_number_map,
                           Register scratch,
                           Label* on_not_heap_number);

  // ---------------------------------------------------------------------------
  // String utilities

  // Checks if both objects are sequential ASCII strings and jumps to label
  // if either is not. Assumes that neither object is a smi.
  void JumpIfNonSmisNotBothSequentialAsciiStrings(Register object1,
                                                  Register object2,
                                                  Register scratch1,
                                                  Register scratch2,
                                                  Label* failure);

  // Checks if both objects are sequential ASCII strings and jumps to label
  // if either is not.
  void JumpIfNotBothSequentialAsciiStrings(Register first,
                                           Register second,
                                           Register scratch1,
                                           Register scratch2,
                                           Label* not_flat_ascii_strings);

  // Checks if both instance types are sequential ASCII strings and jumps to
  // label if either is not.
  void JumpIfBothInstanceTypesAreNotSequentialAscii(
      Register first_object_instance_type,
      Register second_object_instance_type,
      Register scratch1,
      Register scratch2,
      Label* failure);

  // Check if instance type is sequential ASCII string and jump to label if
  // it is not.
  void JumpIfInstanceTypeIsNotSequentialAscii(Register type,
                                              Register scratch,
                                              Label* failure);


  // ---------------------------------------------------------------------------
  // Patching helpers.

  // Get the location of a relocated constant (its address in the constant pool)
  // from its load site.
  void GetRelocatedValueLocation(Register ldr_location,
                                 Register result);


  void ClampUint8(Register output_reg, Register input_reg);

  void ClampDoubleToUint8(Register result_reg,
                          DoubleRegister input_reg,
                          DoubleRegister temp_double_reg);


  void LoadInstanceDescriptors(Register map, Register descriptors);

  // Activation support.
  void EnterFrame(StackFrame::Type type);
  void LeaveFrame(StackFrame::Type type);

  // Expects object in r0 and returns map with validated enum cache
  // in r0. Assumes that any other register can be used as a scratch.
  void CheckEnumCache(Register null_value, Label* call_runtime);

 private:
  void CallCFunctionHelper(Register function,
                           int num_reg_arguments,
                           int num_double_arguments);

  void Jump(intptr_t target, RelocInfo::Mode rmode, Condition cond = al);

  // Helper functions for generating invokes.
  void InvokePrologue(const ParameterCount& expected,
                      const ParameterCount& actual,
                      Handle<Code> code_constant,
                      Register code_reg,
                      Label* done,
                      bool* definitely_mismatches,
                      InvokeFlag flag,
                      const CallWrapper& call_wrapper,
                      CallKind call_kind);

  void InitializeNewString(Register string,
                           Register length,
                           Heap::RootListIndex map_index,
                           Register scratch1,
                           Register scratch2);

  // Helper for implementing JumpIfNotInNewSpace and JumpIfInNewSpace.
  void InNewSpace(Register object,
                  Register scratch,
                  Condition cond,  // eq for new space, ne otherwise.
                  Label* branch);

  // Helper for finding the mark bits for an address. Afterwards, the
  // bitmap register points at the word with the mark bits and the mask
  // register holds the position of the first bit. Leaves addr_reg unchanged.
  inline void GetMarkBits(Register addr_reg,
                          Register bitmap_reg,
                          Register mask_reg);

  // Helper for throwing exceptions. Compute a handler address and jump to
  // it. See the implementation for register usage.
  void JumpToHandlerEntry();

  // Compute memory operands for safepoint stack slots.
  static int SafepointRegisterStackIndex(int reg_code);
  MemOperand SafepointRegisterSlot(Register reg);
  MemOperand SafepointRegistersAndDoublesSlot(Register reg);

  bool generating_stub_;
  bool allow_stub_calls_;
  bool has_frame_;
  // This handle will be patched with the code object on installation.
  Handle<Object> code_object_;

  // Needs access to SafepointRegisterStackIndex for optimized frame
  // traversal.
  friend class OptimizedFrame;
};


#ifdef ENABLE_DEBUGGER_SUPPORT
// The code patcher is used to patch (typically) small parts of code e.g. for
// debugging and other types of instrumentation. When using the code patcher
// the exact number of bytes specified must be emitted. It is not legal to emit
// relocation information. If any of these constraints are violated it causes
// an assertion to fail.
class CodePatcher {
 public:
  CodePatcher(byte* address, int instructions);
  virtual ~CodePatcher();

  // Macro assembler to emit code.
  MacroAssembler* masm() { return &masm_; }

  // Emit an instruction directly.
  void Emit(Instr instr);

  // Emit an address directly.
  void Emit(Address addr);

  // Emit the condition part of an instruction leaving the rest of the current
  // instruction unchanged.
  void EmitCondition(Condition cond);

 private:
  byte* address_;  // The address of the code being patched.
  int instructions_;  // Number of instructions of the expected patch size.
  int size_;  // Number of bytes of the expected patch size.
  MacroAssembler masm_;  // Macro assembler used to generate the code.
};
#endif  // ENABLE_DEBUGGER_SUPPORT


// -----------------------------------------------------------------------------
// Static helper functions.

inline MemOperand ContextOperand(Register context, int index) {
  return MemOperand(context, Context::SlotOffset(index));
}


inline MemOperand GlobalObjectOperand() {
  return ContextOperand(cp, Context::GLOBAL_INDEX);
}


#ifdef GENERATED_CODE_COVERAGE
#define CODE_COVERAGE_STRINGIFY(x) #x
#define CODE_COVERAGE_TOSTRING(x) CODE_COVERAGE_STRINGIFY(x)
#define __FILE_LINE__ __FILE__ ":" CODE_COVERAGE_TOSTRING(__LINE__)
#define ACCESS_MASM(masm) masm->stop(__FILE_LINE__); masm->
#else
#define ACCESS_MASM(masm) masm->
#endif
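
// Added note: source files typically bind this macro to a short local name,
// e.g. "#define __ ACCESS_MASM(masm)", and then emit code as "__ mov(r0, ...)".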


} }  // namespace v8::internal

#endif  // V8_ARM_MACRO_ASSEMBLER_ARM_H_