// Copyright 2006-2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_ARM)

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "debug.h"
#include "runtime.h"

namespace v8 {
namespace internal {

MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      code_object_(Heap::undefined_value()) {
}


// We always generate ARM code, never THUMB code, even if V8 is compiled to
// THUMB, so we require inter-working support.
#if defined(__thumb__) && !defined(USE_THUMB_INTERWORK)
#error "flag -mthumb-interwork missing"
#endif


// We do not support THUMB inter-working with an ARM architecture that does not
// support the blx instruction (below v5t). If you know what CPU you are
// compiling for, you can use -march=armv7 or similar.
#if defined(USE_THUMB_INTERWORK) && !defined(CAN_USE_THUMB_INSTRUCTIONS)
# error "For thumb inter-working we require an architecture which supports blx"
#endif


// Using bx does not yield better code, so use it only when required
#if defined(USE_THUMB_INTERWORK)
#define USE_BX 1
#endif


void MacroAssembler::Jump(Register target, Condition cond) {
#if USE_BX
  bx(target, cond);
#else
  mov(pc, Operand(target), LeaveCC, cond);
#endif
}


void MacroAssembler::Jump(intptr_t target, RelocInfo::Mode rmode,
                          Condition cond) {
#if USE_BX
  mov(ip, Operand(target, rmode), LeaveCC, cond);
  bx(ip, cond);
#else
  mov(pc, Operand(target, rmode), LeaveCC, cond);
#endif
}


void MacroAssembler::Jump(byte* target, RelocInfo::Mode rmode,
                          Condition cond) {
  ASSERT(!RelocInfo::IsCodeTarget(rmode));
  Jump(reinterpret_cast<intptr_t>(target), rmode, cond);
}


void MacroAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
                          Condition cond) {
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  // 'code' is always generated ARM code, never THUMB code
  Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond);
}


void MacroAssembler::Call(Register target, Condition cond) {
#if USE_BLX
  blx(target, cond);
#else
  // Set lr for return at current pc + 8.
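  // Reading pc as an operand yields the address of the current instruction
  // plus 8, so lr ends up pointing just past the mov to pc below.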
  mov(lr, Operand(pc), LeaveCC, cond);
  mov(pc, Operand(target), LeaveCC, cond);
#endif
}


void MacroAssembler::Call(intptr_t target, RelocInfo::Mode rmode,
                          Condition cond) {
#if USE_BLX
  // On ARMv5 and after, the recommended call sequence is:
  //   ldr ip, [pc, #...]
  //   blx ip

  // The two instructions (ldr and blx) could be separated by a constant
  // pool and the code would still work. The issue comes from the
  // patching code, which expects the ldr to be just above the blx.
  { BlockConstPoolScope block_const_pool(this);
    // Statement positions are expected to be recorded when the target
    // address is loaded. The mov method will automatically record
    // positions when pc is the target; since this is not the case here,
    // we have to do it explicitly.
    WriteRecordedPositions();

    mov(ip, Operand(target, rmode), LeaveCC, cond);
    blx(ip, cond);
  }

  ASSERT(kCallTargetAddressOffset == 2 * kInstrSize);
#else
  // Set lr for return at current pc + 8.
  mov(lr, Operand(pc), LeaveCC, cond);
  // Emit a ldr<cond> pc, [pc + offset of target in constant pool].
  mov(pc, Operand(target, rmode), LeaveCC, cond);

  ASSERT(kCallTargetAddressOffset == kInstrSize);
#endif
}


void MacroAssembler::Call(byte* target, RelocInfo::Mode rmode,
                          Condition cond) {
  ASSERT(!RelocInfo::IsCodeTarget(rmode));
  Call(reinterpret_cast<intptr_t>(target), rmode, cond);
}


void MacroAssembler::Call(Handle<Code> code, RelocInfo::Mode rmode,
                          Condition cond) {
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  // 'code' is always generated ARM code, never THUMB code
  Call(reinterpret_cast<intptr_t>(code.location()), rmode, cond);
}


void MacroAssembler::Ret(Condition cond) {
#if USE_BX
  bx(lr, cond);
#else
  mov(pc, Operand(lr), LeaveCC, cond);
#endif
}


void MacroAssembler::StackLimitCheck(Label* on_stack_overflow) {
  LoadRoot(ip, Heap::kStackLimitRootIndex);
  cmp(sp, Operand(ip));
  b(lo, on_stack_overflow);
}


void MacroAssembler::Drop(int count, Condition cond) {
  if (count > 0) {
    add(sp, sp, Operand(count * kPointerSize), LeaveCC, cond);
  }
}


void MacroAssembler::Swap(Register reg1,
                          Register reg2,
                          Register scratch,
                          Condition cond) {
  if (scratch.is(no_reg)) {
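    // No scratch register available: swap the two registers in place with the
    // classic three-eor trick (a ^= b; b ^= a; a ^= b).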
    eor(reg1, reg1, Operand(reg2), LeaveCC, cond);
    eor(reg2, reg2, Operand(reg1), LeaveCC, cond);
    eor(reg1, reg1, Operand(reg2), LeaveCC, cond);
  } else {
    mov(scratch, reg1, LeaveCC, cond);
    mov(reg1, reg2, LeaveCC, cond);
    mov(reg2, scratch, LeaveCC, cond);
  }
}


void MacroAssembler::Call(Label* target) {
  bl(target);
}


void MacroAssembler::Move(Register dst, Handle<Object> value) {
  mov(dst, Operand(value));
}


void MacroAssembler::Move(Register dst, Register src) {
  if (!dst.is(src)) {
    mov(dst, src);
  }
}


void MacroAssembler::And(Register dst, Register src1, const Operand& src2,
                         Condition cond) {
  if (!CpuFeatures::IsSupported(ARMv7) || src2.is_single_instruction()) {
    and_(dst, src1, src2, LeaveCC, cond);
    return;
  }
  int32_t immediate = src2.immediate();
  if (immediate == 0) {
    mov(dst, Operand(0), LeaveCC, cond);
    return;
  }
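  // An immediate of the form 2^n - 1 is a mask of the n low bits, so on ARMv7
  // the AND can be done with a single unsigned bitfield extract of those bits.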
  if (IsPowerOf2(immediate + 1) && ((immediate & 1) != 0)) {
    ubfx(dst, src1, 0, WhichPowerOf2(immediate + 1), cond);
    return;
  }
  and_(dst, src1, src2, LeaveCC, cond);
}


void MacroAssembler::Ubfx(Register dst, Register src1, int lsb, int width,
                          Condition cond) {
  ASSERT(lsb < 32);
  if (!CpuFeatures::IsSupported(ARMv7)) {
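    // Emulate the bitfield extract with a mask and a shift. The mask selects
    // bits [lsb, lsb + width): e.g. lsb = 4, width = 3 gives
    // ((1 << 7) - 1) - ((1 << 4) - 1) = 0x70.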
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    and_(dst, src1, Operand(mask), LeaveCC, cond);
    if (lsb != 0) {
      mov(dst, Operand(dst, LSR, lsb), LeaveCC, cond);
    }
  } else {
    ubfx(dst, src1, lsb, width, cond);
  }
}


void MacroAssembler::Sbfx(Register dst, Register src1, int lsb, int width,
                          Condition cond) {
  ASSERT(lsb < 32);
  if (!CpuFeatures::IsSupported(ARMv7)) {
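    // Emulate the signed extract: mask out the field, shift it up so its top
    // bit becomes bit 31, then arithmetic-shift back down to sign-extend it.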
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    and_(dst, src1, Operand(mask), LeaveCC, cond);
    int shift_up = 32 - lsb - width;
    int shift_down = lsb + shift_up;
    if (shift_up != 0) {
      mov(dst, Operand(dst, LSL, shift_up), LeaveCC, cond);
    }
    if (shift_down != 0) {
      mov(dst, Operand(dst, ASR, shift_down), LeaveCC, cond);
    }
  } else {
    sbfx(dst, src1, lsb, width, cond);
  }
}


void MacroAssembler::Bfc(Register dst, int lsb, int width, Condition cond) {
  ASSERT(lsb < 32);
  if (!CpuFeatures::IsSupported(ARMv7)) {
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    bic(dst, dst, Operand(mask));
  } else {
    bfc(dst, lsb, width, cond);
  }
}


void MacroAssembler::SmiJumpTable(Register index, Vector<Label*> targets) {
  // Empty the const pool.
  CheckConstPool(true, true);
  add(pc, pc, Operand(index,
                      LSL,
                      assembler::arm::Instr::kInstrSizeLog2 - kSmiTagSize));
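  // Reading pc in the add yields the add's own address + 8, i.e. the first
  // branch below the alignment nop; the scaled smi index selects the entry.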
  BlockConstPoolBefore(pc_offset() + (targets.length() + 1) * kInstrSize);
  nop();  // Jump table alignment.
  for (int i = 0; i < targets.length(); i++) {
    b(targets[i]);
  }
}


void MacroAssembler::LoadRoot(Register destination,
                              Heap::RootListIndex index,
                              Condition cond) {
  ldr(destination, MemOperand(roots, index << kPointerSizeLog2), cond);
}


void MacroAssembler::StoreRoot(Register source,
                               Heap::RootListIndex index,
                               Condition cond) {
  str(source, MemOperand(roots, index << kPointerSizeLog2), cond);
}


void MacroAssembler::RecordWriteHelper(Register object,
                                       Register address,
                                       Register scratch) {
  if (FLAG_debug_code) {
    // Check that the object is not in new space.
    Label not_in_new_space;
    InNewSpace(object, scratch, ne, &not_in_new_space);
    Abort("new-space object passed to RecordWriteHelper");
    bind(&not_in_new_space);
  }

  // Calculate page address.
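  // Pages are kPageSize-aligned, so clearing the low kPageSizeBits bits of the
  // object pointer yields the start address of its page.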
  Bfc(object, 0, kPageSizeBits);

  // Calculate region number.
  Ubfx(address, address, Page::kRegionSizeLog2,
       kPageSizeBits - Page::kRegionSizeLog2);

  // Mark region dirty.
  ldr(scratch, MemOperand(object, Page::kDirtyFlagOffset));
  mov(ip, Operand(1));
  orr(scratch, scratch, Operand(ip, LSL, address));
  str(scratch, MemOperand(object, Page::kDirtyFlagOffset));
}


void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                Label* branch) {
  ASSERT(cc == eq || cc == ne);
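  // New space is a single contiguous, aligned region, so masking the address
  // and comparing against the new-space start tests containment directly.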
  and_(scratch, object, Operand(ExternalReference::new_space_mask()));
  cmp(scratch, Operand(ExternalReference::new_space_start()));
  b(cc, branch);
}


// Will clobber 4 registers: object, scratch0, scratch1, ip. The
// register 'object' contains a heap object pointer. The heap object
// tag is shifted away.
void MacroAssembler::RecordWrite(Register object,
                                 Operand offset,
                                 Register scratch0,
                                 Register scratch1) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are cp.
  ASSERT(!object.is(cp) && !scratch0.is(cp) && !scratch1.is(cp));

  Label done;

  // First, test that the object is not in the new space. We cannot set
  // region marks for new space pages.
  InNewSpace(object, scratch0, eq, &done);

  // Add offset into the object.
  add(scratch0, object, offset);

  // Record the actual write.
  RecordWriteHelper(object, scratch0, scratch1);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    mov(object, Operand(BitCast<int32_t>(kZapValue)));
    mov(scratch0, Operand(BitCast<int32_t>(kZapValue)));
    mov(scratch1, Operand(BitCast<int32_t>(kZapValue)));
  }
}


// Will clobber 4 registers: object, address, scratch, ip. The
// register 'object' contains a heap object pointer. The heap object
// tag is shifted away.
void MacroAssembler::RecordWrite(Register object,
                                 Register address,
                                 Register scratch) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are cp.
  ASSERT(!object.is(cp) && !address.is(cp) && !scratch.is(cp));

  Label done;

  // First, test that the object is not in the new space. We cannot set
  // region marks for new space pages.
  InNewSpace(object, scratch, eq, &done);

  // Record the actual write.
  RecordWriteHelper(object, address, scratch);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    mov(object, Operand(BitCast<int32_t>(kZapValue)));
    mov(address, Operand(BitCast<int32_t>(kZapValue)));
    mov(scratch, Operand(BitCast<int32_t>(kZapValue)));
  }
}


void MacroAssembler::Ldrd(Register dst1, Register dst2,
                          const MemOperand& src, Condition cond) {
  ASSERT(src.rm().is(no_reg));
  ASSERT(!dst1.is(lr));  // r14.
  ASSERT_EQ(0, dst1.code() % 2);
  ASSERT_EQ(dst1.code() + 1, dst2.code());

  // Generate two ldr instructions if ldrd is not available.
  if (CpuFeatures::IsSupported(ARMv7)) {
    CpuFeatures::Scope scope(ARMv7);
    ldrd(dst1, dst2, src, cond);
  } else {
    MemOperand src2(src);
    src2.set_offset(src2.offset() + 4);
    if (dst1.is(src.rn())) {
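      // Loading dst1 first would clobber the base register, so in that case
      // load the second word before the first.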
      ldr(dst2, src2, cond);
      ldr(dst1, src, cond);
    } else {
      ldr(dst1, src, cond);
      ldr(dst2, src2, cond);
    }
  }
}


void MacroAssembler::Strd(Register src1, Register src2,
                          const MemOperand& dst, Condition cond) {
  ASSERT(dst.rm().is(no_reg));
  ASSERT(!src1.is(lr));  // r14.
  ASSERT_EQ(0, src1.code() % 2);
  ASSERT_EQ(src1.code() + 1, src2.code());

  // Generate two str instructions if strd is not available.
  if (CpuFeatures::IsSupported(ARMv7)) {
    CpuFeatures::Scope scope(ARMv7);
    strd(src1, src2, dst, cond);
  } else {
    MemOperand dst2(dst);
    dst2.set_offset(dst2.offset() + 4);
    str(src1, dst, cond);
    str(src2, dst2, cond);
  }
}


void MacroAssembler::EnterFrame(StackFrame::Type type) {
  // r0-r3: preserved
  stm(db_w, sp, cp.bit() | fp.bit() | lr.bit());
  mov(ip, Operand(Smi::FromInt(type)));
  push(ip);
  mov(ip, Operand(CodeObject()));
  push(ip);
  add(fp, sp, Operand(3 * kPointerSize));  // Adjust FP to point to saved FP.
}


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  // r0: preserved
  // r1: preserved
  // r2: preserved

  // Drop the execution stack down to the frame pointer and restore
  // the caller frame pointer and return address.
  mov(sp, fp);
  ldm(ia_w, sp, fp.bit() | lr.bit());
}


void MacroAssembler::EnterExitFrame(ExitFrame::Mode mode) {
  // Compute the argv pointer and keep it in a callee-saved register.
  // r0 is argc.
  add(r6, sp, Operand(r0, LSL, kPointerSizeLog2));
  sub(r6, r6, Operand(kPointerSize));

  // Compute callee's stack pointer before making changes and save it as
  // ip register so that it is restored as sp register on exit, thereby
  // popping the args.

  // ip = sp + kPointerSize * #args;
  add(ip, sp, Operand(r0, LSL, kPointerSizeLog2));

  // Prepare the stack to be aligned when calling into C. After this point
  // there are 5 pushes before the call into C, so the stack needs to be
  // aligned after 5 pushes.
  int frame_alignment = ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment != kPointerSize) {
    // The following code needs to be more general if this assert does not hold.
    ASSERT(frame_alignment == 2 * kPointerSize);
    // With 5 pushes left the frame must be unaligned at this point.
    mov(r7, Operand(Smi::FromInt(0)));
    tst(sp, Operand((frame_alignment - kPointerSize) & frame_alignment_mask));
    push(r7, eq);  // Push if aligned to make it unaligned.
  }

  // Push in reverse order: caller_fp, sp_on_exit, and caller_pc.
  stm(db_w, sp, fp.bit() | ip.bit() | lr.bit());
  mov(fp, Operand(sp));  // Setup new frame pointer.

  mov(ip, Operand(CodeObject()));
  push(ip);  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  mov(ip, Operand(ExternalReference(Top::k_c_entry_fp_address)));
  str(fp, MemOperand(ip));
  mov(ip, Operand(ExternalReference(Top::k_context_address)));
  str(cp, MemOperand(ip));

  // Setup argc and the builtin function in callee-saved registers.
  mov(r4, Operand(r0));
  mov(r5, Operand(r1));


#ifdef ENABLE_DEBUGGER_SUPPORT
  // Save the state of all registers to the stack from the memory
  // location. This is needed to allow nested break points.
  if (mode == ExitFrame::MODE_DEBUG) {
    // Use sp as base to push.
    CopyRegistersFromMemoryToStack(sp, kJSCallerSaved);
  }
#endif
}


void MacroAssembler::InitializeNewString(Register string,
                                         Register length,
                                         Heap::RootListIndex map_index,
                                         Register scratch1,
                                         Register scratch2) {
  mov(scratch1, Operand(length, LSL, kSmiTagSize));
  LoadRoot(scratch2, map_index);
  str(scratch1, FieldMemOperand(string, String::kLengthOffset));
  mov(scratch1, Operand(String::kEmptyHashField));
  str(scratch2, FieldMemOperand(string, HeapObject::kMapOffset));
  str(scratch1, FieldMemOperand(string, String::kHashFieldOffset));
}


int MacroAssembler::ActivationFrameAlignment() {
#if defined(V8_HOST_ARCH_ARM)
  // Running on the real platform. Use the alignment as mandated by the local
  // environment.
  // Note: This will break if we ever start generating snapshots on one ARM
  // platform for another ARM platform with a different alignment.
  return OS::ActivationFrameAlignment();
#else  // defined(V8_HOST_ARCH_ARM)
  // If we are using the simulator then we should always align to the expected
  // alignment. As the simulator is used to generate snapshots we do not know
  // if the target platform will need alignment, so this is controlled from a
  // flag.
  return FLAG_sim_stack_alignment;
#endif  // defined(V8_HOST_ARCH_ARM)
}


void MacroAssembler::LeaveExitFrame(ExitFrame::Mode mode) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Restore the memory copy of the registers by digging them out from
  // the stack. This is needed to allow nested break points.
  if (mode == ExitFrame::MODE_DEBUG) {
    // This code intentionally clobbers r2 and r3.
    const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
    const int kOffset = ExitFrameConstants::kCodeOffset - kCallerSavedSize;
    add(r3, fp, Operand(kOffset));
    CopyRegistersFromStackToMemory(r3, r2, kJSCallerSaved);
  }
#endif

  // Clear top frame.
  mov(r3, Operand(0));
  mov(ip, Operand(ExternalReference(Top::k_c_entry_fp_address)));
  str(r3, MemOperand(ip));

  // Restore current context from top and clear it in debug mode.
  mov(ip, Operand(ExternalReference(Top::k_context_address)));
  ldr(cp, MemOperand(ip));
#ifdef DEBUG
  str(r3, MemOperand(ip));
#endif

  // Pop the arguments, restore registers, and return.
  mov(sp, Operand(fp));  // respect ABI stack constraint
  ldm(ia, sp, fp.bit() | sp.bit() | pc.bit());
}


void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    Register code_reg,
                                    Label* done,
                                    InvokeFlag flag) {
  bool definitely_matches = false;
  Label regular_invoke;

  // Check whether the expected and actual arguments count match. If not,
  // setup registers according to contract with ArgumentsAdaptorTrampoline:
  //  r0: actual arguments count
  //  r1: function (passed through to callee)
  //  r2: expected arguments count
  //  r3: callee code entry

  // The code below is made a lot easier because the calling code already sets
  // up actual and expected registers according to the contract if values are
  // passed in registers.
  ASSERT(actual.is_immediate() || actual.reg().is(r0));
  ASSERT(expected.is_immediate() || expected.reg().is(r2));
  ASSERT((!code_constant.is_null() && code_reg.is(no_reg)) || code_reg.is(r3));

  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(r0, Operand(actual.immediate()));
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        mov(r2, Operand(expected.immediate()));
      }
    }
  } else {
    if (actual.is_immediate()) {
      cmp(expected.reg(), Operand(actual.immediate()));
      b(eq, &regular_invoke);
      mov(r0, Operand(actual.immediate()));
    } else {
      cmp(expected.reg(), Operand(actual.reg()));
      b(eq, &regular_invoke);
    }
  }

  if (!definitely_matches) {
    if (!code_constant.is_null()) {
      mov(r3, Operand(code_constant));
      add(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag));
    }

    Handle<Code> adaptor =
        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
    if (flag == CALL_FUNCTION) {
      Call(adaptor, RelocInfo::CODE_TARGET);
      b(done);
    } else {
      Jump(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&regular_invoke);
  }
}


void MacroAssembler::InvokeCode(Register code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag) {
  Label done;

  InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
  if (flag == CALL_FUNCTION) {
    Call(code);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    Jump(code);
  }

  // Continue here if InvokePrologue does handle the invocation due to
  // mismatched parameter counts.
  bind(&done);
}


void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag) {
  Label done;

  InvokePrologue(expected, actual, code, no_reg, &done, flag);
  if (flag == CALL_FUNCTION) {
    Call(code, rmode);
  } else {
    Jump(code, rmode);
  }

  // Continue here if InvokePrologue does handle the invocation due to
  // mismatched parameter counts.
  bind(&done);
}


void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  // Contract with called JS functions requires that function is passed in r1.
  ASSERT(fun.is(r1));

  Register expected_reg = r2;
  Register code_reg = r3;

  ldr(code_reg, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
  ldr(expected_reg,
      FieldMemOperand(code_reg,
                      SharedFunctionInfo::kFormalParameterCountOffset));
  mov(expected_reg, Operand(expected_reg, ASR, kSmiTagSize));
  ldr(code_reg,
      MemOperand(code_reg, SharedFunctionInfo::kCodeOffset - kHeapObjectTag));
  add(code_reg, code_reg, Operand(Code::kHeaderSize - kHeapObjectTag));

  ParameterCount expected(expected_reg);
  InvokeCode(code_reg, expected, actual, flag);
}


void MacroAssembler::InvokeFunction(JSFunction* function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(function->is_compiled());

  // Get the function and setup the context.
  mov(r1, Operand(Handle<JSFunction>(function)));
  ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  // Invoke the cached code.
  Handle<Code> code(function->code());
  ParameterCount expected(function->shared()->formal_parameter_count());
  InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag);
}

#ifdef ENABLE_DEBUGGER_SUPPORT
void MacroAssembler::SaveRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of registers to memory location.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      mov(ip, Operand(ExternalReference(Debug_Address::Register(i))));
      str(reg, MemOperand(ip));
    }
  }
}


void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of memory location to registers.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      mov(ip, Operand(ExternalReference(Debug_Address::Register(i))));
      ldr(reg, MemOperand(ip));
    }
  }
}


void MacroAssembler::CopyRegistersFromMemoryToStack(Register base,
                                                    RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of the memory location to the stack and adjust base.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      mov(ip, Operand(ExternalReference(Debug_Address::Register(i))));
      ldr(ip, MemOperand(ip));
      str(ip, MemOperand(base, 4, NegPreIndex));
    }
  }
}


void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
                                                    Register scratch,
                                                    RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of the stack to the memory location and adjust base.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      mov(ip, Operand(ExternalReference(Debug_Address::Register(i))));
      ldr(scratch, MemOperand(base, 4, PostIndex));
      str(scratch, MemOperand(ip));
    }
  }
}


void MacroAssembler::DebugBreak() {
  ASSERT(allow_stub_calls());
  mov(r0, Operand(0));
  mov(r1, Operand(ExternalReference(Runtime::kDebugBreak)));
  CEntryStub ces(1);
  Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}
#endif


void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  // Adjust this code if not the case.
  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
  // The pc (return address) is passed in register lr.
  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      mov(r3, Operand(StackHandler::TRY_CATCH));
    } else {
      mov(r3, Operand(StackHandler::TRY_FINALLY));
    }
    ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize
           && StackHandlerConstants::kFPOffset == 2 * kPointerSize
           && StackHandlerConstants::kPCOffset == 3 * kPointerSize);
    stm(db_w, sp, r3.bit() | fp.bit() | lr.bit());
    // Save the current handler as the next handler.
    mov(r3, Operand(ExternalReference(Top::k_handler_address)));
    ldr(r1, MemOperand(r3));
    ASSERT(StackHandlerConstants::kNextOffset == 0);
    push(r1);
    // Link this handler as the new current one.
    str(sp, MemOperand(r3));
  } else {
    // Must preserve r0-r4, r5-r7 are available.
    ASSERT(try_location == IN_JS_ENTRY);
    // The frame pointer does not point to a JS frame so we save NULL
    // for fp. We expect the code throwing an exception to check fp
    // before dereferencing it to restore the context.
    mov(ip, Operand(0));  // To save a NULL frame pointer.
    mov(r6, Operand(StackHandler::ENTRY));
    ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize
           && StackHandlerConstants::kFPOffset == 2 * kPointerSize
           && StackHandlerConstants::kPCOffset == 3 * kPointerSize);
    stm(db_w, sp, r6.bit() | ip.bit() | lr.bit());
    // Save the current handler as the next handler.
    mov(r7, Operand(ExternalReference(Top::k_handler_address)));
    ldr(r6, MemOperand(r7));
    ASSERT(StackHandlerConstants::kNextOffset == 0);
    push(r6);
    // Link this handler as the new current one.
    str(sp, MemOperand(r7));
  }
}


void MacroAssembler::PopTryHandler() {
  ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
  pop(r1);
  mov(ip, Operand(ExternalReference(Top::k_handler_address)));
  add(sp, sp, Operand(StackHandlerConstants::kSize - kPointerSize));
  str(r1, MemOperand(ip));
}


Register MacroAssembler::CheckMaps(JSObject* object, Register object_reg,
                                   JSObject* holder, Register holder_reg,
                                   Register scratch,
                                   int save_at_depth,
                                   Label* miss) {
  // Make sure there's no overlap between scratch and the other
  // registers.
  ASSERT(!scratch.is(object_reg) && !scratch.is(holder_reg));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

  if (save_at_depth == depth) {
    str(reg, MemOperand(sp));
  }

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  while (object != holder) {
    depth++;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

    // Get the map of the current object.
    ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
    cmp(scratch, Operand(Handle<Map>(object->map())));

    // Branch on the result of the map check.
    b(ne, miss);

    // Check access rights to the global object. This has to happen
    // after the map check so that we know that the object is
    // actually a global object.
    if (object->IsJSGlobalProxy()) {
      CheckAccessGlobalProxy(reg, scratch, miss);
      // Restore scratch register to be the map of the object. In the
      // new space case below, we load the prototype from the map in
      // the scratch register.
      ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
    }

    reg = holder_reg;  // from now the object is in holder_reg
    JSObject* prototype = JSObject::cast(object->GetPrototype());
    if (Heap::InNewSpace(prototype)) {
      // The prototype is in new space; we cannot store a reference
      // to it in the code. Load it from the map.
      ldr(reg, FieldMemOperand(scratch, Map::kPrototypeOffset));
    } else {
      // The prototype is in old space; load it directly.
      mov(reg, Operand(Handle<JSObject>(prototype)));
    }

    if (save_at_depth == depth) {
      str(reg, MemOperand(sp));
    }

    // Go to the next object in the prototype chain.
    object = prototype;
  }

  // Check the holder map.
  ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
  cmp(scratch, Operand(Handle<Map>(object->map())));
  b(ne, miss);

  // Log the check depth.
  LOG(IntEvent("check-maps-depth", depth + 1));

  // Perform security check for access to the global object and return
  // the holder register.
  ASSERT(object == holder);
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
  if (object->IsJSGlobalProxy()) {
    CheckAccessGlobalProxy(reg, scratch, miss);
  }
  return reg;
}


void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));
  ASSERT(!holder_reg.is(ip));
  ASSERT(!scratch.is(ip));

  // Load current lexical context from the stack frame.
  ldr(scratch, MemOperand(fp, StandardFrameConstants::kContextOffset));
  // In debug mode, make sure the lexical context is set.
#ifdef DEBUG
  cmp(scratch, Operand(0));
  Check(ne, "we should not have an empty lexical context");
#endif

  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  ldr(scratch, FieldMemOperand(scratch, offset));
  ldr(scratch, FieldMemOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    // TODO(119): avoid push(holder_reg)/pop(holder_reg)
    // Cannot use ip as a temporary in this verification code, because ip is
    // clobbered as part of cmp with an object Operand.
    push(holder_reg);  // Temporarily save holder on the stack.
    // Read the first word and compare to the global_context_map.
    ldr(holder_reg, FieldMemOperand(scratch, HeapObject::kMapOffset));
    LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
    cmp(holder_reg, ip);
    Check(eq, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);  // Restore holder.
  }

  // Check if both contexts are the same.
  ldr(ip, FieldMemOperand(holder_reg, JSGlobalProxy::kContextOffset));
  cmp(scratch, Operand(ip));
  b(eq, &same_contexts);

  // Check the context is a global context.
  if (FLAG_debug_code) {
    // TODO(119): avoid push(holder_reg)/pop(holder_reg)
    // Cannot use ip as a temporary in this verification code, because ip is
    // clobbered as part of cmp with an object Operand.
    push(holder_reg);  // Temporarily save holder on the stack.
    mov(holder_reg, ip);  // Move ip to its holding place.
    LoadRoot(ip, Heap::kNullValueRootIndex);
    cmp(holder_reg, ip);
    Check(ne, "JSGlobalProxy::context() should not be null.");

    ldr(holder_reg, FieldMemOperand(holder_reg, HeapObject::kMapOffset));
    LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
    cmp(holder_reg, ip);
    Check(eq, "JSGlobalObject::global_context should be a global context.");
    // Restoring ip is not needed; it is reloaded below.
    pop(holder_reg);  // Restore holder.
    // Restore ip to holder's context.
    ldr(ip, FieldMemOperand(holder_reg, JSGlobalProxy::kContextOffset));
  }

  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;

  ldr(scratch, FieldMemOperand(scratch, token_offset));
  ldr(ip, FieldMemOperand(ip, token_offset));
  cmp(scratch, Operand(ip));
  b(ne, miss);

  bind(&same_contexts);
}


void MacroAssembler::AllocateInNewSpace(int object_size,
                                        Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(scratch1));
  ASSERT(!scratch1.is(scratch2));

  // Make object size into bytes.
  if ((flags & SIZE_IN_WORDS) != 0) {
    object_size *= kPointerSize;
  }
  ASSERT_EQ(0, object_size & kObjectAlignmentMask);

  // Load address of new object into result and allocation top address into
  // scratch1.
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();
  mov(scratch1, Operand(new_space_allocation_top));
  if ((flags & RESULT_CONTAINS_TOP) == 0) {
    ldr(result, MemOperand(scratch1));
  } else if (FLAG_debug_code) {
    // Assert that result actually contains top on entry. scratch2 is used
    // immediately below so this use of scratch2 does not cause difference with
    // respect to register content between debug and release mode.
    ldr(scratch2, MemOperand(scratch1));
    cmp(result, scratch2);
    Check(eq, "Unexpected allocation top");
  }

  // Calculate new top and bail out if new space is exhausted. Use result
  // to calculate the new top.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  mov(scratch2, Operand(new_space_allocation_limit));
  ldr(scratch2, MemOperand(scratch2));
  add(result, result, Operand(object_size));
  cmp(result, Operand(scratch2));
  b(hi, gc_required);
  str(result, MemOperand(scratch1));

  // Tag and adjust back to start of new object.
  if ((flags & TAG_OBJECT) != 0) {
    sub(result, result, Operand(object_size - kHeapObjectTag));
  } else {
    sub(result, result, Operand(object_size));
  }
}


void MacroAssembler::AllocateInNewSpace(Register object_size,
                                        Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(scratch1));
  ASSERT(!scratch1.is(scratch2));

  // Load address of new object into result and allocation top address into
  // scratch1.
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();
  mov(scratch1, Operand(new_space_allocation_top));
  if ((flags & RESULT_CONTAINS_TOP) == 0) {
    ldr(result, MemOperand(scratch1));
  } else if (FLAG_debug_code) {
    // Assert that result actually contains top on entry. scratch2 is used
    // immediately below so this use of scratch2 does not cause difference with
    // respect to register content between debug and release mode.
    ldr(scratch2, MemOperand(scratch1));
    cmp(result, scratch2);
    Check(eq, "Unexpected allocation top");
  }

  // Calculate new top and bail out if new space is exhausted. Use result
  // to calculate the new top. If the object size is given in words, a shift
  // is required to get the size in bytes.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  mov(scratch2, Operand(new_space_allocation_limit));
  ldr(scratch2, MemOperand(scratch2));
  if ((flags & SIZE_IN_WORDS) != 0) {
    add(result, result, Operand(object_size, LSL, kPointerSizeLog2));
  } else {
    add(result, result, Operand(object_size));
  }
  cmp(result, Operand(scratch2));
  b(hi, gc_required);

  // Update allocation top. result temporarily holds the new top.
  if (FLAG_debug_code) {
    tst(result, Operand(kObjectAlignmentMask));
    Check(eq, "Unaligned allocation in new space");
  }
  str(result, MemOperand(scratch1));

  // Adjust back to start of new object.
  if ((flags & SIZE_IN_WORDS) != 0) {
    sub(result, result, Operand(object_size, LSL, kPointerSizeLog2));
  } else {
    sub(result, result, Operand(object_size));
  }

  // Tag object if requested.
  if ((flags & TAG_OBJECT) != 0) {
    add(result, result, Operand(kHeapObjectTag));
  }
}


void MacroAssembler::UndoAllocationInNewSpace(Register object,
                                              Register scratch) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Make sure the object has no tag before resetting top.
  and_(object, object, Operand(~kHeapObjectTagMask));
#ifdef DEBUG
  // Check that the object un-allocated is below the current top.
  mov(scratch, Operand(new_space_allocation_top));
  ldr(scratch, MemOperand(scratch));
  cmp(object, scratch);
  Check(lt, "Undo allocation of non allocated memory");
#endif
  // Write the address of the object to un-allocate as the current top.
  mov(scratch, Operand(new_space_allocation_top));
  str(object, MemOperand(scratch));
}


void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, Operand(length, LSL, 1));  // Length in bytes, not chars.
  add(scratch1, scratch1,
      Operand(kObjectAlignmentMask + SeqTwoByteString::kHeaderSize));
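  // Adding kObjectAlignmentMask and then clearing the low bits rounds the
  // total size (header plus character data) up to the next aligned boundary.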
  and_(scratch1, scratch1, Operand(~kObjectAlignmentMask));

  // Allocate two-byte string in new space.
  AllocateInNewSpace(scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  InitializeNewString(result,
                      length,
                      Heap::kStringMapRootIndex,
                      scratch1,
                      scratch2);
}


void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
  ASSERT(kCharSize == 1);
  add(scratch1, length,
      Operand(kObjectAlignmentMask + SeqAsciiString::kHeaderSize));
  and_(scratch1, scratch1, Operand(~kObjectAlignmentMask));

  // Allocate ASCII string in new space.
  AllocateInNewSpace(scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  InitializeNewString(result,
                      length,
                      Heap::kAsciiStringMapRootIndex,
                      scratch1,
                      scratch2);
}


void MacroAssembler::AllocateTwoByteConsString(Register result,
                                               Register length,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  InitializeNewString(result,
                      length,
                      Heap::kConsStringMapRootIndex,
                      scratch1,
                      scratch2);
}


void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register length,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  InitializeNewString(result,
                      length,
                      Heap::kConsAsciiStringMapRootIndex,
                      scratch1,
                      scratch2);
}


void MacroAssembler::CompareObjectType(Register object,
                                       Register map,
                                       Register type_reg,
                                       InstanceType type) {
  ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
  CompareInstanceType(map, type_reg, type);
}


void MacroAssembler::CompareInstanceType(Register map,
                                         Register type_reg,
                                         InstanceType type) {
  ldrb(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
  cmp(type_reg, Operand(type));
}


void MacroAssembler::CheckMap(Register obj,
                              Register scratch,
                              Handle<Map> map,
                              Label* fail,
                              bool is_heap_object) {
  if (!is_heap_object) {
    BranchOnSmi(obj, fail);
  }
  ldr(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
  mov(ip, Operand(map));
  cmp(scratch, ip);
  b(ne, fail);
}


void MacroAssembler::CheckMap(Register obj,
                              Register scratch,
                              Heap::RootListIndex index,
                              Label* fail,
                              bool is_heap_object) {
  if (!is_heap_object) {
    BranchOnSmi(obj, fail);
  }
  ldr(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
  LoadRoot(ip, index);
  cmp(scratch, ip);
  b(ne, fail);
}


void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss) {
  // Check that the receiver isn't a smi.
  BranchOnSmi(function, miss);

  // Check that the function really is a function. Load map into result reg.
  CompareObjectType(function, result, scratch, JS_FUNCTION_TYPE);
  b(ne, miss);

  // Make sure that the function has an instance prototype.
  Label non_instance;
  ldrb(scratch, FieldMemOperand(result, Map::kBitFieldOffset));
  tst(scratch, Operand(1 << Map::kHasNonInstancePrototype));
  b(ne, &non_instance);

  // Get the prototype or initial map from the function.
  ldr(result,
      FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  cmp(result, ip);
  b(eq, miss);

  // If the function does not have an initial map, we're done.
  Label done;
  CompareObjectType(result, scratch, scratch, MAP_TYPE);
  b(ne, &done);

  // Get the prototype from the initial map.
  ldr(result, FieldMemOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  ldr(result, FieldMemOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}


void MacroAssembler::CallStub(CodeStub* stub, Condition cond) {
  ASSERT(allow_stub_calls());  // stub calls are not allowed in some stubs
  Call(stub->GetCode(), RelocInfo::CODE_TARGET, cond);
}


void MacroAssembler::TailCallStub(CodeStub* stub, Condition cond) {
  ASSERT(allow_stub_calls());  // stub calls are not allowed in some stubs
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond);
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  if (argc > 1) {
    add(sp, sp, Operand((argc - 1) * kPointerSize));
  }
  Ret();
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    add(sp, sp, Operand(num_arguments * kPointerSize));
  }
  LoadRoot(r0, Heap::kUndefinedValueRootIndex);
}


void MacroAssembler::IntegerToDoubleConversionWithVFP3(Register inReg,
                                                        Register outHighReg,
                                                        Register outLowReg) {
  // ARMv7 VFP3 instructions to implement integer to double conversion.
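  // Untag the smi, move it into a single-precision VFP register, convert it
  // to double, and move the result back out as two 32-bit halves.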
  mov(r7, Operand(inReg, ASR, kSmiTagSize));
  vmov(s15, r7);
  vcvt_f64_s32(d7, s15);
  vmov(outLowReg, outHighReg, d7);
}


void MacroAssembler::ObjectToDoubleVFPRegister(Register object,
                                               DwVfpRegister result,
                                               Register scratch1,
                                               Register scratch2,
                                               Register heap_number_map,
                                               SwVfpRegister scratch3,
                                               Label* not_number,
                                               ObjectToDoubleFlags flags) {
  Label done;
  if ((flags & OBJECT_NOT_SMI) == 0) {
    Label not_smi;
    BranchOnNotSmi(object, &not_smi);
    // Remove smi tag and convert to double.
    mov(scratch1, Operand(object, ASR, kSmiTagSize));
    vmov(scratch3, scratch1);
    vcvt_f64_s32(result, scratch3);
    b(&done);
    bind(&not_smi);
  }
  // Check for heap number and load double value from it.
  ldr(scratch1, FieldMemOperand(object, HeapObject::kMapOffset));
  sub(scratch2, object, Operand(kHeapObjectTag));
  cmp(scratch1, heap_number_map);
  b(ne, not_number);
  if ((flags & AVOID_NANS_AND_INFINITIES) != 0) {
    // If exponent is all ones the number is either a NaN or +/-Infinity.
    ldr(scratch1, FieldMemOperand(object, HeapNumber::kExponentOffset));
    Sbfx(scratch1,
         scratch1,
         HeapNumber::kExponentShift,
         HeapNumber::kExponentBits);
    // An all-ones value sign-extends to -1.
    cmp(scratch1, Operand(-1));
    b(eq, not_number);
  }
  vldr(result, scratch2, HeapNumber::kValueOffset);
  bind(&done);
}


void MacroAssembler::SmiToDoubleVFPRegister(Register smi,
                                            DwVfpRegister value,
                                            Register scratch1,
                                            SwVfpRegister scratch2) {
  mov(scratch1, Operand(smi, ASR, kSmiTagSize));
  vmov(scratch2, scratch1);
  vcvt_f64_s32(value, scratch2);
}


void MacroAssembler::GetLeastBitsFromSmi(Register dst,
                                         Register src,
                                         int num_least_bits) {
  if (CpuFeatures::IsSupported(ARMv7)) {
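    // The payload of a smi starts at bit kSmiTagSize, so ubfx can pull out
    // the low bits directly without untagging first.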
    ubfx(dst, src, kSmiTagSize, num_least_bits);
  } else {
    mov(dst, Operand(src, ASR, kSmiTagSize));
    and_(dst, dst, Operand((1 << num_least_bits) - 1));
  }
}


void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // All parameters are on the stack. r0 has the return value after call.

  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  mov(r0, Operand(num_arguments));
  mov(r1, Operand(ExternalReference(f)));
  CEntryStub stub(1);
  CallStub(&stub);
}


void MacroAssembler::CallRuntime(Runtime::FunctionId fid, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(fid), num_arguments);
}


void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments) {
  mov(r0, Operand(num_arguments));
  mov(r1, Operand(ext));

  CEntryStub stub(1);
  CallStub(&stub);
}


void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  mov(r0, Operand(num_arguments));
  JumpToExternalReference(ext);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid), num_arguments, result_size);
}


void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin) {
#if defined(__thumb__)
  // Thumb mode builtin.
  ASSERT((reinterpret_cast<intptr_t>(builtin.address()) & 1) == 1);
#endif
  mov(r1, Operand(builtin));
  CEntryStub stub(1);
  Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
}


Steve Blocka7e24c12009-10-30 11:49:00 +00001532void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
1533 InvokeJSFlags flags) {
Andrei Popescu402d9372010-02-26 13:31:12 +00001534 GetBuiltinEntry(r2, id);
Steve Blocka7e24c12009-10-30 11:49:00 +00001535 if (flags == CALL_JS) {
Andrei Popescu402d9372010-02-26 13:31:12 +00001536 Call(r2);
Steve Blocka7e24c12009-10-30 11:49:00 +00001537 } else {
1538 ASSERT(flags == JUMP_JS);
Andrei Popescu402d9372010-02-26 13:31:12 +00001539 Jump(r2);
Steve Blocka7e24c12009-10-30 11:49:00 +00001540 }
1541}
1542
1543
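// Note that this loads the builtin's JSFunction into r1 and deliberately
// leaves it there for the callee, which is why target must not be r1 and why
// InvokeBuiltin above uses r2 for the code entry point.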
void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(r1));

  // Load the builtins object into target register.
  ldr(target, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
  ldr(target, FieldMemOperand(target, GlobalObject::kBuiltinsOffset));

  // Load the JavaScript builtin function from the builtins object.
  ldr(r1, FieldMemOperand(target,
                          JSBuiltinsObject::OffsetOfFunctionWithId(id)));

  // Load the code entry point from the builtins object.
  ldr(target, FieldMemOperand(target,
                              JSBuiltinsObject::OffsetOfCodeWithId(id)));
  if (FLAG_debug_code) {
    // Make sure the code objects in the builtins object and in the
    // builtin function are the same.
    push(r1);
    ldr(r1, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
    ldr(r1, FieldMemOperand(r1, SharedFunctionInfo::kCodeOffset));
    cmp(r1, target);
    Assert(eq, "Builtin code object changed");
    pop(r1);
  }
  add(target, target, Operand(Code::kHeaderSize - kHeapObjectTag));
}


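// The counter helpers below write directly to the StatsCounter cell from
// generated code; they emit nothing unless --native-code-counters is enabled
// and the counter itself is enabled at runtime.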
void MacroAssembler::SetCounter(StatsCounter* counter, int value,
                                Register scratch1, Register scratch2) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(scratch1, Operand(value));
    mov(scratch2, Operand(ExternalReference(counter)));
    str(scratch1, MemOperand(scratch2));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value,
                                      Register scratch1, Register scratch2) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(scratch2, Operand(ExternalReference(counter)));
    ldr(scratch1, MemOperand(scratch2));
    add(scratch1, scratch1, Operand(value));
    str(scratch1, MemOperand(scratch2));
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value,
                                      Register scratch1, Register scratch2) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(scratch2, Operand(ExternalReference(counter)));
    ldr(scratch1, MemOperand(scratch2));
    sub(scratch1, scratch1, Operand(value));
    str(scratch1, MemOperand(scratch2));
  }
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code)
    Check(cc, msg);
}


void MacroAssembler::AssertRegisterIsRoot(Register reg,
                                          Heap::RootListIndex index) {
  if (FLAG_debug_code) {
    LoadRoot(ip, index);
    cmp(reg, ip);
    Check(eq, "Register did not match expected root");
  }
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  b(cc, &L);
  Abort(msg);
  // will not return here
  bind(&L);
}


void MacroAssembler::Abort(const char* msg) {
  Label abort_start;
  bind(&abort_start);
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
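  // With kSmiTag == 0 and a one-bit tag mask, p0 is simply p1 rounded down to
  // an even address, which is why it passes for a smi. The difference p1 - p0
  // (0 or 1) is pushed as a second smi below so the runtime can reconstruct
  // the original msg pointer.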
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  set_allow_stub_calls(true);

  mov(r0, Operand(p0));
  push(r0);
  mov(r0, Operand(Smi::FromInt(p1 - p0)));
  push(r0);
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
  if (is_const_pool_blocked()) {
    // If the calling code cares about the exact number of
    // instructions generated, we insert padding here to keep the size
    // of the Abort macro constant.
    static const int kExpectedAbortInstructions = 10;
    int abort_instructions = InstructionsGeneratedSince(&abort_start);
    ASSERT(abort_instructions <= kExpectedAbortInstructions);
    while (abort_instructions++ < kExpectedAbortInstructions) {
      nop();
    }
  }
}


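// Load into dst the function context that is context_chain_length scopes up
// the chain of enclosing contexts; reading the FCONTEXT slot at the end makes
// sure dst never ends up pointing at an intermediate (e.g. with) context.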
void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    ldr(dst, MemOperand(cp, Context::SlotOffset(Context::CLOSURE_INDEX)));
    // Load the function context (which is the incoming, outer context).
    ldr(dst, FieldMemOperand(dst, JSFunction::kContextOffset));
    for (int i = 1; i < context_chain_length; i++) {
      ldr(dst, MemOperand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
      ldr(dst, FieldMemOperand(dst, JSFunction::kContextOffset));
    }
    // The context may be an intermediate context, not a function context.
    ldr(dst, MemOperand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  } else {  // Slot is in the current function context.
    // The context may be an intermediate context, not a function context.
    ldr(dst, MemOperand(cp, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  }
}


void MacroAssembler::JumpIfNotBothSmi(Register reg1,
                                      Register reg2,
                                      Label* on_not_both_smi) {
  ASSERT_EQ(0, kSmiTag);
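  // Both tag bits are checked with a single branch: the second tst below is
  // conditional on eq, so it only executes if reg1 turned out to be a smi;
  // ne afterwards therefore means at least one of the two is not a smi.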
  tst(reg1, Operand(kSmiTagMask));
  tst(reg2, Operand(kSmiTagMask), eq);
  b(ne, on_not_both_smi);
}


void MacroAssembler::JumpIfEitherSmi(Register reg1,
                                     Register reg2,
                                     Label* on_either_smi) {
  ASSERT_EQ(0, kSmiTag);
  tst(reg1, Operand(kSmiTagMask));
  tst(reg2, Operand(kSmiTagMask), ne);
  b(eq, on_either_smi);
}


void MacroAssembler::JumpIfNonSmisNotBothSequentialAsciiStrings(
    Register first,
    Register second,
    Register scratch1,
    Register scratch2,
    Label* failure) {
  // Test that both first and second are sequential ASCII strings.
  // Assume that they are non-smis.
  ldr(scratch1, FieldMemOperand(first, HeapObject::kMapOffset));
  ldr(scratch2, FieldMemOperand(second, HeapObject::kMapOffset));
  ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  ldrb(scratch2, FieldMemOperand(scratch2, Map::kInstanceTypeOffset));

  JumpIfBothInstanceTypesAreNotSequentialAscii(scratch1,
                                               scratch2,
                                               scratch1,
                                               scratch2,
                                               failure);
}


void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register first,
                                                         Register second,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         Label* failure) {
  // Check that neither is a smi.
  ASSERT_EQ(0, kSmiTag);
  and_(scratch1, first, Operand(second));
  tst(scratch1, Operand(kSmiTagMask));
  b(eq, failure);
  JumpIfNonSmisNotBothSequentialAsciiStrings(first,
                                             second,
                                             scratch1,
                                             scratch2,
                                             failure);
}


// Allocates a heap number or jumps to the gc_required label if the young
// space is full and a scavenge is needed.
void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Register heap_number_map,
                                        Label* gc_required) {
  // Allocate an object in the heap for the heap number and tag it as a heap
  // object.
  AllocateInNewSpace(HeapNumber::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Store heap number map in the allocated object.
  AssertRegisterIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
  str(heap_number_map, FieldMemOperand(result, HeapObject::kMapOffset));
}


void MacroAssembler::AllocateHeapNumberWithValue(Register result,
                                                 DwVfpRegister value,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Register heap_number_map,
                                                 Label* gc_required) {
  AllocateHeapNumber(result, scratch1, scratch2, heap_number_map, gc_required);
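  // vstr's immediate offset must be a multiple of four, so the heap object
  // tag cannot simply be folded into the offset; strip it from the address
  // first.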
  sub(scratch1, result, Operand(kHeapObjectTag));
  vstr(value, scratch1, HeapNumber::kValueOffset);
}


void MacroAssembler::CountLeadingZeros(Register zeros,   // Answer.
                                       Register source,  // Input.
                                       Register scratch) {
  ASSERT(!zeros.is(source));
  ASSERT(!zeros.is(scratch));
  ASSERT(!scratch.is(ip));
  ASSERT(!source.is(ip));
  ASSERT(!zeros.is(ip));
#ifdef CAN_USE_ARMV5_INSTRUCTIONS
  clz(zeros, source);  // This instruction is only supported on ARMv5 and later.
#else
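  // No clz available: binary-search the leading zero count instead. Each step
  // tests the top 16/8/4/2/1 bits and, when they are all clear, adds that
  // count to zeros and shifts the value up. For example, 0x00012345 gives 15.
  // (Unlike clz, this sequence produces 31 rather than 32 for a zero input.)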
  mov(zeros, Operand(0));
  Move(scratch, source);
  // Top 16.
  tst(scratch, Operand(0xffff0000));
  add(zeros, zeros, Operand(16), LeaveCC, eq);
  mov(scratch, Operand(scratch, LSL, 16), LeaveCC, eq);
  // Top 8.
  tst(scratch, Operand(0xff000000));
  add(zeros, zeros, Operand(8), LeaveCC, eq);
  mov(scratch, Operand(scratch, LSL, 8), LeaveCC, eq);
  // Top 4.
  tst(scratch, Operand(0xf0000000));
  add(zeros, zeros, Operand(4), LeaveCC, eq);
  mov(scratch, Operand(scratch, LSL, 4), LeaveCC, eq);
  // Top 2.
  tst(scratch, Operand(0xc0000000));
  add(zeros, zeros, Operand(2), LeaveCC, eq);
  mov(scratch, Operand(scratch, LSL, 2), LeaveCC, eq);
  // Top bit.
  tst(scratch, Operand(0x80000000u));
  add(zeros, zeros, Operand(1), LeaveCC, eq);
#endif
}


void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialAscii(
    Register first,
    Register second,
    Register scratch1,
    Register scratch2,
    Label* failure) {
  int kFlatAsciiStringMask =
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask;
  int kFlatAsciiStringTag = ASCII_STRING_TYPE;
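  // A flat ASCII string has the kIsNotStringMask bit clear, ASCII encoding
  // and sequential representation. The mask keeps exactly those three fields,
  // so a single compare against ASCII_STRING_TYPE per register checks all of
  // them at once.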
  and_(scratch1, first, Operand(kFlatAsciiStringMask));
  and_(scratch2, second, Operand(kFlatAsciiStringMask));
  cmp(scratch1, Operand(kFlatAsciiStringTag));
  // Ignore the second test if the first test failed.
  cmp(scratch2, Operand(kFlatAsciiStringTag), eq);
  b(ne, failure);
}


void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(Register type,
                                                            Register scratch,
                                                            Label* failure) {
  int kFlatAsciiStringMask =
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask;
  int kFlatAsciiStringTag = ASCII_STRING_TYPE;
  and_(scratch, type, Operand(kFlatAsciiStringMask));
  cmp(scratch, Operand(kFlatAsciiStringTag));
  b(ne, failure);
}


void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
  int frame_alignment = ActivationFrameAlignment();
  // Up to four simple arguments are passed in registers r0..r3.
  int stack_passed_arguments = (num_arguments <= 4) ? 0 : num_arguments - 4;
  if (frame_alignment > kPointerSize) {
    // Make the stack end at the alignment boundary and make room for the
    // num_arguments - 4 words passed on the stack plus the original value
    // of sp.
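    // For example, with 8-byte alignment and num_arguments == 6 this reserves
    // three words (two stack arguments plus the saved sp), aligns sp down,
    // and stores the old sp in the highest reserved slot so CallCFunction can
    // restore it with a single ldr.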
    mov(scratch, sp);
    sub(sp, sp, Operand((stack_passed_arguments + 1) * kPointerSize));
    ASSERT(IsPowerOf2(frame_alignment));
    and_(sp, sp, Operand(-frame_alignment));
    str(scratch, MemOperand(sp, stack_passed_arguments * kPointerSize));
  } else {
    sub(sp, sp, Operand(stack_passed_arguments * kPointerSize));
  }
}


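// Expected call sequence (a sketch, not enforced here): PrepareCallCFunction,
// then place the first four arguments in r0-r3 and any remaining ones in the
// reserved stack slots, then CallCFunction, which also removes the stack
// adjustment again on return.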
void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  mov(ip, Operand(function));
  CallCFunction(ip, num_arguments);
}


void MacroAssembler::CallCFunction(Register function, int num_arguments) {
  // Make sure that the stack is aligned before calling a C function unless
  // running in the simulator. The simulator has its own alignment check which
  // provides more information.
#if defined(V8_HOST_ARCH_ARM)
  if (FLAG_debug_code) {
    int frame_alignment = OS::ActivationFrameAlignment();
    int frame_alignment_mask = frame_alignment - 1;
    if (frame_alignment > kPointerSize) {
      ASSERT(IsPowerOf2(frame_alignment));
      Label alignment_as_expected;
      tst(sp, Operand(frame_alignment_mask));
      b(eq, &alignment_as_expected);
      // Don't use Check here, as it will call Runtime_Abort and possibly
      // re-enter here.
      stop("Unexpected alignment");
      bind(&alignment_as_expected);
    }
  }
#endif

  // Just call directly. The function called cannot cause a GC, or
  // allow preemption, so the return address in the link register
  // stays correct.
  Call(function);
  int stack_passed_arguments = (num_arguments <= 4) ? 0 : num_arguments - 4;
  if (OS::ActivationFrameAlignment() > kPointerSize) {
    ldr(sp, MemOperand(sp, stack_passed_arguments * kPointerSize));
  } else {
    add(sp, sp, Operand(stack_passed_arguments * kPointerSize));
  }
}


#ifdef ENABLE_DEBUGGER_SUPPORT
CodePatcher::CodePatcher(byte* address, int instructions)
    : address_(address),
      instructions_(instructions),
      size_(instructions * Assembler::kInstrSize),
      masm_(address, size_ + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate
  // size_ bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
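// Typical use is scoped (a sketch): construct a CodePatcher over the target
// address for the number of instructions being rewritten, call Emit() that
// many times, and let the destructor flush the instruction cache for the
// patched range.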


void CodePatcher::Emit(Instr x) {
  masm()->emit(x);
}


void CodePatcher::Emit(Address addr) {
  masm()->emit(reinterpret_cast<Instr>(addr));
}
#endif  // ENABLE_DEBUGGER_SUPPORT


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM