// Copyright 2006-2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_ARM)

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "debug.h"
#include "runtime.h"

namespace v8 {
namespace internal {

MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      code_object_(Heap::undefined_value()) {
}


// We always generate ARM code, never Thumb code, even if V8 is compiled to
// Thumb, so we require inter-working support.
#if defined(__thumb__) && !defined(USE_THUMB_INTERWORK)
#error "flag -mthumb-interwork missing"
#endif


// We do not support Thumb inter-working with an ARM architecture that does
// not support the blx instruction (below v5t). If you know what CPU you are
// compiling for you can use -march=armv7 or similar.
#if defined(USE_THUMB_INTERWORK) && !defined(CAN_USE_THUMB_INSTRUCTIONS)
# error "For thumb inter-working we require an architecture which supports blx"
#endif


// Using bx does not yield better code, so use it only when required.
#if defined(USE_THUMB_INTERWORK)
#define USE_BX 1
#endif


void MacroAssembler::Jump(Register target, Condition cond) {
#if USE_BX
  bx(target, cond);
#else
  mov(pc, Operand(target), LeaveCC, cond);
#endif
}


void MacroAssembler::Jump(intptr_t target, RelocInfo::Mode rmode,
                          Condition cond) {
#if USE_BX
  mov(ip, Operand(target, rmode), LeaveCC, cond);
  bx(ip, cond);
#else
  mov(pc, Operand(target, rmode), LeaveCC, cond);
#endif
}


void MacroAssembler::Jump(byte* target, RelocInfo::Mode rmode,
                          Condition cond) {
  ASSERT(!RelocInfo::IsCodeTarget(rmode));
  Jump(reinterpret_cast<intptr_t>(target), rmode, cond);
}


void MacroAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
                          Condition cond) {
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  // 'code' is always generated ARM code, never THUMB code
  Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond);
}


void MacroAssembler::Call(Register target, Condition cond) {
#if USE_BLX
  blx(target, cond);
#else
  // Set lr for return at current pc + 8.
  mov(lr, Operand(pc), LeaveCC, cond);
  mov(pc, Operand(target), LeaveCC, cond);
#endif
}


void MacroAssembler::Call(intptr_t target, RelocInfo::Mode rmode,
                          Condition cond) {
#if USE_BLX
  // On ARMv5 and after the recommended call sequence is:
  //  ldr ip, [pc, #...]
  //  blx ip

  // The two instructions (ldr and blx) could be separated by a constant
  // pool and the code would still work. The issue comes from the
  // patching code, which expects the ldr to be just above the blx.
  { BlockConstPoolScope block_const_pool(this);
    // Statement positions are expected to be recorded when the target
    // address is loaded. The mov method automatically records positions
    // when pc is the target; since that is not the case here, we have to
    // do it explicitly.
    WriteRecordedPositions();

    mov(ip, Operand(target, rmode), LeaveCC, cond);
    blx(ip, cond);
  }

  ASSERT(kCallTargetAddressOffset == 2 * kInstrSize);
#else
  // Set lr for return at current pc + 8.
  mov(lr, Operand(pc), LeaveCC, cond);
  // Emit a ldr<cond> pc, [pc + offset of target in constant pool].
  mov(pc, Operand(target, rmode), LeaveCC, cond);

  ASSERT(kCallTargetAddressOffset == kInstrSize);
#endif
}


void MacroAssembler::Call(byte* target, RelocInfo::Mode rmode,
                          Condition cond) {
  ASSERT(!RelocInfo::IsCodeTarget(rmode));
  Call(reinterpret_cast<intptr_t>(target), rmode, cond);
}


void MacroAssembler::Call(Handle<Code> code, RelocInfo::Mode rmode,
                          Condition cond) {
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  // 'code' is always generated ARM code, never THUMB code
  Call(reinterpret_cast<intptr_t>(code.location()), rmode, cond);
}


void MacroAssembler::Ret(Condition cond) {
#if USE_BX
  bx(lr, cond);
#else
  mov(pc, Operand(lr), LeaveCC, cond);
#endif
}


void MacroAssembler::StackLimitCheck(Label* on_stack_overflow) {
  LoadRoot(ip, Heap::kStackLimitRootIndex);
  cmp(sp, Operand(ip));
  b(lo, on_stack_overflow);
}


void MacroAssembler::Drop(int count, Condition cond) {
  if (count > 0) {
    add(sp, sp, Operand(count * kPointerSize), LeaveCC, cond);
  }
}


void MacroAssembler::Swap(Register reg1,
                          Register reg2,
                          Register scratch,
                          Condition cond) {
  if (scratch.is(no_reg)) {
    eor(reg1, reg1, Operand(reg2), LeaveCC, cond);
    eor(reg2, reg2, Operand(reg1), LeaveCC, cond);
    eor(reg1, reg1, Operand(reg2), LeaveCC, cond);
  } else {
    mov(scratch, reg1, LeaveCC, cond);
    mov(reg1, reg2, LeaveCC, cond);
    mov(reg2, scratch, LeaveCC, cond);
  }
}
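
// Explanatory note (not part of the original source): when no scratch
// register is available, the branch above swaps reg1 and reg2 with the
// classic three-EOR trick. For example, with reg1 == 5 and reg2 == 9:
//   reg1 ^= reg2;  // reg1 == 12
//   reg2 ^= reg1;  // reg2 == 5
//   reg1 ^= reg2;  // reg1 == 9
// It needs no extra register; when a scratch register is available the
// three-mov path is used instead.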


void MacroAssembler::Call(Label* target) {
  bl(target);
}


void MacroAssembler::Move(Register dst, Handle<Object> value) {
  mov(dst, Operand(value));
}


void MacroAssembler::Move(Register dst, Register src) {
  if (!dst.is(src)) {
    mov(dst, src);
  }
}


void MacroAssembler::And(Register dst, Register src1, const Operand& src2,
                         Condition cond) {
  if (!CpuFeatures::IsSupported(ARMv7) || src2.is_single_instruction()) {
    and_(dst, src1, src2, LeaveCC, cond);
    return;
  }
  int32_t immediate = src2.immediate();
  if (immediate == 0) {
    mov(dst, Operand(0), LeaveCC, cond);
    return;
  }
  if (IsPowerOf2(immediate + 1) && ((immediate & 1) != 0)) {
    ubfx(dst, src1, 0, WhichPowerOf2(immediate + 1), cond);
    return;
  }
  and_(dst, src1, src2, LeaveCC, cond);
}
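
// Explanatory note (not part of the original source): on ARMv7, an AND with
// an immediate of the form 2^n - 1 that cannot be encoded in a single
// instruction is emitted as an unsigned bit-field extract instead. For
// example, a mask such as 0x1FFFF (assuming it is not encodable as a simple
// ARM immediate) turns And(r0, r1, Operand(0x1FFFF)) into roughly:
//   ubfx r0, r1, #0, #17
// which keeps the low 17 bits of r1 in one instruction.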


void MacroAssembler::Ubfx(Register dst, Register src1, int lsb, int width,
                          Condition cond) {
  ASSERT(lsb < 32);
  if (!CpuFeatures::IsSupported(ARMv7)) {
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    and_(dst, src1, Operand(mask), LeaveCC, cond);
    if (lsb != 0) {
      mov(dst, Operand(dst, LSR, lsb), LeaveCC, cond);
    }
  } else {
    ubfx(dst, src1, lsb, width, cond);
  }
}


void MacroAssembler::Sbfx(Register dst, Register src1, int lsb, int width,
                          Condition cond) {
  ASSERT(lsb < 32);
  if (!CpuFeatures::IsSupported(ARMv7)) {
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    and_(dst, src1, Operand(mask), LeaveCC, cond);
    int shift_up = 32 - lsb - width;
    int shift_down = lsb + shift_up;
    if (shift_up != 0) {
      mov(dst, Operand(dst, LSL, shift_up), LeaveCC, cond);
    }
    if (shift_down != 0) {
      mov(dst, Operand(dst, ASR, shift_down), LeaveCC, cond);
    }
  } else {
    sbfx(dst, src1, lsb, width, cond);
  }
}
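
// Explanatory note (not part of the original source): the pre-ARMv7 fallbacks
// above synthesize the bit-field extracts from plain data-processing
// instructions. For an assumed call Sbfx(r0, r1, 4, 8) (sign-extend bits
// 4..11 of r1) the fallback would emit roughly:
//   and r0, r1, #0xFF0     ; keep bits 4..11
//   mov r0, r0, LSL #20    ; move the field's sign bit up to bit 31
//   mov r0, r0, ASR #24    ; shift back arithmetically, replicating the sign
// whereas on ARMv7 this is a single sbfx r0, r1, #4, #8.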


void MacroAssembler::Bfc(Register dst, int lsb, int width, Condition cond) {
  ASSERT(lsb < 32);
  if (!CpuFeatures::IsSupported(ARMv7)) {
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    bic(dst, dst, Operand(mask));
  } else {
    bfc(dst, lsb, width, cond);
  }
}


void MacroAssembler::Usat(Register dst, int satpos, const Operand& src,
                          Condition cond) {
  if (!CpuFeatures::IsSupported(ARMv7)) {
    ASSERT(!dst.is(pc) && !src.rm().is(pc));
    ASSERT((satpos >= 0) && (satpos <= 31));

    // These asserts are required to ensure compatibility with the ARMv7
    // implementation.
    ASSERT((src.shift_op() == ASR) || (src.shift_op() == LSL));
    ASSERT(src.rs().is(no_reg));

    Label done;
    int satval = (1 << satpos) - 1;

    if (cond != al) {
      b(NegateCondition(cond), &done);  // Skip saturate if !condition.
    }
    if (!(src.is_reg() && dst.is(src.rm()))) {
      mov(dst, src);
    }
    tst(dst, Operand(~satval));
    b(eq, &done);
    mov(dst, Operand(0), LeaveCC, mi);  // 0 if negative.
    mov(dst, Operand(satval), LeaveCC, pl);  // satval if positive.
    bind(&done);
  } else {
    usat(dst, satpos, src, cond);
  }
}


void MacroAssembler::SmiJumpTable(Register index, Vector<Label*> targets) {
  // Empty the const pool.
  CheckConstPool(true, true);
  add(pc, pc, Operand(index,
                      LSL,
                      assembler::arm::Instr::kInstrSizeLog2 - kSmiTagSize));
  BlockConstPoolBefore(pc_offset() + (targets.length() + 1) * kInstrSize);
  nop();  // Jump table alignment.
  for (int i = 0; i < targets.length(); i++) {
    b(targets[i]);
  }
}


void MacroAssembler::LoadRoot(Register destination,
                              Heap::RootListIndex index,
                              Condition cond) {
  ldr(destination, MemOperand(roots, index << kPointerSizeLog2), cond);
}


void MacroAssembler::StoreRoot(Register source,
                               Heap::RootListIndex index,
                               Condition cond) {
  str(source, MemOperand(roots, index << kPointerSizeLog2), cond);
}


void MacroAssembler::RecordWriteHelper(Register object,
                                       Register address,
                                       Register scratch) {
  if (FLAG_debug_code) {
    // Check that the object is not in new space.
    Label not_in_new_space;
    InNewSpace(object, scratch, ne, &not_in_new_space);
    Abort("new-space object passed to RecordWriteHelper");
    bind(&not_in_new_space);
  }

  // Calculate page address.
  Bfc(object, 0, kPageSizeBits);

  // Calculate region number.
  Ubfx(address, address, Page::kRegionSizeLog2,
       kPageSizeBits - Page::kRegionSizeLog2);

  // Mark region dirty.
  ldr(scratch, MemOperand(object, Page::kDirtyFlagOffset));
  mov(ip, Operand(1));
  orr(scratch, scratch, Operand(ip, LSL, address));
  str(scratch, MemOperand(object, Page::kDirtyFlagOffset));
}
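
// Explanatory note (not part of the original source): this is the
// region-based write barrier. Clearing the low kPageSizeBits of the object
// pointer yields the page start; Ubfx turns the slot address's offset within
// the page into a region index; and that index selects the bit to set in the
// page's dirty-flag word. Purely for illustration, assuming an 8KB page
// divided into 256-byte regions, a store at page offset 0x468 would mark
// region 0x468 >> 8 == 4.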


void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                Label* branch) {
  ASSERT(cc == eq || cc == ne);
  and_(scratch, object, Operand(ExternalReference::new_space_mask()));
  cmp(scratch, Operand(ExternalReference::new_space_start()));
  b(cc, branch);
}
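
// Explanatory note (not part of the original source): this check relies on
// the new space being one contiguous, suitably aligned block of memory.
// Masking an address with new_space_mask() keeps only the bits shared by
// every address in that block, so the result equals new_space_start() exactly
// when the object lies in new space; callers pass eq or ne to branch on
// membership or non-membership.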


// Will clobber 4 registers: object, scratch0, scratch1, ip. The
// register 'object' contains a heap object pointer. The heap object
// tag is shifted away.
void MacroAssembler::RecordWrite(Register object,
                                 Operand offset,
                                 Register scratch0,
                                 Register scratch1) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are cp.
  ASSERT(!object.is(cp) && !scratch0.is(cp) && !scratch1.is(cp));

  Label done;

  // First, test that the object is not in the new space. We cannot set
  // region marks for new space pages.
  InNewSpace(object, scratch0, eq, &done);

  // Add offset into the object.
  add(scratch0, object, offset);

  // Record the actual write.
  RecordWriteHelper(object, scratch0, scratch1);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    mov(object, Operand(BitCast<int32_t>(kZapValue)));
    mov(scratch0, Operand(BitCast<int32_t>(kZapValue)));
    mov(scratch1, Operand(BitCast<int32_t>(kZapValue)));
  }
}


// Will clobber 4 registers: object, address, scratch, ip. The
// register 'object' contains a heap object pointer. The heap object
// tag is shifted away.
void MacroAssembler::RecordWrite(Register object,
                                 Register address,
                                 Register scratch) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are cp.
  ASSERT(!object.is(cp) && !address.is(cp) && !scratch.is(cp));

  Label done;

  // First, test that the object is not in the new space. We cannot set
  // region marks for new space pages.
  InNewSpace(object, scratch, eq, &done);

  // Record the actual write.
  RecordWriteHelper(object, address, scratch);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    mov(object, Operand(BitCast<int32_t>(kZapValue)));
    mov(address, Operand(BitCast<int32_t>(kZapValue)));
    mov(scratch, Operand(BitCast<int32_t>(kZapValue)));
  }
}


void MacroAssembler::Ldrd(Register dst1, Register dst2,
                          const MemOperand& src, Condition cond) {
  ASSERT(src.rm().is(no_reg));
  ASSERT(!dst1.is(lr));  // r14.
  ASSERT_EQ(0, dst1.code() % 2);
  ASSERT_EQ(dst1.code() + 1, dst2.code());

  // Generate two ldr instructions if ldrd is not available.
  if (CpuFeatures::IsSupported(ARMv7)) {
    CpuFeatures::Scope scope(ARMv7);
    ldrd(dst1, dst2, src, cond);
  } else {
    MemOperand src2(src);
    src2.set_offset(src2.offset() + 4);
    if (dst1.is(src.rn())) {
      ldr(dst2, src2, cond);
      ldr(dst1, src, cond);
    } else {
      ldr(dst1, src, cond);
      ldr(dst2, src2, cond);
    }
  }
}
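
// Explanatory note (not part of the original source): ldrd requires an
// even/odd register pair (dst1 even, dst2 == dst1 + 1), which the asserts
// above enforce even on the two-ldr fallback path. The fallback also orders
// the loads so the base register is overwritten last: when dst1 aliases the
// base register src.rn(), loading dst2 from [base + 4] first keeps the base
// intact for the second load.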


void MacroAssembler::Strd(Register src1, Register src2,
                          const MemOperand& dst, Condition cond) {
  ASSERT(dst.rm().is(no_reg));
  ASSERT(!src1.is(lr));  // r14.
  ASSERT_EQ(0, src1.code() % 2);
  ASSERT_EQ(src1.code() + 1, src2.code());

  // Generate two str instructions if strd is not available.
  if (CpuFeatures::IsSupported(ARMv7)) {
    CpuFeatures::Scope scope(ARMv7);
    strd(src1, src2, dst, cond);
  } else {
    MemOperand dst2(dst);
    dst2.set_offset(dst2.offset() + 4);
    str(src1, dst, cond);
    str(src2, dst2, cond);
  }
}


void MacroAssembler::EnterFrame(StackFrame::Type type) {
  // r0-r3: preserved
  stm(db_w, sp, cp.bit() | fp.bit() | lr.bit());
  mov(ip, Operand(Smi::FromInt(type)));
  push(ip);
  mov(ip, Operand(CodeObject()));
  push(ip);
  add(fp, sp, Operand(3 * kPointerSize));  // Adjust FP to point to saved FP.
}


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  // r0: preserved
  // r1: preserved
  // r2: preserved

  // Drop the execution stack down to the frame pointer and restore
  // the caller frame pointer and return address.
  mov(sp, fp);
  ldm(ia_w, sp, fp.bit() | lr.bit());
}


void MacroAssembler::EnterExitFrame(ExitFrame::Mode mode) {
  // Compute the argv pointer and keep it in a callee-saved register.
  // r0 is argc.
  add(r6, sp, Operand(r0, LSL, kPointerSizeLog2));
  sub(r6, r6, Operand(kPointerSize));

  // Compute callee's stack pointer before making changes and save it as
  // ip register so that it is restored as sp register on exit, thereby
  // popping the args.

  // ip = sp + kPointerSize * #args;
  add(ip, sp, Operand(r0, LSL, kPointerSizeLog2));

  // Prepare the stack to be aligned when calling into C. After this point
  // there are 5 pushes before the call into C, so the stack needs to be
  // aligned after 5 pushes.
  int frame_alignment = ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment != kPointerSize) {
    // The following code needs to be more general if this assert does not
    // hold.
    ASSERT(frame_alignment == 2 * kPointerSize);
    // With 5 pushes left the frame must be unaligned at this point.
    mov(r7, Operand(Smi::FromInt(0)));
    tst(sp, Operand((frame_alignment - kPointerSize) & frame_alignment_mask));
    push(r7, eq);  // Push if aligned to make it unaligned.
  }

  // Push in reverse order: caller_fp, sp_on_exit, and caller_pc.
  stm(db_w, sp, fp.bit() | ip.bit() | lr.bit());
  mov(fp, Operand(sp));  // Setup new frame pointer.

  mov(ip, Operand(CodeObject()));
  push(ip);  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  mov(ip, Operand(ExternalReference(Top::k_c_entry_fp_address)));
  str(fp, MemOperand(ip));
  mov(ip, Operand(ExternalReference(Top::k_context_address)));
  str(cp, MemOperand(ip));

  // Setup argc and the builtin function in callee-saved registers.
  mov(r4, Operand(r0));
  mov(r5, Operand(r1));


#ifdef ENABLE_DEBUGGER_SUPPORT
  // Save the state of all registers to the stack from the memory
  // location. This is needed to allow nested break points.
  if (mode == ExitFrame::MODE_DEBUG) {
    // Use sp as base to push.
    CopyRegistersFromMemoryToStack(sp, kJSCallerSaved);
  }
#endif
}
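
// Explanatory note (not part of the original source):
// ActivationFrameAlignment() is typically 8 bytes under the ARM EABI while
// each push is only 4 bytes, so the alignment block above may emit one extra
// padding push. Five 4-byte pushes (20 bytes) still separate that point from
// the C call, so sp must be misaligned there for it to come out 8-byte
// aligned at the call; the tst/push(eq) pair therefore pushes the padding
// word exactly when sp is still aligned.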


void MacroAssembler::InitializeNewString(Register string,
                                         Register length,
                                         Heap::RootListIndex map_index,
                                         Register scratch1,
                                         Register scratch2) {
  mov(scratch1, Operand(length, LSL, kSmiTagSize));
  LoadRoot(scratch2, map_index);
  str(scratch1, FieldMemOperand(string, String::kLengthOffset));
  mov(scratch1, Operand(String::kEmptyHashField));
  str(scratch2, FieldMemOperand(string, HeapObject::kMapOffset));
  str(scratch1, FieldMemOperand(string, String::kHashFieldOffset));
}


int MacroAssembler::ActivationFrameAlignment() {
#if defined(V8_HOST_ARCH_ARM)
  // Running on the real platform. Use the alignment as mandated by the local
  // environment.
  // Note: This will break if we ever start generating snapshots on one ARM
  // platform for another ARM platform with a different alignment.
  return OS::ActivationFrameAlignment();
#else  // defined(V8_HOST_ARCH_ARM)
  // If we are using the simulator then we should always align to the expected
  // alignment. As the simulator is used to generate snapshots we do not know
  // if the target platform will need alignment, so this is controlled from a
  // flag.
  return FLAG_sim_stack_alignment;
#endif  // defined(V8_HOST_ARCH_ARM)
}


void MacroAssembler::LeaveExitFrame(ExitFrame::Mode mode) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Restore the memory copy of the registers by digging them out from
  // the stack. This is needed to allow nested break points.
  if (mode == ExitFrame::MODE_DEBUG) {
    // This code intentionally clobbers r2 and r3.
    const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
    const int kOffset = ExitFrameConstants::kCodeOffset - kCallerSavedSize;
    add(r3, fp, Operand(kOffset));
    CopyRegistersFromStackToMemory(r3, r2, kJSCallerSaved);
  }
#endif

  // Clear top frame.
  mov(r3, Operand(0));
  mov(ip, Operand(ExternalReference(Top::k_c_entry_fp_address)));
  str(r3, MemOperand(ip));

  // Restore current context from top and clear it in debug mode.
  mov(ip, Operand(ExternalReference(Top::k_context_address)));
  ldr(cp, MemOperand(ip));
#ifdef DEBUG
  str(r3, MemOperand(ip));
#endif

  // Pop the arguments, restore registers, and return.
  mov(sp, Operand(fp));  // respect ABI stack constraint
  ldm(ia, sp, fp.bit() | sp.bit() | pc.bit());
}


void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    Register code_reg,
                                    Label* done,
                                    InvokeFlag flag) {
  bool definitely_matches = false;
  Label regular_invoke;

  // Check whether the expected and actual arguments count match. If not,
  // set up registers according to contract with ArgumentsAdaptorTrampoline:
  // r0: actual arguments count
  // r1: function (passed through to callee)
  // r2: expected arguments count
  // r3: callee code entry

  // The code below is made a lot easier because the calling code already sets
  // up actual and expected registers according to the contract if values are
  // passed in registers.
  ASSERT(actual.is_immediate() || actual.reg().is(r0));
  ASSERT(expected.is_immediate() || expected.reg().is(r2));
  ASSERT((!code_constant.is_null() && code_reg.is(no_reg)) || code_reg.is(r3));

  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(r0, Operand(actual.immediate()));
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip the adaptation code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        mov(r2, Operand(expected.immediate()));
      }
    }
  } else {
    if (actual.is_immediate()) {
      cmp(expected.reg(), Operand(actual.immediate()));
      b(eq, &regular_invoke);
      mov(r0, Operand(actual.immediate()));
    } else {
      cmp(expected.reg(), Operand(actual.reg()));
      b(eq, &regular_invoke);
    }
  }

  if (!definitely_matches) {
    if (!code_constant.is_null()) {
      mov(r3, Operand(code_constant));
      add(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag));
    }

    Handle<Code> adaptor =
        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
    if (flag == CALL_FUNCTION) {
      Call(adaptor, RelocInfo::CODE_TARGET);
      b(done);
    } else {
      Jump(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&regular_invoke);
  }
}


void MacroAssembler::InvokeCode(Register code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag) {
  Label done;

  InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
  if (flag == CALL_FUNCTION) {
    Call(code);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    Jump(code);
  }

  // Continue here if InvokePrologue does handle the invocation due to
  // mismatched parameter counts.
  bind(&done);
}


void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag) {
  Label done;

  InvokePrologue(expected, actual, code, no_reg, &done, flag);
  if (flag == CALL_FUNCTION) {
    Call(code, rmode);
  } else {
    Jump(code, rmode);
  }

  // Continue here if InvokePrologue does handle the invocation due to
  // mismatched parameter counts.
  bind(&done);
}


void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  // Contract with called JS functions requires that function is passed in r1.
  ASSERT(fun.is(r1));

  Register expected_reg = r2;
  Register code_reg = r3;

  ldr(code_reg, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
  ldr(expected_reg,
      FieldMemOperand(code_reg,
                      SharedFunctionInfo::kFormalParameterCountOffset));
  mov(expected_reg, Operand(expected_reg, ASR, kSmiTagSize));
  ldr(code_reg,
      MemOperand(r1, JSFunction::kCodeOffset - kHeapObjectTag));
  add(code_reg, code_reg, Operand(Code::kHeaderSize - kHeapObjectTag));

  ParameterCount expected(expected_reg);
  InvokeCode(code_reg, expected, actual, flag);
}


void MacroAssembler::InvokeFunction(JSFunction* function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(function->is_compiled());

  // Get the function and setup the context.
  mov(r1, Operand(Handle<JSFunction>(function)));
  ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  // Invoke the cached code.
  Handle<Code> code(function->code());
  ParameterCount expected(function->shared()->formal_parameter_count());
  InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag);
}

#ifdef ENABLE_DEBUGGER_SUPPORT
void MacroAssembler::SaveRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of registers to memory location.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      mov(ip, Operand(ExternalReference(Debug_Address::Register(i))));
      str(reg, MemOperand(ip));
    }
  }
}


void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of memory location to registers.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      mov(ip, Operand(ExternalReference(Debug_Address::Register(i))));
      ldr(reg, MemOperand(ip));
    }
  }
}


void MacroAssembler::CopyRegistersFromMemoryToStack(Register base,
                                                    RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of the memory location to the stack and adjust base.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      mov(ip, Operand(ExternalReference(Debug_Address::Register(i))));
      ldr(ip, MemOperand(ip));
      str(ip, MemOperand(base, 4, NegPreIndex));
    }
  }
}


void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
                                                    Register scratch,
                                                    RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of the stack to the memory location and adjust base.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      mov(ip, Operand(ExternalReference(Debug_Address::Register(i))));
      ldr(scratch, MemOperand(base, 4, PostIndex));
      str(scratch, MemOperand(ip));
    }
  }
}


void MacroAssembler::DebugBreak() {
  ASSERT(allow_stub_calls());
  mov(r0, Operand(0));
  mov(r1, Operand(ExternalReference(Runtime::kDebugBreak)));
  CEntryStub ces(1);
  Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}
#endif


void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  // Adjust this code if not the case.
  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
  // The pc (return address) is passed in register lr.
  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      mov(r3, Operand(StackHandler::TRY_CATCH));
    } else {
      mov(r3, Operand(StackHandler::TRY_FINALLY));
    }
    ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize
           && StackHandlerConstants::kFPOffset == 2 * kPointerSize
           && StackHandlerConstants::kPCOffset == 3 * kPointerSize);
    stm(db_w, sp, r3.bit() | fp.bit() | lr.bit());
    // Save the current handler as the next handler.
    mov(r3, Operand(ExternalReference(Top::k_handler_address)));
    ldr(r1, MemOperand(r3));
    ASSERT(StackHandlerConstants::kNextOffset == 0);
    push(r1);
    // Link this handler as the new current one.
    str(sp, MemOperand(r3));
  } else {
    // Must preserve r0-r4, r5-r7 are available.
    ASSERT(try_location == IN_JS_ENTRY);
    // The frame pointer does not point to a JS frame so we save NULL
    // for fp. We expect the code throwing an exception to check fp
    // before dereferencing it to restore the context.
    mov(ip, Operand(0));  // To save a NULL frame pointer.
    mov(r6, Operand(StackHandler::ENTRY));
    ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize
           && StackHandlerConstants::kFPOffset == 2 * kPointerSize
           && StackHandlerConstants::kPCOffset == 3 * kPointerSize);
    stm(db_w, sp, r6.bit() | ip.bit() | lr.bit());
    // Save the current handler as the next handler.
    mov(r7, Operand(ExternalReference(Top::k_handler_address)));
    ldr(r6, MemOperand(r7));
    ASSERT(StackHandlerConstants::kNextOffset == 0);
    push(r6);
    // Link this handler as the new current one.
    str(sp, MemOperand(r7));
  }
}


void MacroAssembler::PopTryHandler() {
  ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
  pop(r1);
  mov(ip, Operand(ExternalReference(Top::k_handler_address)));
  add(sp, sp, Operand(StackHandlerConstants::kSize - kPointerSize));
  str(r1, MemOperand(ip));
}


void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));
  ASSERT(!holder_reg.is(ip));
  ASSERT(!scratch.is(ip));

  // Load current lexical context from the stack frame.
  ldr(scratch, MemOperand(fp, StandardFrameConstants::kContextOffset));
  // In debug mode, make sure the lexical context is set.
#ifdef DEBUG
  cmp(scratch, Operand(0));
  Check(ne, "we should not have an empty lexical context");
#endif

  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  ldr(scratch, FieldMemOperand(scratch, offset));
  ldr(scratch, FieldMemOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check that the context is a global context.
  if (FLAG_debug_code) {
    // TODO(119): avoid push(holder_reg)/pop(holder_reg).
    // Cannot use ip as a temporary in this verification code, because ip is
    // clobbered as part of cmp with an object Operand.
    push(holder_reg);  // Temporarily save holder on the stack.
    // Read the first word and compare to the global_context_map.
    ldr(holder_reg, FieldMemOperand(scratch, HeapObject::kMapOffset));
    LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
    cmp(holder_reg, ip);
    Check(eq, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);  // Restore holder.
  }

  // Check if both contexts are the same.
  ldr(ip, FieldMemOperand(holder_reg, JSGlobalProxy::kContextOffset));
  cmp(scratch, Operand(ip));
  b(eq, &same_contexts);

  // Check that the context is a global context.
  if (FLAG_debug_code) {
    // TODO(119): avoid push(holder_reg)/pop(holder_reg).
    // Cannot use ip as a temporary in this verification code, because ip is
    // clobbered as part of cmp with an object Operand.
    push(holder_reg);  // Temporarily save holder on the stack.
    mov(holder_reg, ip);  // Move ip to its holding place.
    LoadRoot(ip, Heap::kNullValueRootIndex);
    cmp(holder_reg, ip);
    Check(ne, "JSGlobalProxy::context() should not be null.");

    ldr(holder_reg, FieldMemOperand(holder_reg, HeapObject::kMapOffset));
    LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
    cmp(holder_reg, ip);
    Check(eq, "JSGlobalObject::global_context should be a global context.");
    // Restoring ip is not needed; ip is reloaded below.
    pop(holder_reg);  // Restore holder.
    // Restore ip to holder's context.
    ldr(ip, FieldMemOperand(holder_reg, JSGlobalProxy::kContextOffset));
  }

  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;

  ldr(scratch, FieldMemOperand(scratch, token_offset));
  ldr(ip, FieldMemOperand(ip, token_offset));
  cmp(scratch, Operand(ip));
  b(ne, miss);

  bind(&same_contexts);
}


void MacroAssembler::AllocateInNewSpace(int object_size,
                                        Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(scratch1));
  ASSERT(!scratch1.is(scratch2));

  // Make object size into bytes.
  if ((flags & SIZE_IN_WORDS) != 0) {
    object_size *= kPointerSize;
  }
  ASSERT_EQ(0, object_size & kObjectAlignmentMask);

  // Load address of new object into result and allocation top address into
  // scratch1.
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();
  mov(scratch1, Operand(new_space_allocation_top));
  if ((flags & RESULT_CONTAINS_TOP) == 0) {
    ldr(result, MemOperand(scratch1));
  } else if (FLAG_debug_code) {
    // Assert that result actually contains top on entry. scratch2 is used
    // immediately below so this use of scratch2 does not cause difference
    // with respect to register content between debug and release mode.
    ldr(scratch2, MemOperand(scratch1));
    cmp(result, scratch2);
    Check(eq, "Unexpected allocation top");
  }

  // Calculate new top and bail out if new space is exhausted. Use result
  // to calculate the new top.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  mov(scratch2, Operand(new_space_allocation_limit));
  ldr(scratch2, MemOperand(scratch2));
  add(result, result, Operand(object_size));
  cmp(result, Operand(scratch2));
  b(hi, gc_required);
  str(result, MemOperand(scratch1));

  // Tag and adjust back to start of new object.
  if ((flags & TAG_OBJECT) != 0) {
    sub(result, result, Operand(object_size - kHeapObjectTag));
  } else {
    sub(result, result, Operand(object_size));
  }
}
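
// Explanatory note (not part of the original source): allocation in new space
// is a bump-pointer scheme. The current allocation top is loaded, advanced by
// object_size and compared against the allocation limit; if the new top would
// pass the limit the code branches to gc_required, otherwise the new top is
// written back and result is adjusted back down to the start of the object,
// optionally with the kHeapObjectTag bit set so it is a tagged heap pointer.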


void MacroAssembler::AllocateInNewSpace(Register object_size,
                                        Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(scratch1));
  ASSERT(!scratch1.is(scratch2));

  // Load address of new object into result and allocation top address into
  // scratch1.
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();
  mov(scratch1, Operand(new_space_allocation_top));
  if ((flags & RESULT_CONTAINS_TOP) == 0) {
    ldr(result, MemOperand(scratch1));
  } else if (FLAG_debug_code) {
    // Assert that result actually contains top on entry. scratch2 is used
    // immediately below so this use of scratch2 does not cause difference
    // with respect to register content between debug and release mode.
    ldr(scratch2, MemOperand(scratch1));
    cmp(result, scratch2);
    Check(eq, "Unexpected allocation top");
  }

  // Calculate new top and bail out if new space is exhausted. Use result
  // to calculate the new top. If the object size is given in words, a shift
  // is required to get the number of bytes.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  mov(scratch2, Operand(new_space_allocation_limit));
  ldr(scratch2, MemOperand(scratch2));
  if ((flags & SIZE_IN_WORDS) != 0) {
    add(result, result, Operand(object_size, LSL, kPointerSizeLog2));
  } else {
    add(result, result, Operand(object_size));
  }
  cmp(result, Operand(scratch2));
  b(hi, gc_required);

  // Update allocation top. result temporarily holds the new top.
  if (FLAG_debug_code) {
    tst(result, Operand(kObjectAlignmentMask));
    Check(eq, "Unaligned allocation in new space");
  }
  str(result, MemOperand(scratch1));

  // Adjust back to start of new object.
  if ((flags & SIZE_IN_WORDS) != 0) {
    sub(result, result, Operand(object_size, LSL, kPointerSizeLog2));
  } else {
    sub(result, result, Operand(object_size));
  }

  // Tag object if requested.
  if ((flags & TAG_OBJECT) != 0) {
    add(result, result, Operand(kHeapObjectTag));
  }
}


void MacroAssembler::UndoAllocationInNewSpace(Register object,
                                              Register scratch) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Make sure the object has no tag before resetting top.
  and_(object, object, Operand(~kHeapObjectTagMask));
#ifdef DEBUG
  // Check that the object un-allocated is below the current top.
  mov(scratch, Operand(new_space_allocation_top));
  ldr(scratch, MemOperand(scratch));
  cmp(object, scratch);
  Check(lt, "Undo allocation of non allocated memory");
#endif
  // Write the address of the object to un-allocate as the current top.
  mov(scratch, Operand(new_space_allocation_top));
  str(object, MemOperand(scratch));
}


void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, Operand(length, LSL, 1));  // Length in bytes, not chars.
  add(scratch1, scratch1,
      Operand(kObjectAlignmentMask + SeqTwoByteString::kHeaderSize));
  and_(scratch1, scratch1, Operand(~kObjectAlignmentMask));

  // Allocate two-byte string in new space.
  AllocateInNewSpace(scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  InitializeNewString(result,
                      length,
                      Heap::kStringMapRootIndex,
                      scratch1,
                      scratch2);
}


void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
  ASSERT(kCharSize == 1);
  add(scratch1, length,
      Operand(kObjectAlignmentMask + SeqAsciiString::kHeaderSize));
  and_(scratch1, scratch1, Operand(~kObjectAlignmentMask));

  // Allocate ASCII string in new space.
  AllocateInNewSpace(scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  InitializeNewString(result,
                      length,
                      Heap::kAsciiStringMapRootIndex,
                      scratch1,
                      scratch2);
}


void MacroAssembler::AllocateTwoByteConsString(Register result,
                                               Register length,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  InitializeNewString(result,
                      length,
                      Heap::kConsStringMapRootIndex,
                      scratch1,
                      scratch2);
}


void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register length,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  InitializeNewString(result,
                      length,
                      Heap::kConsAsciiStringMapRootIndex,
                      scratch1,
                      scratch2);
}


void MacroAssembler::CompareObjectType(Register object,
                                       Register map,
                                       Register type_reg,
                                       InstanceType type) {
  ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
  CompareInstanceType(map, type_reg, type);
}


void MacroAssembler::CompareInstanceType(Register map,
                                         Register type_reg,
                                         InstanceType type) {
  ldrb(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
  cmp(type_reg, Operand(type));
}


void MacroAssembler::CheckMap(Register obj,
                              Register scratch,
                              Handle<Map> map,
                              Label* fail,
                              bool is_heap_object) {
  if (!is_heap_object) {
    BranchOnSmi(obj, fail);
  }
  ldr(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
  mov(ip, Operand(map));
  cmp(scratch, ip);
  b(ne, fail);
}


void MacroAssembler::CheckMap(Register obj,
                              Register scratch,
                              Heap::RootListIndex index,
                              Label* fail,
                              bool is_heap_object) {
  if (!is_heap_object) {
    BranchOnSmi(obj, fail);
  }
  ldr(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
  LoadRoot(ip, index);
  cmp(scratch, ip);
  b(ne, fail);
}


void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss) {
  // Check that the receiver isn't a smi.
  BranchOnSmi(function, miss);

  // Check that the function really is a function. Load map into result reg.
  CompareObjectType(function, result, scratch, JS_FUNCTION_TYPE);
  b(ne, miss);

  // Make sure that the function has an instance prototype.
  Label non_instance;
  ldrb(scratch, FieldMemOperand(result, Map::kBitFieldOffset));
  tst(scratch, Operand(1 << Map::kHasNonInstancePrototype));
  b(ne, &non_instance);

  // Get the prototype or initial map from the function.
  ldr(result,
      FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  cmp(result, ip);
  b(eq, miss);

  // If the function does not have an initial map, we're done.
  Label done;
  CompareObjectType(result, scratch, scratch, MAP_TYPE);
  b(ne, &done);

  // Get the prototype from the initial map.
  ldr(result, FieldMemOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  ldr(result, FieldMemOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}


void MacroAssembler::CallStub(CodeStub* stub, Condition cond) {
  ASSERT(allow_stub_calls());  // stub calls are not allowed in some stubs
  Call(stub->GetCode(), RelocInfo::CODE_TARGET, cond);
}


void MacroAssembler::TailCallStub(CodeStub* stub, Condition cond) {
  ASSERT(allow_stub_calls());  // stub calls are not allowed in some stubs
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond);
}


void MacroAssembler::StubReturn(int argc, Condition cond) {
  ASSERT(argc >= 1 && generating_stub());
  if (argc > 1) {
    add(sp, sp, Operand((argc - 1) * kPointerSize), LeaveCC, cond);
  }
  Ret(cond);
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    add(sp, sp, Operand(num_arguments * kPointerSize));
  }
  LoadRoot(r0, Heap::kUndefinedValueRootIndex);
}


void MacroAssembler::IntegerToDoubleConversionWithVFP3(Register inReg,
                                                       Register outHighReg,
                                                       Register outLowReg) {
  // ARMv7 VFP3 instructions to implement integer to double conversion.
  mov(r7, Operand(inReg, ASR, kSmiTagSize));
  vmov(s15, r7);
  vcvt_f64_s32(d7, s15);
  vmov(outLowReg, outHighReg, d7);
}


void MacroAssembler::ObjectToDoubleVFPRegister(Register object,
                                               DwVfpRegister result,
                                               Register scratch1,
                                               Register scratch2,
                                               Register heap_number_map,
                                               SwVfpRegister scratch3,
                                               Label* not_number,
                                               ObjectToDoubleFlags flags) {
  Label done;
  if ((flags & OBJECT_NOT_SMI) == 0) {
    Label not_smi;
    BranchOnNotSmi(object, &not_smi);
    // Remove smi tag and convert to double.
    mov(scratch1, Operand(object, ASR, kSmiTagSize));
    vmov(scratch3, scratch1);
    vcvt_f64_s32(result, scratch3);
    b(&done);
    bind(&not_smi);
  }
  // Check for heap number and load double value from it.
  ldr(scratch1, FieldMemOperand(object, HeapObject::kMapOffset));
  sub(scratch2, object, Operand(kHeapObjectTag));
  cmp(scratch1, heap_number_map);
  b(ne, not_number);
  if ((flags & AVOID_NANS_AND_INFINITIES) != 0) {
    // If the exponent is all ones the number is either a NaN or +/-Infinity.
    ldr(scratch1, FieldMemOperand(object, HeapNumber::kExponentOffset));
    Sbfx(scratch1,
         scratch1,
         HeapNumber::kExponentShift,
         HeapNumber::kExponentBits);
    // An all-ones value sign-extends to -1.
    cmp(scratch1, Operand(-1));
    b(eq, not_number);
  }
  vldr(result, scratch2, HeapNumber::kValueOffset);
  bind(&done);
}


void MacroAssembler::SmiToDoubleVFPRegister(Register smi,
                                            DwVfpRegister value,
                                            Register scratch1,
                                            SwVfpRegister scratch2) {
  mov(scratch1, Operand(smi, ASR, kSmiTagSize));
  vmov(scratch2, scratch1);
  vcvt_f64_s32(value, scratch2);
}


void MacroAssembler::GetLeastBitsFromSmi(Register dst,
                                         Register src,
                                         int num_least_bits) {
  if (CpuFeatures::IsSupported(ARMv7)) {
    ubfx(dst, src, kSmiTagSize, num_least_bits);
  } else {
    mov(dst, Operand(src, ASR, kSmiTagSize));
    and_(dst, dst, Operand((1 << num_least_bits) - 1));
  }
}
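
// Explanatory note (not part of the original source): smis are stored shifted
// left by kSmiTagSize (one bit, tag 0), so the integer value starts at bit 1.
// On ARMv7 the low bits of the value can be taken directly with ubfx starting
// at bit kSmiTagSize; otherwise the smi is untagged with an arithmetic shift
// and then masked. For example, with num_least_bits == 3 the smi encoding of
// 13 is binary 11010, and both paths produce 13 & 7 == 5.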


void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // All parameters are on the stack. r0 has the return value after call.

  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  mov(r0, Operand(num_arguments));
  mov(r1, Operand(ExternalReference(f)));
  CEntryStub stub(1);
  CallStub(&stub);
}


void MacroAssembler::CallRuntime(Runtime::FunctionId fid, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(fid), num_arguments);
}


void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments) {
  mov(r0, Operand(num_arguments));
  mov(r1, Operand(ext));

  CEntryStub stub(1);
  CallStub(&stub);
}


void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  mov(r0, Operand(num_arguments));
  JumpToExternalReference(ext);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid), num_arguments, result_size);
}


void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin) {
#if defined(__thumb__)
  // Thumb mode builtin.
  ASSERT((reinterpret_cast<intptr_t>(builtin.address()) & 1) == 1);
#endif
  mov(r1, Operand(builtin));
  CEntryStub stub(1);
  Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                   InvokeJSFlags flags) {
  GetBuiltinEntry(r2, id);
  if (flags == CALL_JS) {
    Call(r2);
  } else {
    ASSERT(flags == JUMP_JS);
    Jump(r2);
  }
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(r1));

  // Load the builtins object into target register.
  ldr(target, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
  ldr(target, FieldMemOperand(target, GlobalObject::kBuiltinsOffset));

  // Load the JavaScript builtin function from the builtins object.
  ldr(r1, FieldMemOperand(target,
                          JSBuiltinsObject::OffsetOfFunctionWithId(id)));

  // Load the code entry point from the builtins object.
  ldr(target, FieldMemOperand(target,
                              JSBuiltinsObject::OffsetOfCodeWithId(id)));
  if (FLAG_debug_code) {
    // Make sure the code objects in the builtins object and in the
    // builtin function are the same.
    push(r1);
    ldr(r1, FieldMemOperand(r1, JSFunction::kCodeOffset));
    cmp(r1, target);
    Assert(eq, "Builtin code object changed");
    pop(r1);
  }
  add(target, target, Operand(Code::kHeaderSize - kHeapObjectTag));
}


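// Writes the given value directly into the counter's memory cell when native
// code counters are enabled.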
void MacroAssembler::SetCounter(StatsCounter* counter, int value,
                                Register scratch1, Register scratch2) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(scratch1, Operand(value));
    mov(scratch2, Operand(ExternalReference(counter)));
    str(scratch1, MemOperand(scratch2));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value,
                                      Register scratch1, Register scratch2) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(scratch2, Operand(ExternalReference(counter)));
    ldr(scratch1, MemOperand(scratch2));
    add(scratch1, scratch1, Operand(value));
    str(scratch1, MemOperand(scratch2));
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value,
                                      Register scratch1, Register scratch2) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(scratch2, Operand(ExternalReference(counter)));
    ldr(scratch1, MemOperand(scratch2));
    sub(scratch1, scratch1, Operand(value));
    str(scratch1, MemOperand(scratch2));
  }
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code)
    Check(cc, msg);
}


void MacroAssembler::AssertRegisterIsRoot(Register reg,
                                          Heap::RootListIndex index) {
  if (FLAG_debug_code) {
    LoadRoot(ip, index);
    cmp(reg, ip);
    Check(eq, "Register did not match expected root");
  }
}


void MacroAssembler::AssertFastElements(Register elements) {
  if (FLAG_debug_code) {
    ASSERT(!elements.is(ip));
    Label ok;
    push(elements);
    ldr(elements, FieldMemOperand(elements, HeapObject::kMapOffset));
    LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
    cmp(elements, ip);
    b(eq, &ok);
    LoadRoot(ip, Heap::kFixedCOWArrayMapRootIndex);
    cmp(elements, ip);
    b(eq, &ok);
    Abort("JSObject with fast elements map has slow elements");
    bind(&ok);
    pop(elements);
  }
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  b(cc, &L);
  Abort(msg);
  // will not return here
  bind(&L);
}


void MacroAssembler::Abort(const char* msg) {
  Label abort_start;
  bind(&abort_start);
  // We want to pass the msg string like a smi to avoid GC
  // problems; however, msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  set_allow_stub_calls(true);

  mov(r0, Operand(p0));
  push(r0);
  mov(r0, Operand(Smi::FromInt(p1 - p0)));
  push(r0);
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
  if (is_const_pool_blocked()) {
    // If the calling code cares about the exact number of
    // instructions generated, we insert padding here to keep the size
    // of the Abort macro constant.
    static const int kExpectedAbortInstructions = 10;
    int abort_instructions = InstructionsGeneratedSince(&abort_start);
    ASSERT(abort_instructions <= kExpectedAbortInstructions);
    while (abort_instructions++ < kExpectedAbortInstructions) {
      nop();
    }
  }
}


void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    ldr(dst, MemOperand(cp, Context::SlotOffset(Context::CLOSURE_INDEX)));
    // Load the function context (which is the incoming, outer context).
    ldr(dst, FieldMemOperand(dst, JSFunction::kContextOffset));
    for (int i = 1; i < context_chain_length; i++) {
      ldr(dst, MemOperand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
      ldr(dst, FieldMemOperand(dst, JSFunction::kContextOffset));
    }
    // The context may be an intermediate context, not a function context.
    ldr(dst, MemOperand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  } else {  // Slot is in the current function context.
    // The context may be an intermediate context, not a function context.
    ldr(dst, MemOperand(cp, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  }
}


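// The second tst below is conditional on eq, so it only executes when reg1 is
// already known to be a smi; if either register has its tag bit set the flags
// end up as ne and the branch is taken.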
void MacroAssembler::JumpIfNotBothSmi(Register reg1,
                                      Register reg2,
                                      Label* on_not_both_smi) {
  ASSERT_EQ(0, kSmiTag);
  tst(reg1, Operand(kSmiTagMask));
  tst(reg2, Operand(kSmiTagMask), eq);
  b(ne, on_not_both_smi);
}


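// Same conditional-tst technique as above with the conditions inverted: the
// branch is taken as soon as either register is found to be a smi.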
void MacroAssembler::JumpIfEitherSmi(Register reg1,
                                     Register reg2,
                                     Label* on_either_smi) {
  ASSERT_EQ(0, kSmiTag);
  tst(reg1, Operand(kSmiTagMask));
  tst(reg2, Operand(kSmiTagMask), ne);
  b(eq, on_either_smi);
}


void MacroAssembler::AbortIfSmi(Register object) {
  ASSERT_EQ(0, kSmiTag);
  tst(object, Operand(kSmiTagMask));
  Assert(ne, "Operand is a smi");
}


void MacroAssembler::JumpIfNonSmisNotBothSequentialAsciiStrings(
    Register first,
    Register second,
    Register scratch1,
    Register scratch2,
    Label* failure) {
  // Test that both first and second are sequential ASCII strings.
  // Assume that they are non-smis.
  ldr(scratch1, FieldMemOperand(first, HeapObject::kMapOffset));
  ldr(scratch2, FieldMemOperand(second, HeapObject::kMapOffset));
  ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  ldrb(scratch2, FieldMemOperand(scratch2, Map::kInstanceTypeOffset));

  JumpIfBothInstanceTypesAreNotSequentialAscii(scratch1,
                                               scratch2,
                                               scratch1,
                                               scratch2,
                                               failure);
}


void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register first,
                                                         Register second,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         Label* failure) {
  // Check that neither is a smi.
  ASSERT_EQ(0, kSmiTag);
  and_(scratch1, first, Operand(second));
  tst(scratch1, Operand(kSmiTagMask));
  b(eq, failure);
  JumpIfNonSmisNotBothSequentialAsciiStrings(first,
                                             second,
                                             scratch1,
                                             scratch2,
                                             failure);
}


// Allocates a heap number or jumps to the gc_required label if the young
// space is full and a scavenge is needed.
void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Register heap_number_map,
                                        Label* gc_required) {
  // Allocate an object in the heap for the heap number and tag it as a heap
  // object.
  AllocateInNewSpace(HeapNumber::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Store heap number map in the allocated object.
  AssertRegisterIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
  str(heap_number_map, FieldMemOperand(result, HeapObject::kMapOffset));
}


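// Allocates a heap number and stores the VFP value into it. The heap object
// tag is stripped from the result pointer so the plain field offset can be
// used with vstr.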
void MacroAssembler::AllocateHeapNumberWithValue(Register result,
                                                 DwVfpRegister value,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Register heap_number_map,
                                                 Label* gc_required) {
  AllocateHeapNumber(result, scratch1, scratch2, heap_number_map, gc_required);
  sub(scratch1, result, Operand(kHeapObjectTag));
  vstr(value, scratch1, HeapNumber::kValueOffset);
}


// Copies a fixed number of fields of heap objects from src to dst.
void MacroAssembler::CopyFields(Register dst,
                                Register src,
                                RegList temps,
                                int field_count) {
  // At least one of the first 15 registers (r0-r14) must be listed in temps.
  ASSERT((temps & ((1 << 15) - 1)) != 0);
  ASSERT((temps & dst.bit()) == 0);
  ASSERT((temps & src.bit()) == 0);
  // Primitive implementation using only one temporary register.

  Register tmp = no_reg;
  // Find a temp register in the temps list.
  for (int i = 0; i < 15; i++) {
    if ((temps & (1 << i)) != 0) {
      tmp.set_code(i);
      break;
    }
  }
  ASSERT(!tmp.is(no_reg));

  for (int i = 0; i < field_count; i++) {
    ldr(tmp, FieldMemOperand(src, i * kPointerSize));
    str(tmp, FieldMemOperand(dst, i * kPointerSize));
  }
}


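// Counts the leading zero bits of source into zeros. Without clz the count is
// accumulated by a branch-free binary search over half-words, bytes, nibbles,
// bit pairs and the top bit.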
void MacroAssembler::CountLeadingZeros(Register zeros,   // Answer.
                                       Register source,  // Input.
                                       Register scratch) {
  ASSERT(!zeros.is(source));
  ASSERT(!zeros.is(scratch));
  ASSERT(!scratch.is(ip));
  ASSERT(!source.is(ip));
  ASSERT(!zeros.is(ip));
#ifdef CAN_USE_ARMV5_INSTRUCTIONS
  clz(zeros, source);  // This instruction is only supported from ARMv5 up.
#else
  mov(zeros, Operand(0));
  Move(scratch, source);
  // Top 16.
  tst(scratch, Operand(0xffff0000));
  add(zeros, zeros, Operand(16), LeaveCC, eq);
  mov(scratch, Operand(scratch, LSL, 16), LeaveCC, eq);
  // Top 8.
  tst(scratch, Operand(0xff000000));
  add(zeros, zeros, Operand(8), LeaveCC, eq);
  mov(scratch, Operand(scratch, LSL, 8), LeaveCC, eq);
  // Top 4.
  tst(scratch, Operand(0xf0000000));
  add(zeros, zeros, Operand(4), LeaveCC, eq);
  mov(scratch, Operand(scratch, LSL, 4), LeaveCC, eq);
  // Top 2.
  tst(scratch, Operand(0xc0000000));
  add(zeros, zeros, Operand(2), LeaveCC, eq);
  mov(scratch, Operand(scratch, LSL, 2), LeaveCC, eq);
  // Top bit.
  tst(scratch, Operand(0x80000000u));
  add(zeros, zeros, Operand(1), LeaveCC, eq);
#endif
}


void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialAscii(
    Register first,
    Register second,
    Register scratch1,
    Register scratch2,
    Label* failure) {
  int kFlatAsciiStringMask =
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask;
  int kFlatAsciiStringTag = ASCII_STRING_TYPE;
  and_(scratch1, first, Operand(kFlatAsciiStringMask));
  and_(scratch2, second, Operand(kFlatAsciiStringMask));
  cmp(scratch1, Operand(kFlatAsciiStringTag));
  // Ignore second test if first test failed.
  cmp(scratch2, Operand(kFlatAsciiStringTag), eq);
  b(ne, failure);
}


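// Branches to failure unless the given instance type describes a sequential
// ASCII string.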
void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(Register type,
                                                            Register scratch,
                                                            Label* failure) {
  int kFlatAsciiStringMask =
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask;
  int kFlatAsciiStringTag = ASCII_STRING_TYPE;
  and_(scratch, type, Operand(kFlatAsciiStringMask));
  cmp(scratch, Operand(kFlatAsciiStringTag));
  b(ne, failure);
}


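// Reserves stack space for the stack-passed arguments of an upcoming C call.
// If the platform requires more than word alignment, sp is aligned down and
// the original sp is saved above the argument slots so that CallCFunction can
// restore it afterwards.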
void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
  int frame_alignment = ActivationFrameAlignment();
  // Up to four simple arguments are passed in registers r0..r3.
  int stack_passed_arguments = (num_arguments <= 4) ? 0 : num_arguments - 4;
  if (frame_alignment > kPointerSize) {
    // Make stack end at alignment and make room for num_arguments - 4 words
    // and the original value of sp.
    mov(scratch, sp);
    sub(sp, sp, Operand((stack_passed_arguments + 1) * kPointerSize));
    ASSERT(IsPowerOf2(frame_alignment));
    and_(sp, sp, Operand(-frame_alignment));
    str(scratch, MemOperand(sp, stack_passed_arguments * kPointerSize));
  } else {
    sub(sp, sp, Operand(stack_passed_arguments * kPointerSize));
  }
}


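// Loads the target address into ip and delegates to the register variant.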
void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  mov(ip, Operand(function));
  CallCFunction(ip, num_arguments);
}


void MacroAssembler::CallCFunction(Register function, int num_arguments) {
  // Make sure that the stack is aligned before calling a C function unless
  // running in the simulator. The simulator has its own alignment check which
  // provides more information.
#if defined(V8_HOST_ARCH_ARM)
  if (FLAG_debug_code) {
    int frame_alignment = OS::ActivationFrameAlignment();
    int frame_alignment_mask = frame_alignment - 1;
    if (frame_alignment > kPointerSize) {
      ASSERT(IsPowerOf2(frame_alignment));
      Label alignment_as_expected;
      tst(sp, Operand(frame_alignment_mask));
      b(eq, &alignment_as_expected);
      // Don't use Check here, as it will call Runtime_Abort, possibly
      // re-entering here.
      stop("Unexpected alignment");
      bind(&alignment_as_expected);
    }
  }
#endif

  // Just call directly. The function called cannot cause a GC, or
  // allow preemption, so the return address in the link register
  // stays correct.
  Call(function);
  int stack_passed_arguments = (num_arguments <= 4) ? 0 : num_arguments - 4;
  if (OS::ActivationFrameAlignment() > kPointerSize) {
    ldr(sp, MemOperand(sp, stack_passed_arguments * kPointerSize));
  } else {
    add(sp, sp, Operand(stack_passed_arguments * kPointerSize));
  }
}


#ifdef ENABLE_DEBUGGER_SUPPORT
CodePatcher::CodePatcher(byte* address, int instructions)
    : address_(address),
      instructions_(instructions),
      size_(instructions * Assembler::kInstrSize),
      masm_(address, size_ + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


void CodePatcher::Emit(Instr x) {
  masm()->emit(x);
}


void CodePatcher::Emit(Address addr) {
  masm()->emit(reinterpret_cast<Instr>(addr));
}
#endif  // ENABLE_DEBUGGER_SUPPORT


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM