// Copyright 2006-2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_ARM)

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "debug.h"
#include "runtime.h"

namespace v8 {
namespace internal {

MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      code_object_(Heap::undefined_value()) {
}


// We always generate ARM code, never Thumb code, even if V8 is compiled to
// Thumb, so we require interworking support.
#if defined(__thumb__) && !defined(USE_THUMB_INTERWORK)
#error "flag -mthumb-interwork missing"
#endif


// We do not support Thumb interworking with an ARM architecture that does not
// support the blx instruction (below v5t). If you know which CPU you are
// compiling for, you can use -march=armv7 or similar.
#if defined(USE_THUMB_INTERWORK) && !defined(CAN_USE_THUMB_INSTRUCTIONS)
# error "For thumb inter-working we require an architecture which supports blx"
#endif


// Using bx does not yield better code, so use it only when required.
#if defined(USE_THUMB_INTERWORK)
#define USE_BX 1
#endif


void MacroAssembler::Jump(Register target, Condition cond) {
#if USE_BX
  bx(target, cond);
#else
  mov(pc, Operand(target), LeaveCC, cond);
#endif
}


void MacroAssembler::Jump(intptr_t target, RelocInfo::Mode rmode,
                          Condition cond) {
#if USE_BX
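  // bx only takes a register operand, so load the target into ip first and
  // then branch through it.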
  mov(ip, Operand(target, rmode), LeaveCC, cond);
  bx(ip, cond);
#else
  mov(pc, Operand(target, rmode), LeaveCC, cond);
#endif
}


void MacroAssembler::Jump(byte* target, RelocInfo::Mode rmode,
                          Condition cond) {
  ASSERT(!RelocInfo::IsCodeTarget(rmode));
  Jump(reinterpret_cast<intptr_t>(target), rmode, cond);
}


void MacroAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
                          Condition cond) {
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  // 'code' is always generated ARM code, never Thumb code.
  Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond);
}


void MacroAssembler::Call(Register target, Condition cond) {
#if USE_BLX
  blx(target, cond);
#else
  // Set lr for return at current pc + 8.
  mov(lr, Operand(pc), LeaveCC, cond);
  mov(pc, Operand(target), LeaveCC, cond);
#endif
}


void MacroAssembler::Call(intptr_t target, RelocInfo::Mode rmode,
                          Condition cond) {
#if USE_BLX
  // On ARMv5 and after, the recommended call sequence is:
  //   ldr ip, [pc, #...]
  //   blx ip

  // The two instructions (ldr and blx) could be separated by a constant
  // pool and the code would still work. The issue comes from the
  // patching code, which expects the ldr to be just above the blx.
  { BlockConstPoolScope block_const_pool(this);
    // Statement positions are expected to be recorded when the target
    // address is loaded. The mov method will automatically record
    // positions when pc is the target; since that is not the case here,
    // we have to do it explicitly.
    WriteRecordedPositions();

    mov(ip, Operand(target, rmode), LeaveCC, cond);
    blx(ip, cond);
  }

  ASSERT(kCallTargetAddressOffset == 2 * kInstrSize);
#else
  // Set lr for return at current pc + 8.
  mov(lr, Operand(pc), LeaveCC, cond);
  // Emit a ldr<cond> pc, [pc + offset of target in constant pool].
  mov(pc, Operand(target, rmode), LeaveCC, cond);

  ASSERT(kCallTargetAddressOffset == kInstrSize);
#endif
}


void MacroAssembler::Call(byte* target, RelocInfo::Mode rmode,
                          Condition cond) {
  ASSERT(!RelocInfo::IsCodeTarget(rmode));
  Call(reinterpret_cast<intptr_t>(target), rmode, cond);
}


void MacroAssembler::Call(Handle<Code> code, RelocInfo::Mode rmode,
                          Condition cond) {
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  // 'code' is always generated ARM code, never Thumb code.
  Call(reinterpret_cast<intptr_t>(code.location()), rmode, cond);
}


void MacroAssembler::Ret(Condition cond) {
#if USE_BX
  bx(lr, cond);
#else
  mov(pc, Operand(lr), LeaveCC, cond);
#endif
}


void MacroAssembler::StackLimitCheck(Label* on_stack_overflow) {
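  // Compare sp against the stack limit from the root list and branch to
  // on_stack_overflow if sp is below it (unsigned comparison).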
  LoadRoot(ip, Heap::kStackLimitRootIndex);
  cmp(sp, Operand(ip));
  b(lo, on_stack_overflow);
}


void MacroAssembler::Drop(int count, Condition cond) {
  if (count > 0) {
    add(sp, sp, Operand(count * kPointerSize), LeaveCC, cond);
  }
}


void MacroAssembler::Swap(Register reg1, Register reg2, Register scratch) {
  if (scratch.is(no_reg)) {
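    // No scratch register available: swap the two registers in place using
    // the three-XOR trick.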
    eor(reg1, reg1, Operand(reg2));
    eor(reg2, reg2, Operand(reg1));
    eor(reg1, reg1, Operand(reg2));
  } else {
    mov(scratch, reg1);
    mov(reg1, reg2);
    mov(reg2, scratch);
  }
}


void MacroAssembler::Call(Label* target) {
  bl(target);
}


void MacroAssembler::Move(Register dst, Handle<Object> value) {
  mov(dst, Operand(value));
}


void MacroAssembler::Move(Register dst, Register src) {
  if (!dst.is(src)) {
    mov(dst, src);
  }
}


void MacroAssembler::SmiJumpTable(Register index, Vector<Label*> targets) {
  // Empty the const pool.
  CheckConstPool(true, true);
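  // pc reads as the address of this add plus 8, which (with the nop emitted
  // below for spacing) is the address of the first branch in the table, so
  // adding the untagged index scaled by the instruction size dispatches to
  // the branch for that smi.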
  add(pc, pc, Operand(index,
                      LSL,
                      assembler::arm::Instr::kInstrSizeLog2 - kSmiTagSize));
  BlockConstPoolBefore(pc_offset() + (targets.length() + 1) * kInstrSize);
  nop();  // Jump table alignment.
  for (int i = 0; i < targets.length(); i++) {
    b(targets[i]);
  }
}


void MacroAssembler::LoadRoot(Register destination,
                              Heap::RootListIndex index,
                              Condition cond) {
  ldr(destination, MemOperand(roots, index << kPointerSizeLog2), cond);
}


void MacroAssembler::StoreRoot(Register source,
                               Heap::RootListIndex index,
                               Condition cond) {
  str(source, MemOperand(roots, index << kPointerSizeLog2), cond);
}


void MacroAssembler::RecordWriteHelper(Register object,
                                       Register offset,
                                       Register scratch) {
  if (FLAG_debug_code) {
    // Check that the object is not in new space.
    Label not_in_new_space;
    InNewSpace(object, scratch, ne, &not_in_new_space);
    Abort("new-space object passed to RecordWriteHelper");
    bind(&not_in_new_space);
  }

  // This is how much we shift the remembered set bit offset to get the
  // offset of the word in the remembered set. We divide by kBitsPerInt (32,
  // shift right 5) and then multiply by kIntSize (4, shift left 2).
  const int kRSetWordShift = 3;

  Label fast;

  // Compute the bit offset in the remembered set.
  // object: heap object pointer (with tag)
  // offset: offset to store location from the object
  mov(ip, Operand(Page::kPageAlignmentMask));  // load mask only once
  and_(scratch, object, Operand(ip));  // offset into page of the object
  add(offset, scratch, Operand(offset));  // add offset into the object
  mov(offset, Operand(offset, LSR, kObjectAlignmentBits));

  // Compute the page address from the heap object pointer.
  // object: heap object pointer (with tag)
  // offset: bit offset of store position in the remembered set
  bic(object, object, Operand(ip));

  // If the bit offset lies beyond the normal remembered set range, it is in
  // the extra remembered set area of a large object.
  // object: page start
  // offset: bit offset of store position in the remembered set
  cmp(offset, Operand(Page::kPageSize / kPointerSize));
  b(lt, &fast);

  // Adjust the bit offset to be relative to the start of the extra
  // remembered set and the start address to be the address of the extra
  // remembered set.
  sub(offset, offset, Operand(Page::kPageSize / kPointerSize));
  // Load the array length into 'scratch' and multiply by four to get the
  // size in bytes of the elements.
  ldr(scratch, MemOperand(object, Page::kObjectStartOffset
                                  + FixedArray::kLengthOffset));
  mov(scratch, Operand(scratch, LSL, kObjectAlignmentBits));
  // Add the page header (including remembered set), array header, and array
  // body size to the page address.
  add(object, object, Operand(Page::kObjectStartOffset
                              + FixedArray::kHeaderSize));
  add(object, object, Operand(scratch));

  bind(&fast);
  // Get address of the rset word.
  // object: start of the remembered set (page start for the fast case)
  // offset: bit offset of store position in the remembered set
  bic(scratch, offset, Operand(kBitsPerInt - 1));  // clear the bit offset
  add(object, object, Operand(scratch, LSR, kRSetWordShift));
  // Get bit offset in the rset word.
  // object: address of remembered set word
  // offset: bit offset of store position
  and_(offset, offset, Operand(kBitsPerInt - 1));

  ldr(scratch, MemOperand(object));
  mov(ip, Operand(1));
  orr(scratch, scratch, Operand(ip, LSL, offset));
  str(scratch, MemOperand(object));
}


void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                Label* branch) {
  ASSERT(cc == eq || cc == ne);
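  // The new-space mask isolates the address bits that identify the space, so
  // the object lies in new space exactly when the masked address equals
  // new_space_start.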
  and_(scratch, object, Operand(ExternalReference::new_space_mask()));
  cmp(scratch, Operand(ExternalReference::new_space_start()));
  b(cc, branch);
}


// Will clobber 4 registers: object, offset, scratch, ip. The
// register 'object' contains a heap object pointer. The heap object
// tag is shifted away.
void MacroAssembler::RecordWrite(Register object, Register offset,
                                 Register scratch) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are cp.
  ASSERT(!object.is(cp) && !offset.is(cp) && !scratch.is(cp));

  Label done;

  // First, test that the object is not in the new space. We cannot set
  // remembered set bits in the new space.
  InNewSpace(object, scratch, eq, &done);

  // Record the actual write.
  RecordWriteHelper(object, offset, scratch);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    mov(object, Operand(BitCast<int32_t>(kZapValue)));
    mov(offset, Operand(BitCast<int32_t>(kZapValue)));
    mov(scratch, Operand(BitCast<int32_t>(kZapValue)));
  }
}


void MacroAssembler::Ldrd(Register dst1, Register dst2,
                          const MemOperand& src, Condition cond) {
  ASSERT(src.rm().is(no_reg));
  ASSERT(!dst1.is(lr));  // r14.
  ASSERT_EQ(0, dst1.code() % 2);
  ASSERT_EQ(dst1.code() + 1, dst2.code());

  // Generate two ldr instructions if ldrd is not available.
  if (CpuFeatures::IsSupported(ARMv7)) {
    CpuFeatures::Scope scope(ARMv7);
    ldrd(dst1, dst2, src, cond);
  } else {
    MemOperand src2(src);
    src2.set_offset(src2.offset() + 4);
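    // If the base register is also the first destination register, load the
    // second word first so the base is not clobbered before the second load.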
    if (dst1.is(src.rn())) {
      ldr(dst2, src2, cond);
      ldr(dst1, src, cond);
    } else {
      ldr(dst1, src, cond);
      ldr(dst2, src2, cond);
    }
  }
}


void MacroAssembler::Strd(Register src1, Register src2,
                          const MemOperand& dst, Condition cond) {
  ASSERT(dst.rm().is(no_reg));
  ASSERT(!src1.is(lr));  // r14.
  ASSERT_EQ(0, src1.code() % 2);
  ASSERT_EQ(src1.code() + 1, src2.code());

  // Generate two str instructions if strd is not available.
  if (CpuFeatures::IsSupported(ARMv7)) {
    CpuFeatures::Scope scope(ARMv7);
    strd(src1, src2, dst, cond);
  } else {
    MemOperand dst2(dst);
    dst2.set_offset(dst2.offset() + 4);
    str(src1, dst, cond);
    str(src2, dst2, cond);
  }
}


void MacroAssembler::EnterFrame(StackFrame::Type type) {
  // r0-r3: preserved
  stm(db_w, sp, cp.bit() | fp.bit() | lr.bit());
  mov(ip, Operand(Smi::FromInt(type)));
  push(ip);
  mov(ip, Operand(CodeObject()));
  push(ip);
  add(fp, sp, Operand(3 * kPointerSize));  // Adjust FP to point to saved FP.
}


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  // r0: preserved
  // r1: preserved
  // r2: preserved

  // Drop the execution stack down to the frame pointer and restore
  // the caller frame pointer and return address.
  mov(sp, fp);
  ldm(ia_w, sp, fp.bit() | lr.bit());
}


void MacroAssembler::EnterExitFrame(ExitFrame::Mode mode) {
  // Compute the argv pointer and keep it in a callee-saved register.
  // r0 is argc.
  add(r6, sp, Operand(r0, LSL, kPointerSizeLog2));
  sub(r6, r6, Operand(kPointerSize));

  // Compute callee's stack pointer before making changes and save it as
  // ip register so that it is restored as sp register on exit, thereby
  // popping the args.

  // ip = sp + kPointerSize * #args;
  add(ip, sp, Operand(r0, LSL, kPointerSizeLog2));

  // Prepare the stack to be aligned when calling into C. After this point
  // there are 5 pushes before the call into C, so the stack needs to be
  // aligned after 5 pushes.
  int frame_alignment = ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment != kPointerSize) {
    // The following code needs to be more general if this assert does not
    // hold.
    ASSERT(frame_alignment == 2 * kPointerSize);
    // With 5 pushes left the frame must be unaligned at this point.
    mov(r7, Operand(Smi::FromInt(0)));
    tst(sp, Operand((frame_alignment - kPointerSize) & frame_alignment_mask));
    push(r7, eq);  // Push if aligned to make it unaligned.
  }

  // Push in reverse order: caller_fp, sp_on_exit, and caller_pc.
  stm(db_w, sp, fp.bit() | ip.bit() | lr.bit());
  mov(fp, Operand(sp));  // Set up new frame pointer.

  mov(ip, Operand(CodeObject()));
  push(ip);  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  mov(ip, Operand(ExternalReference(Top::k_c_entry_fp_address)));
  str(fp, MemOperand(ip));
  mov(ip, Operand(ExternalReference(Top::k_context_address)));
  str(cp, MemOperand(ip));

  // Set up argc and the builtin function in callee-saved registers.
  mov(r4, Operand(r0));
  mov(r5, Operand(r1));


#ifdef ENABLE_DEBUGGER_SUPPORT
  // Save the state of all registers to the stack from the memory
  // location. This is needed to allow nested break points.
  if (mode == ExitFrame::MODE_DEBUG) {
    // Use sp as base to push.
    CopyRegistersFromMemoryToStack(sp, kJSCallerSaved);
  }
#endif
}


void MacroAssembler::InitializeNewString(Register string,
                                         Register length,
                                         Heap::RootListIndex map_index,
                                         Register scratch1,
                                         Register scratch2) {
  mov(scratch1, Operand(length, LSL, kSmiTagSize));
  LoadRoot(scratch2, map_index);
  str(scratch1, FieldMemOperand(string, String::kLengthOffset));
  mov(scratch1, Operand(String::kEmptyHashField));
  str(scratch2, FieldMemOperand(string, HeapObject::kMapOffset));
  str(scratch1, FieldMemOperand(string, String::kHashFieldOffset));
}


int MacroAssembler::ActivationFrameAlignment() {
#if defined(V8_HOST_ARCH_ARM)
  // Running on the real platform. Use the alignment as mandated by the local
  // environment.
  // Note: This will break if we ever start generating snapshots on one ARM
  // platform for another ARM platform with a different alignment.
  return OS::ActivationFrameAlignment();
#else  // defined(V8_HOST_ARCH_ARM)
  // If we are using the simulator then we should always align to the expected
  // alignment. As the simulator is used to generate snapshots we do not know
  // if the target platform will need alignment, so this is controlled from a
  // flag.
  return FLAG_sim_stack_alignment;
#endif  // defined(V8_HOST_ARCH_ARM)
}


void MacroAssembler::LeaveExitFrame(ExitFrame::Mode mode) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Restore the memory copy of the registers by digging them out from
  // the stack. This is needed to allow nested break points.
  if (mode == ExitFrame::MODE_DEBUG) {
    // This code intentionally clobbers r2 and r3.
    const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
    const int kOffset = ExitFrameConstants::kCodeOffset - kCallerSavedSize;
    add(r3, fp, Operand(kOffset));
    CopyRegistersFromStackToMemory(r3, r2, kJSCallerSaved);
  }
#endif

  // Clear top frame.
  mov(r3, Operand(0));
  mov(ip, Operand(ExternalReference(Top::k_c_entry_fp_address)));
  str(r3, MemOperand(ip));

  // Restore current context from top and clear it in debug mode.
  mov(ip, Operand(ExternalReference(Top::k_context_address)));
  ldr(cp, MemOperand(ip));
#ifdef DEBUG
  str(r3, MemOperand(ip));
#endif

  // Pop the arguments, restore registers, and return.
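  // The ldm below reloads sp from the frame itself (the sp_on_exit value
  // saved by EnterExitFrame), which also pops the arguments.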
  mov(sp, Operand(fp));  // Respect ABI stack constraint.
  ldm(ia, sp, fp.bit() | sp.bit() | pc.bit());
}


void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    Register code_reg,
                                    Label* done,
                                    InvokeFlag flag) {
  bool definitely_matches = false;
  Label regular_invoke;

  // Check whether the expected and actual argument counts match. If not,
  // set up registers according to contract with ArgumentsAdaptorTrampoline:
  // r0: actual arguments count
  // r1: function (passed through to callee)
  // r2: expected arguments count
  // r3: callee code entry

  // The code below is made a lot easier because the calling code already sets
  // up actual and expected registers according to the contract if values are
  // passed in registers.
  ASSERT(actual.is_immediate() || actual.reg().is(r0));
  ASSERT(expected.is_immediate() || expected.reg().is(r2));
  ASSERT((!code_constant.is_null() && code_reg.is(no_reg)) || code_reg.is(r3));

  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(r0, Operand(actual.immediate()));
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaptation code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        mov(r2, Operand(expected.immediate()));
      }
    }
  } else {
    if (actual.is_immediate()) {
      cmp(expected.reg(), Operand(actual.immediate()));
      b(eq, &regular_invoke);
      mov(r0, Operand(actual.immediate()));
    } else {
      cmp(expected.reg(), Operand(actual.reg()));
      b(eq, &regular_invoke);
    }
  }

  if (!definitely_matches) {
    if (!code_constant.is_null()) {
      mov(r3, Operand(code_constant));
      add(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag));
    }

    Handle<Code> adaptor =
        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
    if (flag == CALL_FUNCTION) {
      Call(adaptor, RelocInfo::CODE_TARGET);
      b(done);
    } else {
      Jump(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&regular_invoke);
  }
}


void MacroAssembler::InvokeCode(Register code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag) {
  Label done;

  InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
  if (flag == CALL_FUNCTION) {
    Call(code);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    Jump(code);
  }

  // Continue here if InvokePrologue does handle the invocation due to
  // mismatched parameter counts.
  bind(&done);
}


void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag) {
  Label done;

  InvokePrologue(expected, actual, code, no_reg, &done, flag);
  if (flag == CALL_FUNCTION) {
    Call(code, rmode);
  } else {
    Jump(code, rmode);
  }

  // Continue here if InvokePrologue does handle the invocation due to
  // mismatched parameter counts.
  bind(&done);
}


void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  // Contract with called JS functions requires that function is passed in r1.
  ASSERT(fun.is(r1));

  Register expected_reg = r2;
  Register code_reg = r3;

  ldr(code_reg, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
  ldr(expected_reg,
      FieldMemOperand(code_reg,
                      SharedFunctionInfo::kFormalParameterCountOffset));
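  // kCodeOffset is a field offset; the heap object tag is subtracted
  // explicitly because a plain MemOperand (not FieldMemOperand) is used here.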
  ldr(code_reg,
      MemOperand(code_reg, SharedFunctionInfo::kCodeOffset - kHeapObjectTag));
  add(code_reg, code_reg, Operand(Code::kHeaderSize - kHeapObjectTag));

  ParameterCount expected(expected_reg);
  InvokeCode(code_reg, expected, actual, flag);
}


void MacroAssembler::InvokeFunction(JSFunction* function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(function->is_compiled());

  // Get the function and setup the context.
  mov(r1, Operand(Handle<JSFunction>(function)));
  ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  // Invoke the cached code.
  Handle<Code> code(function->code());
  ParameterCount expected(function->shared()->formal_parameter_count());
  InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag);
}

#ifdef ENABLE_DEBUGGER_SUPPORT
void MacroAssembler::SaveRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of registers to memory location.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      mov(ip, Operand(ExternalReference(Debug_Address::Register(i))));
      str(reg, MemOperand(ip));
    }
  }
}


void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of memory location to registers.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      mov(ip, Operand(ExternalReference(Debug_Address::Register(i))));
      ldr(reg, MemOperand(ip));
    }
  }
}


void MacroAssembler::CopyRegistersFromMemoryToStack(Register base,
                                                    RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of the memory location to the stack and adjust base.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      mov(ip, Operand(ExternalReference(Debug_Address::Register(i))));
      ldr(ip, MemOperand(ip));
      str(ip, MemOperand(base, 4, NegPreIndex));
    }
  }
}


void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
                                                    Register scratch,
                                                    RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of the stack to the memory location and adjust base.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      mov(ip, Operand(ExternalReference(Debug_Address::Register(i))));
      ldr(scratch, MemOperand(base, 4, PostIndex));
      str(scratch, MemOperand(ip));
    }
  }
}


void MacroAssembler::DebugBreak() {
  ASSERT(allow_stub_calls());
  mov(r0, Operand(0));
  mov(r1, Operand(ExternalReference(Runtime::kDebugBreak)));
  CEntryStub ces(1);
  Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}
#endif


void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  // Adjust this code if not the case.
  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
  // The pc (return address) is passed in register lr.
  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      mov(r3, Operand(StackHandler::TRY_CATCH));
    } else {
      mov(r3, Operand(StackHandler::TRY_FINALLY));
    }
    ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize
           && StackHandlerConstants::kFPOffset == 2 * kPointerSize
           && StackHandlerConstants::kPCOffset == 3 * kPointerSize);
    stm(db_w, sp, r3.bit() | fp.bit() | lr.bit());
    // Save the current handler as the next handler.
    mov(r3, Operand(ExternalReference(Top::k_handler_address)));
    ldr(r1, MemOperand(r3));
    ASSERT(StackHandlerConstants::kNextOffset == 0);
    push(r1);
    // Link this handler as the new current one.
    str(sp, MemOperand(r3));
  } else {
    // Must preserve r0-r4, r5-r7 are available.
    ASSERT(try_location == IN_JS_ENTRY);
    // The frame pointer does not point to a JS frame so we save NULL
    // for fp. We expect the code throwing an exception to check fp
    // before dereferencing it to restore the context.
    mov(ip, Operand(0));  // To save a NULL frame pointer.
    mov(r6, Operand(StackHandler::ENTRY));
    ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize
           && StackHandlerConstants::kFPOffset == 2 * kPointerSize
           && StackHandlerConstants::kPCOffset == 3 * kPointerSize);
    stm(db_w, sp, r6.bit() | ip.bit() | lr.bit());
    // Save the current handler as the next handler.
    mov(r7, Operand(ExternalReference(Top::k_handler_address)));
    ldr(r6, MemOperand(r7));
    ASSERT(StackHandlerConstants::kNextOffset == 0);
    push(r6);
    // Link this handler as the new current one.
    str(sp, MemOperand(r7));
  }
}


void MacroAssembler::PopTryHandler() {
  ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
  pop(r1);
  mov(ip, Operand(ExternalReference(Top::k_handler_address)));
  add(sp, sp, Operand(StackHandlerConstants::kSize - kPointerSize));
  str(r1, MemOperand(ip));
}


Register MacroAssembler::CheckMaps(JSObject* object, Register object_reg,
                                   JSObject* holder, Register holder_reg,
                                   Register scratch,
                                   int save_at_depth,
                                   Label* miss) {
  // Make sure there's no overlap between scratch and the other
  // registers.
  ASSERT(!scratch.is(object_reg) && !scratch.is(holder_reg));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

  if (save_at_depth == depth) {
    str(reg, MemOperand(sp));
  }

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  while (object != holder) {
    depth++;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

    // Get the map of the current object.
    ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
    cmp(scratch, Operand(Handle<Map>(object->map())));

    // Branch on the result of the map check.
    b(ne, miss);

    // Check access rights to the global object. This has to happen
    // after the map check so that we know that the object is
    // actually a global object.
    if (object->IsJSGlobalProxy()) {
      CheckAccessGlobalProxy(reg, scratch, miss);
      // Restore scratch register to be the map of the object. In the
      // new space case below, we load the prototype from the map in
      // the scratch register.
      ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
    }

    reg = holder_reg;  // from now the object is in holder_reg
    JSObject* prototype = JSObject::cast(object->GetPrototype());
    if (Heap::InNewSpace(prototype)) {
      // The prototype is in new space; we cannot store a reference
      // to it in the code. Load it from the map.
      ldr(reg, FieldMemOperand(scratch, Map::kPrototypeOffset));
    } else {
      // The prototype is in old space; load it directly.
      mov(reg, Operand(Handle<JSObject>(prototype)));
    }

    if (save_at_depth == depth) {
      str(reg, MemOperand(sp));
    }

    // Go to the next object in the prototype chain.
    object = prototype;
  }

  // Check the holder map.
  ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
  cmp(scratch, Operand(Handle<Map>(object->map())));
  b(ne, miss);

  // Log the check depth.
  LOG(IntEvent("check-maps-depth", depth + 1));

  // Perform security check for access to the global object and return
  // the holder register.
  ASSERT(object == holder);
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
  if (object->IsJSGlobalProxy()) {
    CheckAccessGlobalProxy(reg, scratch, miss);
  }
  return reg;
}


void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));
  ASSERT(!holder_reg.is(ip));
  ASSERT(!scratch.is(ip));

  // Load current lexical context from the stack frame.
  ldr(scratch, MemOperand(fp, StandardFrameConstants::kContextOffset));
  // In debug mode, make sure the lexical context is set.
#ifdef DEBUG
  cmp(scratch, Operand(0));
  Check(ne, "we should not have an empty lexical context");
#endif

  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  ldr(scratch, FieldMemOperand(scratch, offset));
  ldr(scratch, FieldMemOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    // TODO(119): avoid push(holder_reg)/pop(holder_reg)
    // Cannot use ip as a temporary in this verification code, because ip is
    // clobbered as part of cmp with an object Operand.
    push(holder_reg);  // Temporarily save holder on the stack.
    // Read the first word and compare to the global_context_map.
    ldr(holder_reg, FieldMemOperand(scratch, HeapObject::kMapOffset));
    LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
    cmp(holder_reg, ip);
    Check(eq, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);  // Restore holder.
  }

  // Check if both contexts are the same.
  ldr(ip, FieldMemOperand(holder_reg, JSGlobalProxy::kContextOffset));
  cmp(scratch, Operand(ip));
  b(eq, &same_contexts);

  // Check the context is a global context.
  if (FLAG_debug_code) {
    // TODO(119): avoid push(holder_reg)/pop(holder_reg)
    // Cannot use ip as a temporary in this verification code, because ip is
    // clobbered as part of cmp with an object Operand.
    push(holder_reg);  // Temporarily save holder on the stack.
    mov(holder_reg, ip);  // Move ip to its holding place.
    LoadRoot(ip, Heap::kNullValueRootIndex);
    cmp(holder_reg, ip);
    Check(ne, "JSGlobalProxy::context() should not be null.");

    ldr(holder_reg, FieldMemOperand(holder_reg, HeapObject::kMapOffset));
    LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
    cmp(holder_reg, ip);
    Check(eq, "JSGlobalObject::global_context should be a global context.");
    // Restoring ip is not needed; ip is reloaded below.
    pop(holder_reg);  // Restore holder.
    // Restore ip to holder's context.
    ldr(ip, FieldMemOperand(holder_reg, JSGlobalProxy::kContextOffset));
  }

  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;

  ldr(scratch, FieldMemOperand(scratch, token_offset));
  ldr(ip, FieldMemOperand(ip, token_offset));
  cmp(scratch, Operand(ip));
  b(ne, miss);

  bind(&same_contexts);
}


void MacroAssembler::AllocateInNewSpace(int object_size,
                                        Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(scratch1));
  ASSERT(!scratch1.is(scratch2));

  // Make the object size into bytes.
  if ((flags & SIZE_IN_WORDS) != 0) {
    object_size *= kPointerSize;
  }
  ASSERT_EQ(0, object_size & kObjectAlignmentMask);

  // Load address of new object into result and allocation top address into
  // scratch1.
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();
  mov(scratch1, Operand(new_space_allocation_top));
  if ((flags & RESULT_CONTAINS_TOP) == 0) {
    ldr(result, MemOperand(scratch1));
  } else if (FLAG_debug_code) {
    // Assert that result actually contains top on entry. scratch2 is used
    // immediately below so this use of scratch2 does not cause difference
    // with respect to register content between debug and release mode.
    ldr(scratch2, MemOperand(scratch1));
    cmp(result, scratch2);
    Check(eq, "Unexpected allocation top");
  }

  // Calculate new top and bail out if new space is exhausted. Use result
  // to calculate the new top.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  mov(scratch2, Operand(new_space_allocation_limit));
  ldr(scratch2, MemOperand(scratch2));
  add(result, result, Operand(object_size));
  cmp(result, Operand(scratch2));
  b(hi, gc_required);
  str(result, MemOperand(scratch1));

  // Tag and adjust back to start of new object.
  if ((flags & TAG_OBJECT) != 0) {
    sub(result, result, Operand(object_size - kHeapObjectTag));
  } else {
    sub(result, result, Operand(object_size));
  }
}


void MacroAssembler::AllocateInNewSpace(Register object_size,
                                        Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(scratch1));
  ASSERT(!scratch1.is(scratch2));

  // Load address of new object into result and allocation top address into
  // scratch1.
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();
  mov(scratch1, Operand(new_space_allocation_top));
  if ((flags & RESULT_CONTAINS_TOP) == 0) {
    ldr(result, MemOperand(scratch1));
  } else if (FLAG_debug_code) {
    // Assert that result actually contains top on entry. scratch2 is used
    // immediately below so this use of scratch2 does not cause difference
    // with respect to register content between debug and release mode.
    ldr(scratch2, MemOperand(scratch1));
    cmp(result, scratch2);
    Check(eq, "Unexpected allocation top");
  }

  // Calculate new top and bail out if new space is exhausted. Use result
  // to calculate the new top. If the object size is given in words, a shift
  // is required to get the number of bytes.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  mov(scratch2, Operand(new_space_allocation_limit));
  ldr(scratch2, MemOperand(scratch2));
  if ((flags & SIZE_IN_WORDS) != 0) {
    add(result, result, Operand(object_size, LSL, kPointerSizeLog2));
  } else {
    add(result, result, Operand(object_size));
  }
  cmp(result, Operand(scratch2));
  b(hi, gc_required);

  // Update allocation top. result temporarily holds the new top.
  if (FLAG_debug_code) {
    tst(result, Operand(kObjectAlignmentMask));
    Check(eq, "Unaligned allocation in new space");
  }
  str(result, MemOperand(scratch1));

  // Adjust back to start of new object.
  if ((flags & SIZE_IN_WORDS) != 0) {
    sub(result, result, Operand(object_size, LSL, kPointerSizeLog2));
  } else {
    sub(result, result, Operand(object_size));
  }

  // Tag object if requested.
  if ((flags & TAG_OBJECT) != 0) {
    add(result, result, Operand(kHeapObjectTag));
  }
}


void MacroAssembler::UndoAllocationInNewSpace(Register object,
                                              Register scratch) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Make sure the object has no tag before resetting top.
  and_(object, object, Operand(~kHeapObjectTagMask));
#ifdef DEBUG
  // Check that the object un-allocated is below the current top.
  mov(scratch, Operand(new_space_allocation_top));
  ldr(scratch, MemOperand(scratch));
  cmp(object, scratch);
  Check(lt, "Undo allocation of non allocated memory");
#endif
  // Write the address of the object to un-allocate as the current top.
  mov(scratch, Operand(new_space_allocation_top));
  str(object, MemOperand(scratch));
}


void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, Operand(length, LSL, 1));  // Length in bytes, not chars.
  add(scratch1, scratch1,
      Operand(kObjectAlignmentMask + SeqTwoByteString::kHeaderSize));
  and_(scratch1, scratch1, Operand(~kObjectAlignmentMask));

  // Allocate two-byte string in new space.
  AllocateInNewSpace(scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  InitializeNewString(result,
                      length,
                      Heap::kStringMapRootIndex,
                      scratch1,
                      scratch2);
}


void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
  ASSERT(kCharSize == 1);
  add(scratch1, length,
      Operand(kObjectAlignmentMask + SeqAsciiString::kHeaderSize));
  and_(scratch1, scratch1, Operand(~kObjectAlignmentMask));

  // Allocate ASCII string in new space.
  AllocateInNewSpace(scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  InitializeNewString(result,
                      length,
                      Heap::kAsciiStringMapRootIndex,
                      scratch1,
                      scratch2);
}


void MacroAssembler::AllocateTwoByteConsString(Register result,
                                               Register length,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  InitializeNewString(result,
                      length,
                      Heap::kConsStringMapRootIndex,
                      scratch1,
                      scratch2);
}


void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register length,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  InitializeNewString(result,
                      length,
                      Heap::kConsAsciiStringMapRootIndex,
                      scratch1,
                      scratch2);
}


void MacroAssembler::CompareObjectType(Register object,
                                       Register map,
                                       Register type_reg,
                                       InstanceType type) {
  ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
  CompareInstanceType(map, type_reg, type);
}


void MacroAssembler::CompareInstanceType(Register map,
                                         Register type_reg,
                                         InstanceType type) {
  ldrb(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
  cmp(type_reg, Operand(type));
}


void MacroAssembler::CheckMap(Register obj,
                              Register scratch,
                              Handle<Map> map,
                              Label* fail,
                              bool is_heap_object) {
  if (!is_heap_object) {
    BranchOnSmi(obj, fail);
  }
  ldr(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
  mov(ip, Operand(map));
  cmp(scratch, ip);
  b(ne, fail);
}


void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss) {
  // Check that the receiver isn't a smi.
  BranchOnSmi(function, miss);

  // Check that the function really is a function. Load map into result reg.
  CompareObjectType(function, result, scratch, JS_FUNCTION_TYPE);
  b(ne, miss);

  // Make sure that the function has an instance prototype.
  Label non_instance;
  ldrb(scratch, FieldMemOperand(result, Map::kBitFieldOffset));
  tst(scratch, Operand(1 << Map::kHasNonInstancePrototype));
  b(ne, &non_instance);

  // Get the prototype or initial map from the function.
  ldr(result,
      FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  cmp(result, ip);
  b(eq, miss);

  // If the function does not have an initial map, we're done.
  Label done;
  CompareObjectType(result, scratch, scratch, MAP_TYPE);
  b(ne, &done);

  // Get the prototype from the initial map.
  ldr(result, FieldMemOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  ldr(result, FieldMemOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}


void MacroAssembler::CallStub(CodeStub* stub, Condition cond) {
  ASSERT(allow_stub_calls());  // stub calls are not allowed in some stubs
  Call(stub->GetCode(), RelocInfo::CODE_TARGET, cond);
}


void MacroAssembler::TailCallStub(CodeStub* stub, Condition cond) {
  ASSERT(allow_stub_calls());  // stub calls are not allowed in some stubs
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond);
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  if (argc > 1) {
    add(sp, sp, Operand((argc - 1) * kPointerSize));
  }
  Ret();
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    add(sp, sp, Operand(num_arguments * kPointerSize));
  }
  LoadRoot(r0, Heap::kUndefinedValueRootIndex);
}


void MacroAssembler::IntegerToDoubleConversionWithVFP3(Register inReg,
                                                       Register outHighReg,
                                                       Register outLowReg) {
  // ARMv7 VFP3 instructions to implement integer to double conversion.
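  // Untag the smi into r7, move it into a single-precision VFP register,
  // convert it to a double, and move the result back out as the two halves
  // of a register pair.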
  mov(r7, Operand(inReg, ASR, kSmiTagSize));
  vmov(s15, r7);
  vcvt_f64_s32(d7, s15);
  vmov(outLowReg, outHighReg, d7);
}


void MacroAssembler::GetLeastBitsFromSmi(Register dst,
                                         Register src,
                                         int num_least_bits) {
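  // With ARMv7, extract the least significant bits of the smi directly with
  // ubfx (the field starts above the tag bit); otherwise untag and mask.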
1330 if (CpuFeatures::IsSupported(ARMv7)) {
1331 ubfx(dst, src, Operand(kSmiTagSize), Operand(num_least_bits - 1));
1332 } else {
1333 mov(dst, Operand(src, ASR, kSmiTagSize));
1334 and_(dst, dst, Operand((1 << num_least_bits) - 1));
1335 }
1336}
1337
1338
Steve Blocka7e24c12009-10-30 11:49:00 +00001339void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
1340 // All parameters are on the stack. r0 has the return value after call.
1341
1342 // If the expected number of arguments of the runtime function is
1343 // constant, we check that the actual number of arguments match the
1344 // expectation.
1345 if (f->nargs >= 0 && f->nargs != num_arguments) {
1346 IllegalOperation(num_arguments);
1347 return;
1348 }
1349
Leon Clarke4515c472010-02-03 11:58:03 +00001350 // TODO(1236192): Most runtime routines don't need the number of
1351 // arguments passed in because it is constant. At some point we
1352 // should remove this need and make the runtime routine entry code
1353 // smarter.
1354 mov(r0, Operand(num_arguments));
1355 mov(r1, Operand(ExternalReference(f)));
1356 CEntryStub stub(1);
Steve Blocka7e24c12009-10-30 11:49:00 +00001357 CallStub(&stub);
1358}
1359
1360
1361void MacroAssembler::CallRuntime(Runtime::FunctionId fid, int num_arguments) {
1362 CallRuntime(Runtime::FunctionForId(fid), num_arguments);
1363}
1364
1365
Andrei Popescu402d9372010-02-26 13:31:12 +00001366void MacroAssembler::CallExternalReference(const ExternalReference& ext,
1367 int num_arguments) {
1368 mov(r0, Operand(num_arguments));
1369 mov(r1, Operand(ext));
1370
1371 CEntryStub stub(1);
1372 CallStub(&stub);
1373}
1374
1375
Steve Block6ded16b2010-05-10 14:33:55 +01001376void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
1377 int num_arguments,
1378 int result_size) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001379 // TODO(1236192): Most runtime routines don't need the number of
1380 // arguments passed in because it is constant. At some point we
1381 // should remove this need and make the runtime routine entry code
1382 // smarter.
1383 mov(r0, Operand(num_arguments));
Steve Block6ded16b2010-05-10 14:33:55 +01001384 JumpToExternalReference(ext);
Steve Blocka7e24c12009-10-30 11:49:00 +00001385}
1386
1387
Steve Block6ded16b2010-05-10 14:33:55 +01001388void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
1389 int num_arguments,
1390 int result_size) {
1391 TailCallExternalReference(ExternalReference(fid), num_arguments, result_size);
1392}
1393
1394
1395void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001396#if defined(__thumb__)
1397 // Thumb mode builtin.
1398 ASSERT((reinterpret_cast<intptr_t>(builtin.address()) & 1) == 1);
1399#endif
1400 mov(r1, Operand(builtin));
1401 CEntryStub stub(1);
1402 Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
1403}
1404
1405
Steve Blocka7e24c12009-10-30 11:49:00 +00001406void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
1407 InvokeJSFlags flags) {
Andrei Popescu402d9372010-02-26 13:31:12 +00001408 GetBuiltinEntry(r2, id);
Steve Blocka7e24c12009-10-30 11:49:00 +00001409 if (flags == CALL_JS) {
Andrei Popescu402d9372010-02-26 13:31:12 +00001410 Call(r2);
Steve Blocka7e24c12009-10-30 11:49:00 +00001411 } else {
1412 ASSERT(flags == JUMP_JS);
Andrei Popescu402d9372010-02-26 13:31:12 +00001413 Jump(r2);
Steve Blocka7e24c12009-10-30 11:49:00 +00001414 }
1415}
1416
1417
1418void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
Steve Block6ded16b2010-05-10 14:33:55 +01001419 ASSERT(!target.is(r1));
1420
1421 // Load the builtins object into target register.
1422 ldr(target, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
1423 ldr(target, FieldMemOperand(target, GlobalObject::kBuiltinsOffset));
1424
Andrei Popescu402d9372010-02-26 13:31:12 +00001425 // Load the JavaScript builtin function from the builtins object.
Steve Block6ded16b2010-05-10 14:33:55 +01001426 ldr(r1, FieldMemOperand(target,
1427 JSBuiltinsObject::OffsetOfFunctionWithId(id)));
1428
1429 // Load the code entry point from the builtins object.
1430 ldr(target, FieldMemOperand(target,
1431 JSBuiltinsObject::OffsetOfCodeWithId(id)));
1432 if (FLAG_debug_code) {
1433 // Make sure the code objects in the builtins object and in the
1434 // builtin function are the same.
1435 push(r1);
1436 ldr(r1, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
1437 ldr(r1, FieldMemOperand(r1, SharedFunctionInfo::kCodeOffset));
1438 cmp(r1, target);
1439 Assert(eq, "Builtin code object changed");
1440 pop(r1);
1441 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001442 add(target, target, Operand(Code::kHeaderSize - kHeapObjectTag));
1443}
1444
1445
1446void MacroAssembler::SetCounter(StatsCounter* counter, int value,
1447 Register scratch1, Register scratch2) {
1448 if (FLAG_native_code_counters && counter->Enabled()) {
1449 mov(scratch1, Operand(value));
1450 mov(scratch2, Operand(ExternalReference(counter)));
1451 str(scratch1, MemOperand(scratch2));
1452 }
1453}
1454
1455
1456void MacroAssembler::IncrementCounter(StatsCounter* counter, int value,
1457 Register scratch1, Register scratch2) {
1458 ASSERT(value > 0);
1459 if (FLAG_native_code_counters && counter->Enabled()) {
1460 mov(scratch2, Operand(ExternalReference(counter)));
1461 ldr(scratch1, MemOperand(scratch2));
1462 add(scratch1, scratch1, Operand(value));
1463 str(scratch1, MemOperand(scratch2));
1464 }
1465}
1466
1467
1468void MacroAssembler::DecrementCounter(StatsCounter* counter, int value,
1469 Register scratch1, Register scratch2) {
1470 ASSERT(value > 0);
1471 if (FLAG_native_code_counters && counter->Enabled()) {
1472 mov(scratch2, Operand(ExternalReference(counter)));
1473 ldr(scratch1, MemOperand(scratch2));
1474 sub(scratch1, scratch1, Operand(value));
1475 str(scratch1, MemOperand(scratch2));
1476 }
1477}
1478
1479
1480void MacroAssembler::Assert(Condition cc, const char* msg) {
1481 if (FLAG_debug_code)
1482 Check(cc, msg);
1483}
1484
1485
1486void MacroAssembler::Check(Condition cc, const char* msg) {
1487 Label L;
1488 b(cc, &L);
1489 Abort(msg);
1490 // will not return here
1491 bind(&L);
1492}
1493
1494
void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  set_allow_stub_calls(true);

  mov(r0, Operand(p0));
  push(r0);
  mov(r0, Operand(Smi::FromInt(p1 - p0)));
  push(r0);
  CallRuntime(Runtime::kAbort, 2);
  // Will not return here.
}


void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    ldr(dst, MemOperand(cp, Context::SlotOffset(Context::CLOSURE_INDEX)));
    // Load the function context (which is the incoming, outer context).
    ldr(dst, FieldMemOperand(dst, JSFunction::kContextOffset));
    for (int i = 1; i < context_chain_length; i++) {
      ldr(dst, MemOperand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
      ldr(dst, FieldMemOperand(dst, JSFunction::kContextOffset));
    }
    // The context may be an intermediate context, not a function context.
    ldr(dst, MemOperand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  } else {  // Slot is in the current function context.
    // The context may be an intermediate context, not a function context.
    ldr(dst, MemOperand(cp, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  }
}


void MacroAssembler::JumpIfNotBothSmi(Register reg1,
                                      Register reg2,
                                      Label* on_not_both_smi) {
  ASSERT_EQ(0, kSmiTag);
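  // The second tst below is only executed if the first one set eq (reg1 is a
  // smi), so the branch is taken whenever either register has its tag bit set.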
  tst(reg1, Operand(kSmiTagMask));
  tst(reg2, Operand(kSmiTagMask), eq);
  b(ne, on_not_both_smi);
}


void MacroAssembler::JumpIfEitherSmi(Register reg1,
                                     Register reg2,
                                     Label* on_either_smi) {
  ASSERT_EQ(0, kSmiTag);
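  // The second tst below only runs if reg1 is not a smi (ne), so the branch
  // is taken as soon as either register turns out to have a clear tag bit.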
  tst(reg1, Operand(kSmiTagMask));
  tst(reg2, Operand(kSmiTagMask), ne);
  b(eq, on_either_smi);
}


void MacroAssembler::JumpIfNonSmisNotBothSequentialAsciiStrings(
    Register first,
    Register second,
    Register scratch1,
    Register scratch2,
    Label* failure) {
  // Test that both first and second are sequential ASCII strings.
  // Assume that they are non-smis.
  ldr(scratch1, FieldMemOperand(first, HeapObject::kMapOffset));
  ldr(scratch2, FieldMemOperand(second, HeapObject::kMapOffset));
  ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  ldrb(scratch2, FieldMemOperand(scratch2, Map::kInstanceTypeOffset));

  JumpIfBothInstanceTypesAreNotSequentialAscii(scratch1,
                                               scratch2,
                                               scratch1,
                                               scratch2,
                                               failure);
}


void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register first,
                                                         Register second,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         Label* failure) {
  // Check that neither is a smi. The AND of the tag bits is zero (eq) if
  // either value is a smi, in which case we bail out to failure.
  ASSERT_EQ(0, kSmiTag);
  and_(scratch1, first, Operand(second));
  tst(scratch1, Operand(kSmiTagMask));
  b(eq, failure);
  JumpIfNonSmisNotBothSequentialAsciiStrings(first,
                                             second,
                                             scratch1,
                                             scratch2,
                                             failure);
}


// Allocates a heap number or jumps to the gc_required label if the young
// space is full and a scavenge is needed.
void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate an object in the heap for the heap number and tag it as a heap
  // object.
  AllocateInNewSpace(HeapNumber::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Get heap number map and store it in the allocated object.
  LoadRoot(scratch1, Heap::kHeapNumberMapRootIndex);
  str(scratch1, FieldMemOperand(result, HeapObject::kMapOffset));
}


void MacroAssembler::CountLeadingZeros(Register source,
                                       Register scratch,
                                       Register zeros) {
#ifdef CAN_USE_ARMV5_INSTRUCTIONS
  clz(zeros, source);  // This instruction is only supported on ARMv5 and up.
#else
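  // No clz instruction available: count leading zeros with a binary search.
  // Each step tests the top bits of the remaining value; when they are all
  // zero, the step size is added to the count and the value is shifted left
  // (the conditional add/mov pairs reuse the flags set by tst).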
  mov(zeros, Operand(0));
  mov(scratch, source);
  // Top 16.
  tst(scratch, Operand(0xffff0000));
  add(zeros, zeros, Operand(16), LeaveCC, eq);
  mov(scratch, Operand(scratch, LSL, 16), LeaveCC, eq);
  // Top 8.
  tst(scratch, Operand(0xff000000));
  add(zeros, zeros, Operand(8), LeaveCC, eq);
  mov(scratch, Operand(scratch, LSL, 8), LeaveCC, eq);
  // Top 4.
  tst(scratch, Operand(0xf0000000));
  add(zeros, zeros, Operand(4), LeaveCC, eq);
  mov(scratch, Operand(scratch, LSL, 4), LeaveCC, eq);
  // Top 2.
  tst(scratch, Operand(0xc0000000));
  add(zeros, zeros, Operand(2), LeaveCC, eq);
  mov(scratch, Operand(scratch, LSL, 2), LeaveCC, eq);
  // Top bit.
  tst(scratch, Operand(0x80000000u));
  add(zeros, zeros, Operand(1), LeaveCC, eq);
#endif
}


void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialAscii(
    Register first,
    Register second,
    Register scratch1,
    Register scratch2,
    Label* failure) {
  int kFlatAsciiStringMask =
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask;
  int kFlatAsciiStringTag = ASCII_STRING_TYPE;
  and_(scratch1, first, Operand(kFlatAsciiStringMask));
  and_(scratch2, second, Operand(kFlatAsciiStringMask));
  cmp(scratch1, Operand(kFlatAsciiStringTag));
  // Ignore second test if first test failed.
  cmp(scratch2, Operand(kFlatAsciiStringTag), eq);
  b(ne, failure);
}


void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(Register type,
                                                            Register scratch,
                                                            Label* failure) {
  int kFlatAsciiStringMask =
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask;
  int kFlatAsciiStringTag = ASCII_STRING_TYPE;
  and_(scratch, type, Operand(kFlatAsciiStringMask));
  cmp(scratch, Operand(kFlatAsciiStringTag));
  b(ne, failure);
}


void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
  int frame_alignment = ActivationFrameAlignment();
  // Up to four simple arguments are passed in registers r0..r3.
  int stack_passed_arguments = (num_arguments <= 4) ? 0 : num_arguments - 4;
  if (frame_alignment > kPointerSize) {
    // Make stack end at alignment and make room for num_arguments - 4 words
    // and the original value of sp.
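    // CallCFunction restores sp from the slot written below once the call
    // has returned.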
    mov(scratch, sp);
    sub(sp, sp, Operand((stack_passed_arguments + 1) * kPointerSize));
    ASSERT(IsPowerOf2(frame_alignment));
    and_(sp, sp, Operand(-frame_alignment));
    str(scratch, MemOperand(sp, stack_passed_arguments * kPointerSize));
  } else {
    sub(sp, sp, Operand(stack_passed_arguments * kPointerSize));
  }
}


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  mov(ip, Operand(function));
  CallCFunction(ip, num_arguments);
}


void MacroAssembler::CallCFunction(Register function, int num_arguments) {
  // Make sure that the stack is aligned before calling a C function unless
  // running in the simulator. The simulator has its own alignment check which
  // provides more information.
#if defined(V8_HOST_ARCH_ARM)
  if (FLAG_debug_code) {
    int frame_alignment = OS::ActivationFrameAlignment();
    int frame_alignment_mask = frame_alignment - 1;
    if (frame_alignment > kPointerSize) {
      ASSERT(IsPowerOf2(frame_alignment));
      Label alignment_as_expected;
      tst(sp, Operand(frame_alignment_mask));
      b(eq, &alignment_as_expected);
      // Don't use Check here, as it will call Runtime_Abort possibly
      // re-entering here.
      stop("Unexpected alignment");
      bind(&alignment_as_expected);
    }
  }
#endif

  // Just call directly. The function called cannot cause a GC, or
  // allow preemption, so the return address in the link register
  // stays correct.
  Call(function);
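  // Drop the space reserved by PrepareCallCFunction. If the frame was
  // realigned, the original sp was saved just above the stack-passed
  // arguments and is reloaded from there.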
  int stack_passed_arguments = (num_arguments <= 4) ? 0 : num_arguments - 4;
  if (OS::ActivationFrameAlignment() > kPointerSize) {
    ldr(sp, MemOperand(sp, stack_passed_arguments * kPointerSize));
  } else {
    add(sp, sp, Operand(stack_passed_arguments * kPointerSize));
  }
}


#ifdef ENABLE_DEBUGGER_SUPPORT
CodePatcher::CodePatcher(byte* address, int instructions)
    : address_(address),
      instructions_(instructions),
      size_(instructions * Assembler::kInstrSize),
      masm_(address, size_ + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate
  // size bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


void CodePatcher::Emit(Instr x) {
  masm()->emit(x);
}


void CodePatcher::Emit(Address addr) {
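  // Emit the address as a raw 32-bit word in the instruction stream.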
  masm()->emit(reinterpret_cast<Instr>(addr));
}
#endif  // ENABLE_DEBUGGER_SUPPORT


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM