// Copyright 2006-2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "debug.h"
#include "runtime.h"

namespace v8 {
namespace internal {

MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      code_object_(Heap::undefined_value()) {
}


// We always generate arm code, never thumb code, even if V8 is compiled to
// thumb, so we require inter-working support
#if defined(__thumb__) && !defined(USE_THUMB_INTERWORK)
#error "flag -mthumb-interwork missing"
#endif


// We do not support thumb inter-working with an arm architecture not supporting
// the blx instruction (below v5t). If you know what CPU you are compiling for
// you can use -march=armv7 or similar.
#if defined(USE_THUMB_INTERWORK) && !defined(CAN_USE_THUMB_INSTRUCTIONS)
# error "For thumb inter-working we require an architecture which supports blx"
#endif


// Using bx does not yield better code, so use it only when required
#if defined(USE_THUMB_INTERWORK)
#define USE_BX 1
#endif


void MacroAssembler::Jump(Register target, Condition cond) {
#if USE_BX
  bx(target, cond);
#else
  mov(pc, Operand(target), LeaveCC, cond);
#endif
}


void MacroAssembler::Jump(intptr_t target, RelocInfo::Mode rmode,
                          Condition cond) {
#if USE_BX
  mov(ip, Operand(target, rmode), LeaveCC, cond);
  bx(ip, cond);
#else
  mov(pc, Operand(target, rmode), LeaveCC, cond);
#endif
}


void MacroAssembler::Jump(byte* target, RelocInfo::Mode rmode,
                          Condition cond) {
  ASSERT(!RelocInfo::IsCodeTarget(rmode));
  Jump(reinterpret_cast<intptr_t>(target), rmode, cond);
}


void MacroAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
                          Condition cond) {
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  // 'code' is always generated ARM code, never THUMB code
  Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond);
}


void MacroAssembler::Call(Register target, Condition cond) {
#if USE_BLX
  blx(target, cond);
#else
  // set lr for return at current pc + 8
  mov(lr, Operand(pc), LeaveCC, cond);
  mov(pc, Operand(target), LeaveCC, cond);
#endif
}


void MacroAssembler::Call(intptr_t target, RelocInfo::Mode rmode,
                          Condition cond) {
#if USE_BLX
  // On ARMv5 and after the recommended call sequence is:
  //  ldr ip, [pc, #...]
  //  blx ip

  // The two instructions (ldr and blx) could be separated by a constant
  // pool and the code would still work. The issue comes from the
  // patching code, which expects the ldr to be just above the blx.
  { BlockConstPoolScope block_const_pool(this);
    // Statement positions are expected to be recorded when the target
    // address is loaded. The mov method automatically records positions
    // when pc is the target; since that is not the case here, we have to
    // record the positions explicitly.
    WriteRecordedPositions();

    mov(ip, Operand(target, rmode), LeaveCC, cond);
    blx(ip, cond);
  }

  ASSERT(kCallTargetAddressOffset == 2 * kInstrSize);
#else
  // Set lr for return at current pc + 8.
  mov(lr, Operand(pc), LeaveCC, cond);
  // Emit a ldr<cond> pc, [pc + offset of target in constant pool].
  mov(pc, Operand(target, rmode), LeaveCC, cond);

  ASSERT(kCallTargetAddressOffset == kInstrSize);
#endif
}


void MacroAssembler::Call(byte* target, RelocInfo::Mode rmode,
                          Condition cond) {
  ASSERT(!RelocInfo::IsCodeTarget(rmode));
  Call(reinterpret_cast<intptr_t>(target), rmode, cond);
}


void MacroAssembler::Call(Handle<Code> code, RelocInfo::Mode rmode,
                          Condition cond) {
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  // 'code' is always generated ARM code, never THUMB code
  Call(reinterpret_cast<intptr_t>(code.location()), rmode, cond);
}


void MacroAssembler::Ret(Condition cond) {
#if USE_BX
  bx(lr, cond);
#else
  mov(pc, Operand(lr), LeaveCC, cond);
#endif
}


void MacroAssembler::StackLimitCheck(Label* on_stack_overflow) {
  LoadRoot(ip, Heap::kStackLimitRootIndex);
  cmp(sp, Operand(ip));
  b(lo, on_stack_overflow);
}


void MacroAssembler::Drop(int count, Condition cond) {
  if (count > 0) {
    add(sp, sp, Operand(count * kPointerSize), LeaveCC, cond);
  }
}


void MacroAssembler::Swap(Register reg1, Register reg2, Register scratch) {
  if (scratch.is(no_reg)) {
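    // With no spare register available, fall back on the classic three-eor
    // XOR swap, which exchanges reg1 and reg2 in place without a temporary.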
    eor(reg1, reg1, Operand(reg2));
    eor(reg2, reg2, Operand(reg1));
    eor(reg1, reg1, Operand(reg2));
  } else {
    mov(scratch, reg1);
    mov(reg1, reg2);
    mov(reg2, scratch);
  }
}


void MacroAssembler::Call(Label* target) {
  bl(target);
}


void MacroAssembler::Move(Register dst, Handle<Object> value) {
  mov(dst, Operand(value));
}


void MacroAssembler::Move(Register dst, Register src) {
  if (!dst.is(src)) {
    mov(dst, src);
  }
}


void MacroAssembler::SmiJumpTable(Register index, Vector<Label*> targets) {
  // Empty the const pool.
  CheckConstPool(true, true);
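  // Reading pc on ARM yields the address of the add below plus 8, which is
  // where the branch table starts (the nop fills the slot in between), so
  // adding index * kInstrSize selects the right entry. The index is a smi,
  // hence the shift by kInstrSizeLog2 - kSmiTagSize rather than
  // kInstrSizeLog2.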
  add(pc, pc, Operand(index,
                      LSL,
                      assembler::arm::Instr::kInstrSizeLog2 - kSmiTagSize));
  BlockConstPoolBefore(pc_offset() + (targets.length() + 1) * kInstrSize);
  nop();  // Jump table alignment.
  for (int i = 0; i < targets.length(); i++) {
    b(targets[i]);
  }
}


void MacroAssembler::LoadRoot(Register destination,
                              Heap::RootListIndex index,
                              Condition cond) {
  ldr(destination, MemOperand(roots, index << kPointerSizeLog2), cond);
}


void MacroAssembler::StoreRoot(Register source,
                               Heap::RootListIndex index,
                               Condition cond) {
  str(source, MemOperand(roots, index << kPointerSizeLog2), cond);
}


void MacroAssembler::RecordWriteHelper(Register object,
                                       Register offset,
                                       Register scratch) {
  if (FLAG_debug_code) {
    // Check that the object is not in new space.
    Label not_in_new_space;
    InNewSpace(object, scratch, ne, &not_in_new_space);
    Abort("new-space object passed to RecordWriteHelper");
    bind(&not_in_new_space);
  }

  // This is how much we shift the remembered set bit offset to get the
  // offset of the word in the remembered set. We divide by kBitsPerInt (32,
  // shift right 5) and then multiply by kIntSize (4, shift left 2).
  const int kRSetWordShift = 3;

  Label fast;

  // Compute the bit offset in the remembered set.
  // object: heap object pointer (with tag)
  // offset: offset to store location from the object
  mov(ip, Operand(Page::kPageAlignmentMask));  // load mask only once
  and_(scratch, object, Operand(ip));  // offset into page of the object
  add(offset, scratch, Operand(offset));  // add offset into the object
  mov(offset, Operand(offset, LSR, kObjectAlignmentBits));

  // Compute the page address from the heap object pointer.
  // object: heap object pointer (with tag)
  // offset: bit offset of store position in the remembered set
  bic(object, object, Operand(ip));

  // If the bit offset lies beyond the normal remembered set range, it is in
  // the extra remembered set area of a large object.
  // object: page start
  // offset: bit offset of store position in the remembered set
  cmp(offset, Operand(Page::kPageSize / kPointerSize));
  b(lt, &fast);

  // Adjust the bit offset to be relative to the start of the extra
  // remembered set and the start address to be the address of the extra
  // remembered set.
  sub(offset, offset, Operand(Page::kPageSize / kPointerSize));
  // Load the array length into 'scratch' and multiply by four to get the
  // size in bytes of the elements.
  ldr(scratch, MemOperand(object, Page::kObjectStartOffset
                                  + FixedArray::kLengthOffset));
  mov(scratch, Operand(scratch, LSL, kObjectAlignmentBits));
  // Add the page header (including remembered set), array header, and array
  // body size to the page address.
  add(object, object, Operand(Page::kObjectStartOffset
                              + FixedArray::kHeaderSize));
  add(object, object, Operand(scratch));

  bind(&fast);
  // Get address of the rset word.
  // object: start of the remembered set (page start for the fast case)
  // offset: bit offset of store position in the remembered set
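  // For example, a bit offset of 70 lies in remembered set word 70 / 32 = 2,
  // i.e. at byte offset 2 * kIntSize = 8; clearing the low five bits below
  // (70 -> 64) and shifting right by kRSetWordShift gives 64 >> 3 = 8, the
  // same byte offset in a single shift.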
  bic(scratch, offset, Operand(kBitsPerInt - 1));  // clear the bit offset
  add(object, object, Operand(scratch, LSR, kRSetWordShift));
  // Get bit offset in the rset word.
  // object: address of remembered set word
  // offset: bit offset of store position
  and_(offset, offset, Operand(kBitsPerInt - 1));

  ldr(scratch, MemOperand(object));
  mov(ip, Operand(1));
  orr(scratch, scratch, Operand(ip, LSL, offset));
  str(scratch, MemOperand(object));
}


void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                Label* branch) {
  ASSERT(cc == eq || cc == ne);
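  // New space occupies a single aligned region, so masking off the low bits
  // of the address and comparing against the start of new space tells us
  // whether the object lies inside it.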
  and_(scratch, object, Operand(ExternalReference::new_space_mask()));
  cmp(scratch, Operand(ExternalReference::new_space_start()));
  b(cc, branch);
}


// Will clobber 4 registers: object, offset, scratch, ip. The
// register 'object' contains a heap object pointer. The heap object
// tag is shifted away.
void MacroAssembler::RecordWrite(Register object, Register offset,
                                 Register scratch) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are cp.
  ASSERT(!object.is(cp) && !offset.is(cp) && !scratch.is(cp));

  Label done;

  // First, test that the object is not in the new space. We cannot set
  // remembered set bits in the new space.
  InNewSpace(object, scratch, eq, &done);

  // Record the actual write.
  RecordWriteHelper(object, offset, scratch);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    mov(object, Operand(BitCast<int32_t>(kZapValue)));
    mov(offset, Operand(BitCast<int32_t>(kZapValue)));
    mov(scratch, Operand(BitCast<int32_t>(kZapValue)));
  }
}


void MacroAssembler::EnterFrame(StackFrame::Type type) {
  // r0-r3: preserved
  stm(db_w, sp, cp.bit() | fp.bit() | lr.bit());
  mov(ip, Operand(Smi::FromInt(type)));
  push(ip);
  mov(ip, Operand(CodeObject()));
  push(ip);
  add(fp, sp, Operand(3 * kPointerSize));  // Adjust FP to point to saved FP.
}


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  // r0: preserved
  // r1: preserved
  // r2: preserved

  // Drop the execution stack down to the frame pointer and restore
  // the caller frame pointer and return address.
  mov(sp, fp);
  ldm(ia_w, sp, fp.bit() | lr.bit());
}


void MacroAssembler::EnterExitFrame(ExitFrame::Mode mode) {
  // Compute the argv pointer and keep it in a callee-saved register.
  // r0 is argc.
  add(r6, sp, Operand(r0, LSL, kPointerSizeLog2));
  sub(r6, r6, Operand(kPointerSize));

  // Compute callee's stack pointer before making changes and save it as
  // ip register so that it is restored as sp register on exit, thereby
  // popping the args.

  // ip = sp + kPointerSize * #args;
  add(ip, sp, Operand(r0, LSL, kPointerSizeLog2));

  // Prepare the stack to be aligned when calling into C. After this point there
  // are 5 pushes before the call into C, so the stack needs to be aligned after
  // 5 pushes.
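  // With an 8-byte frame alignment and 4-byte pointers, those five word-sized
  // pushes move sp by 20 bytes, an odd number of words, so sp must be
  // misaligned here for it to end up aligned at the call; the conditional
  // push below inserts one filler word whenever sp is currently aligned.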
  int frame_alignment = ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment != kPointerSize) {
    // The following code needs to be more general if this assert does not hold.
    ASSERT(frame_alignment == 2 * kPointerSize);
    // With 5 pushes left the frame must be unaligned at this point.
    mov(r7, Operand(Smi::FromInt(0)));
    tst(sp, Operand((frame_alignment - kPointerSize) & frame_alignment_mask));
    push(r7, eq);  // Push if aligned to make it unaligned.
  }

  // Push in reverse order: caller_fp, sp_on_exit, and caller_pc.
  stm(db_w, sp, fp.bit() | ip.bit() | lr.bit());
  mov(fp, Operand(sp));  // Setup new frame pointer.

  mov(ip, Operand(CodeObject()));
  push(ip);  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  mov(ip, Operand(ExternalReference(Top::k_c_entry_fp_address)));
  str(fp, MemOperand(ip));
  mov(ip, Operand(ExternalReference(Top::k_context_address)));
  str(cp, MemOperand(ip));

  // Setup argc and the builtin function in callee-saved registers.
  mov(r4, Operand(r0));
  mov(r5, Operand(r1));


#ifdef ENABLE_DEBUGGER_SUPPORT
  // Save the state of all registers to the stack from the memory
  // location. This is needed to allow nested break points.
  if (mode == ExitFrame::MODE_DEBUG) {
    // Use sp as base to push.
    CopyRegistersFromMemoryToStack(sp, kJSCallerSaved);
  }
#endif
}


void MacroAssembler::InitializeNewString(Register string,
                                         Register length,
                                         Heap::RootListIndex map_index,
                                         Register scratch1,
                                         Register scratch2) {
  mov(scratch1, Operand(length, LSL, kSmiTagSize));
  LoadRoot(scratch2, map_index);
  str(scratch1, FieldMemOperand(string, String::kLengthOffset));
  mov(scratch1, Operand(String::kEmptyHashField));
  str(scratch2, FieldMemOperand(string, HeapObject::kMapOffset));
  str(scratch1, FieldMemOperand(string, String::kHashFieldOffset));
}


int MacroAssembler::ActivationFrameAlignment() {
#if defined(V8_HOST_ARCH_ARM)
  // Running on the real platform. Use the alignment as mandated by the local
  // environment.
  // Note: This will break if we ever start generating snapshots on one ARM
  // platform for another ARM platform with a different alignment.
  return OS::ActivationFrameAlignment();
#else  // defined(V8_HOST_ARCH_ARM)
  // If we are using the simulator then we should always align to the expected
  // alignment. As the simulator is used to generate snapshots we do not know
  // if the target platform will need alignment, so this is controlled from a
  // flag.
  return FLAG_sim_stack_alignment;
#endif  // defined(V8_HOST_ARCH_ARM)
}


void MacroAssembler::LeaveExitFrame(ExitFrame::Mode mode) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Restore the memory copy of the registers by digging them out from
  // the stack. This is needed to allow nested break points.
  if (mode == ExitFrame::MODE_DEBUG) {
    // This code intentionally clobbers r2 and r3.
    const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
    const int kOffset = ExitFrameConstants::kCodeOffset - kCallerSavedSize;
    add(r3, fp, Operand(kOffset));
    CopyRegistersFromStackToMemory(r3, r2, kJSCallerSaved);
  }
#endif

  // Clear top frame.
  mov(r3, Operand(0));
  mov(ip, Operand(ExternalReference(Top::k_c_entry_fp_address)));
  str(r3, MemOperand(ip));

  // Restore current context from top and clear it in debug mode.
  mov(ip, Operand(ExternalReference(Top::k_context_address)));
  ldr(cp, MemOperand(ip));
#ifdef DEBUG
  str(r3, MemOperand(ip));
#endif

  // Pop the arguments, restore registers, and return.
  mov(sp, Operand(fp));  // respect ABI stack constraint
  ldm(ia, sp, fp.bit() | sp.bit() | pc.bit());
}


void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    Register code_reg,
                                    Label* done,
                                    InvokeFlag flag) {
  bool definitely_matches = false;
  Label regular_invoke;

  // Check whether the expected and actual arguments count match. If not,
  // setup registers according to contract with ArgumentsAdaptorTrampoline:
  //  r0: actual arguments count
  //  r1: function (passed through to callee)
  //  r2: expected arguments count
  //  r3: callee code entry

  // The code below is made a lot easier because the calling code already sets
  // up actual and expected registers according to the contract if values are
  // passed in registers.
  ASSERT(actual.is_immediate() || actual.reg().is(r0));
  ASSERT(expected.is_immediate() || expected.reg().is(r2));
  ASSERT((!code_constant.is_null() && code_reg.is(no_reg)) || code_reg.is(r3));

  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(r0, Operand(actual.immediate()));
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaptation code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        mov(r2, Operand(expected.immediate()));
      }
    }
  } else {
    if (actual.is_immediate()) {
      cmp(expected.reg(), Operand(actual.immediate()));
      b(eq, &regular_invoke);
      mov(r0, Operand(actual.immediate()));
    } else {
      cmp(expected.reg(), Operand(actual.reg()));
      b(eq, &regular_invoke);
    }
  }

  if (!definitely_matches) {
    if (!code_constant.is_null()) {
      mov(r3, Operand(code_constant));
      add(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag));
    }

    Handle<Code> adaptor =
        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
    if (flag == CALL_FUNCTION) {
      Call(adaptor, RelocInfo::CODE_TARGET);
      b(done);
    } else {
      Jump(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&regular_invoke);
  }
}


void MacroAssembler::InvokeCode(Register code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag) {
  Label done;

  InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
  if (flag == CALL_FUNCTION) {
    Call(code);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    Jump(code);
  }

  // Continue here if InvokePrologue does handle the invocation due to
  // mismatched parameter counts.
  bind(&done);
}


void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag) {
  Label done;

  InvokePrologue(expected, actual, code, no_reg, &done, flag);
  if (flag == CALL_FUNCTION) {
    Call(code, rmode);
  } else {
    Jump(code, rmode);
  }

  // Continue here if InvokePrologue does handle the invocation due to
  // mismatched parameter counts.
  bind(&done);
}


void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  // Contract with called JS functions requires that function is passed in r1.
  ASSERT(fun.is(r1));

  Register expected_reg = r2;
  Register code_reg = r3;

  ldr(code_reg, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
  ldr(expected_reg,
      FieldMemOperand(code_reg,
                      SharedFunctionInfo::kFormalParameterCountOffset));
  ldr(code_reg,
      MemOperand(code_reg, SharedFunctionInfo::kCodeOffset - kHeapObjectTag));
  add(code_reg, code_reg, Operand(Code::kHeaderSize - kHeapObjectTag));

  ParameterCount expected(expected_reg);
  InvokeCode(code_reg, expected, actual, flag);
}


void MacroAssembler::InvokeFunction(JSFunction* function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(function->is_compiled());

  // Get the function and setup the context.
  mov(r1, Operand(Handle<JSFunction>(function)));
  ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  // Invoke the cached code.
  Handle<Code> code(function->code());
  ParameterCount expected(function->shared()->formal_parameter_count());
  InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag);
}

#ifdef ENABLE_DEBUGGER_SUPPORT
void MacroAssembler::SaveRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of registers to memory location.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      mov(ip, Operand(ExternalReference(Debug_Address::Register(i))));
      str(reg, MemOperand(ip));
    }
  }
}


void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of memory location to registers.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      mov(ip, Operand(ExternalReference(Debug_Address::Register(i))));
      ldr(reg, MemOperand(ip));
    }
  }
}


void MacroAssembler::CopyRegistersFromMemoryToStack(Register base,
                                                    RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of the memory location to the stack and adjust base.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      mov(ip, Operand(ExternalReference(Debug_Address::Register(i))));
      ldr(ip, MemOperand(ip));
      str(ip, MemOperand(base, 4, NegPreIndex));
    }
  }
}


void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
                                                    Register scratch,
                                                    RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of the stack to the memory location and adjust base.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      mov(ip, Operand(ExternalReference(Debug_Address::Register(i))));
      ldr(scratch, MemOperand(base, 4, PostIndex));
      str(scratch, MemOperand(ip));
    }
  }
}


void MacroAssembler::DebugBreak() {
  ASSERT(allow_stub_calls());
  mov(r0, Operand(0));
  mov(r1, Operand(ExternalReference(Runtime::kDebugBreak)));
  CEntryStub ces(1);
  Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}
#endif


void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  // Adjust this code if not the case.
  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
  // The pc (return address) is passed in register lr.
  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      mov(r3, Operand(StackHandler::TRY_CATCH));
    } else {
      mov(r3, Operand(StackHandler::TRY_FINALLY));
    }
    ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize
           && StackHandlerConstants::kFPOffset == 2 * kPointerSize
           && StackHandlerConstants::kPCOffset == 3 * kPointerSize);
    stm(db_w, sp, r3.bit() | fp.bit() | lr.bit());
    // Save the current handler as the next handler.
    mov(r3, Operand(ExternalReference(Top::k_handler_address)));
    ldr(r1, MemOperand(r3));
    ASSERT(StackHandlerConstants::kNextOffset == 0);
    push(r1);
    // Link this handler as the new current one.
    str(sp, MemOperand(r3));
  } else {
    // Must preserve r0-r4, r5-r7 are available.
    ASSERT(try_location == IN_JS_ENTRY);
    // The frame pointer does not point to a JS frame so we save NULL
    // for fp. We expect the code throwing an exception to check fp
    // before dereferencing it to restore the context.
    mov(ip, Operand(0));  // To save a NULL frame pointer.
    mov(r6, Operand(StackHandler::ENTRY));
    ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize
           && StackHandlerConstants::kFPOffset == 2 * kPointerSize
           && StackHandlerConstants::kPCOffset == 3 * kPointerSize);
    stm(db_w, sp, r6.bit() | ip.bit() | lr.bit());
    // Save the current handler as the next handler.
    mov(r7, Operand(ExternalReference(Top::k_handler_address)));
    ldr(r6, MemOperand(r7));
    ASSERT(StackHandlerConstants::kNextOffset == 0);
    push(r6);
    // Link this handler as the new current one.
    str(sp, MemOperand(r7));
  }
}


void MacroAssembler::PopTryHandler() {
  ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
  pop(r1);
  mov(ip, Operand(ExternalReference(Top::k_handler_address)));
  add(sp, sp, Operand(StackHandlerConstants::kSize - kPointerSize));
  str(r1, MemOperand(ip));
}


Register MacroAssembler::CheckMaps(JSObject* object, Register object_reg,
                                   JSObject* holder, Register holder_reg,
                                   Register scratch,
                                   int save_at_depth,
                                   Label* miss) {
  // Make sure there's no overlap between scratch and the other
  // registers.
  ASSERT(!scratch.is(object_reg) && !scratch.is(holder_reg));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

  if (save_at_depth == depth) {
    str(reg, MemOperand(sp));
  }

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  while (object != holder) {
    depth++;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

    // Get the map of the current object.
    ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
    cmp(scratch, Operand(Handle<Map>(object->map())));

    // Branch on the result of the map check.
    b(ne, miss);

    // Check access rights to the global object. This has to happen
    // after the map check so that we know that the object is
    // actually a global object.
    if (object->IsJSGlobalProxy()) {
      CheckAccessGlobalProxy(reg, scratch, miss);
      // Restore scratch register to be the map of the object. In the
      // new space case below, we load the prototype from the map in
      // the scratch register.
      ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
    }

    reg = holder_reg;  // from now the object is in holder_reg
    JSObject* prototype = JSObject::cast(object->GetPrototype());
    if (Heap::InNewSpace(prototype)) {
      // The prototype is in new space; we cannot store a reference
      // to it in the code. Load it from the map.
      ldr(reg, FieldMemOperand(scratch, Map::kPrototypeOffset));
    } else {
      // The prototype is in old space; load it directly.
      mov(reg, Operand(Handle<JSObject>(prototype)));
    }

    if (save_at_depth == depth) {
      str(reg, MemOperand(sp));
    }

    // Go to the next object in the prototype chain.
    object = prototype;
  }

  // Check the holder map.
  ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
  cmp(scratch, Operand(Handle<Map>(object->map())));
  b(ne, miss);

  // Log the check depth.
  LOG(IntEvent("check-maps-depth", depth + 1));

  // Perform security check for access to the global object and return
  // the holder register.
  ASSERT(object == holder);
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
  if (object->IsJSGlobalProxy()) {
    CheckAccessGlobalProxy(reg, scratch, miss);
  }
  return reg;
}


void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));
  ASSERT(!holder_reg.is(ip));
  ASSERT(!scratch.is(ip));

  // Load current lexical context from the stack frame.
  ldr(scratch, MemOperand(fp, StandardFrameConstants::kContextOffset));
  // In debug mode, make sure the lexical context is set.
#ifdef DEBUG
  cmp(scratch, Operand(0));
  Check(ne, "we should not have an empty lexical context");
#endif

  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  ldr(scratch, FieldMemOperand(scratch, offset));
  ldr(scratch, FieldMemOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    // TODO(119): avoid push(holder_reg)/pop(holder_reg)
    // Cannot use ip as a temporary in this verification code. Due to the fact
    // that ip is clobbered as part of cmp with an object Operand.
    push(holder_reg);  // Temporarily save holder on the stack.
    // Read the first word and compare to the global_context_map.
    ldr(holder_reg, FieldMemOperand(scratch, HeapObject::kMapOffset));
    LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
    cmp(holder_reg, ip);
    Check(eq, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);  // Restore holder.
  }

  // Check if both contexts are the same.
  ldr(ip, FieldMemOperand(holder_reg, JSGlobalProxy::kContextOffset));
  cmp(scratch, Operand(ip));
  b(eq, &same_contexts);

  // Check the context is a global context.
  if (FLAG_debug_code) {
    // TODO(119): avoid push(holder_reg)/pop(holder_reg)
    // Cannot use ip as a temporary in this verification code. Due to the fact
    // that ip is clobbered as part of cmp with an object Operand.
    push(holder_reg);  // Temporarily save holder on the stack.
    mov(holder_reg, ip);  // Move ip to its holding place.
    LoadRoot(ip, Heap::kNullValueRootIndex);
    cmp(holder_reg, ip);
    Check(ne, "JSGlobalProxy::context() should not be null.");

    ldr(holder_reg, FieldMemOperand(holder_reg, HeapObject::kMapOffset));
    LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
    cmp(holder_reg, ip);
    Check(eq, "JSGlobalObject::global_context should be a global context.");
    // Restore ip is not needed. ip is reloaded below.
    pop(holder_reg);  // Restore holder.
    // Restore ip to holder's context.
    ldr(ip, FieldMemOperand(holder_reg, JSGlobalProxy::kContextOffset));
  }

  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;

  ldr(scratch, FieldMemOperand(scratch, token_offset));
  ldr(ip, FieldMemOperand(ip, token_offset));
  cmp(scratch, Operand(ip));
  b(ne, miss);

  bind(&same_contexts);
}


void MacroAssembler::AllocateInNewSpace(int object_size,
                                        Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(scratch1));
  ASSERT(!scratch1.is(scratch2));

  // Make object size into bytes.
  if ((flags & SIZE_IN_WORDS) != 0) {
    object_size *= kPointerSize;
  }
  ASSERT_EQ(0, object_size & kObjectAlignmentMask);

  // Load address of new object into result and allocation top address into
  // scratch1.
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();
  mov(scratch1, Operand(new_space_allocation_top));
  if ((flags & RESULT_CONTAINS_TOP) == 0) {
    ldr(result, MemOperand(scratch1));
  } else if (FLAG_debug_code) {
    // Assert that result actually contains top on entry. scratch2 is used
    // immediately below so this use of scratch2 does not cause difference with
    // respect to register content between debug and release mode.
    ldr(scratch2, MemOperand(scratch1));
    cmp(result, scratch2);
    Check(eq, "Unexpected allocation top");
  }

  // Calculate new top and bail out if new space is exhausted. Use result
  // to calculate the new top.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  mov(scratch2, Operand(new_space_allocation_limit));
  ldr(scratch2, MemOperand(scratch2));
  add(result, result, Operand(object_size));
  cmp(result, Operand(scratch2));
  b(hi, gc_required);
  str(result, MemOperand(scratch1));

  // Tag and adjust back to start of new object.
  if ((flags & TAG_OBJECT) != 0) {
    sub(result, result, Operand(object_size - kHeapObjectTag));
  } else {
    sub(result, result, Operand(object_size));
  }
}


void MacroAssembler::AllocateInNewSpace(Register object_size,
                                        Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(scratch1));
  ASSERT(!scratch1.is(scratch2));

  // Load address of new object into result and allocation top address into
  // scratch1.
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();
  mov(scratch1, Operand(new_space_allocation_top));
  if ((flags & RESULT_CONTAINS_TOP) == 0) {
    ldr(result, MemOperand(scratch1));
  } else if (FLAG_debug_code) {
    // Assert that result actually contains top on entry. scratch2 is used
    // immediately below so this use of scratch2 does not cause difference with
    // respect to register content between debug and release mode.
    ldr(scratch2, MemOperand(scratch1));
    cmp(result, scratch2);
    Check(eq, "Unexpected allocation top");
  }

  // Calculate new top and bail out if new space is exhausted. Use result
  // to calculate the new top. The object size may be given in words, in
  // which case a shift is required below to get the size in bytes.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  mov(scratch2, Operand(new_space_allocation_limit));
  ldr(scratch2, MemOperand(scratch2));
  if ((flags & SIZE_IN_WORDS) != 0) {
    add(result, result, Operand(object_size, LSL, kPointerSizeLog2));
  } else {
    add(result, result, Operand(object_size));
  }
  cmp(result, Operand(scratch2));
  b(hi, gc_required);

  // Update allocation top. result temporarily holds the new top.
  if (FLAG_debug_code) {
    tst(result, Operand(kObjectAlignmentMask));
    Check(eq, "Unaligned allocation in new space");
  }
  str(result, MemOperand(scratch1));

  // Adjust back to start of new object.
  if ((flags & SIZE_IN_WORDS) != 0) {
    sub(result, result, Operand(object_size, LSL, kPointerSizeLog2));
  } else {
    sub(result, result, Operand(object_size));
  }

  // Tag object if requested.
  if ((flags & TAG_OBJECT) != 0) {
    add(result, result, Operand(kHeapObjectTag));
  }
}


void MacroAssembler::UndoAllocationInNewSpace(Register object,
                                              Register scratch) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Make sure the object has no tag before resetting top.
  and_(object, object, Operand(~kHeapObjectTagMask));
#ifdef DEBUG
  // Check that the object un-allocated is below the current top.
  mov(scratch, Operand(new_space_allocation_top));
  ldr(scratch, MemOperand(scratch));
  cmp(object, scratch);
  Check(lt, "Undo allocation of non allocated memory");
#endif
  // Write the address of the object to un-allocate as the current top.
  mov(scratch, Operand(new_space_allocation_top));
  str(object, MemOperand(scratch));
}


void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, Operand(length, LSL, 1));  // Length in bytes, not chars.
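  // Adding kObjectAlignmentMask before masking it off below rounds the total
  // size (header plus character payload) up to the next object-alignment
  // boundary.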
  add(scratch1, scratch1,
      Operand(kObjectAlignmentMask + SeqTwoByteString::kHeaderSize));
  and_(scratch1, scratch1, Operand(~kObjectAlignmentMask));

  // Allocate two-byte string in new space.
  AllocateInNewSpace(scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  InitializeNewString(result,
                      length,
                      Heap::kStringMapRootIndex,
                      scratch1,
                      scratch2);
}


void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
  ASSERT(kCharSize == 1);
  add(scratch1, length,
      Operand(kObjectAlignmentMask + SeqAsciiString::kHeaderSize));
  and_(scratch1, scratch1, Operand(~kObjectAlignmentMask));

  // Allocate ASCII string in new space.
  AllocateInNewSpace(scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  InitializeNewString(result,
                      length,
                      Heap::kAsciiStringMapRootIndex,
                      scratch1,
                      scratch2);
}


void MacroAssembler::AllocateTwoByteConsString(Register result,
                                               Register length,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  InitializeNewString(result,
                      length,
                      Heap::kConsStringMapRootIndex,
                      scratch1,
                      scratch2);
}


void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register length,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  InitializeNewString(result,
                      length,
                      Heap::kConsAsciiStringMapRootIndex,
                      scratch1,
                      scratch2);
}


void MacroAssembler::CompareObjectType(Register object,
                                       Register map,
                                       Register type_reg,
                                       InstanceType type) {
  ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
  CompareInstanceType(map, type_reg, type);
}


void MacroAssembler::CompareInstanceType(Register map,
                                         Register type_reg,
                                         InstanceType type) {
  ldrb(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
  cmp(type_reg, Operand(type));
}


void MacroAssembler::CheckMap(Register obj,
                              Register scratch,
                              Handle<Map> map,
                              Label* fail,
                              bool is_heap_object) {
  if (!is_heap_object) {
    BranchOnSmi(obj, fail);
  }
  ldr(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
  mov(ip, Operand(map));
  cmp(scratch, ip);
  b(ne, fail);
}


void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss) {
  // Check that the receiver isn't a smi.
  BranchOnSmi(function, miss);

  // Check that the function really is a function. Load map into result reg.
  CompareObjectType(function, result, scratch, JS_FUNCTION_TYPE);
  b(ne, miss);

  // Make sure that the function has an instance prototype.
  Label non_instance;
  ldrb(scratch, FieldMemOperand(result, Map::kBitFieldOffset));
  tst(scratch, Operand(1 << Map::kHasNonInstancePrototype));
  b(ne, &non_instance);

  // Get the prototype or initial map from the function.
  ldr(result,
      FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  cmp(result, ip);
  b(eq, miss);

  // If the function does not have an initial map, we're done.
  Label done;
  CompareObjectType(result, scratch, scratch, MAP_TYPE);
  b(ne, &done);

  // Get the prototype from the initial map.
  ldr(result, FieldMemOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  ldr(result, FieldMemOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}


void MacroAssembler::CallStub(CodeStub* stub, Condition cond) {
  ASSERT(allow_stub_calls());  // stub calls are not allowed in some stubs
  Call(stub->GetCode(), RelocInfo::CODE_TARGET, cond);
}


void MacroAssembler::TailCallStub(CodeStub* stub, Condition cond) {
  ASSERT(allow_stub_calls());  // stub calls are not allowed in some stubs
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond);
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  if (argc > 1) {
    add(sp, sp, Operand((argc - 1) * kPointerSize));
  }
  Ret();
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    add(sp, sp, Operand(num_arguments * kPointerSize));
  }
  LoadRoot(r0, Heap::kUndefinedValueRootIndex);
}


void MacroAssembler::IntegerToDoubleConversionWithVFP3(Register inReg,
                                                       Register outHighReg,
                                                       Register outLowReg) {
  // ARMv7 VFP3 instructions to implement integer to double conversion.
  mov(r7, Operand(inReg, ASR, kSmiTagSize));
  vmov(s15, r7);
  vcvt_f64_s32(d7, s15);
  vmov(outLowReg, outHighReg, d7);
}


void MacroAssembler::GetLeastBitsFromSmi(Register dst,
                                         Register src,
                                         int num_least_bits) {
  if (CpuFeatures::IsSupported(ARMv7)) {
    ubfx(dst, src, Operand(kSmiTagSize), Operand(num_least_bits - 1));
  } else {
    mov(dst, Operand(src, ASR, kSmiTagSize));
    and_(dst, dst, Operand((1 << num_least_bits) - 1));
  }
}


void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // All parameters are on the stack. r0 has the return value after call.

  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  mov(r0, Operand(num_arguments));
  mov(r1, Operand(ExternalReference(f)));
  CEntryStub stub(1);
  CallStub(&stub);
}


void MacroAssembler::CallRuntime(Runtime::FunctionId fid, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(fid), num_arguments);
}


void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments) {
  mov(r0, Operand(num_arguments));
  mov(r1, Operand(ext));

  CEntryStub stub(1);
  CallStub(&stub);
}


void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  mov(r0, Operand(num_arguments));
  JumpToExternalReference(ext);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid), num_arguments, result_size);
}


void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin) {
#if defined(__thumb__)
  // Thumb mode builtin.
  ASSERT((reinterpret_cast<intptr_t>(builtin.address()) & 1) == 1);
#endif
  mov(r1, Operand(builtin));
  CEntryStub stub(1);
  Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                   InvokeJSFlags flags) {
  GetBuiltinEntry(r2, id);
  if (flags == CALL_JS) {
    Call(r2);
  } else {
    ASSERT(flags == JUMP_JS);
    Jump(r2);
  }
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(r1));

  // Load the builtins object into target register.
  ldr(target, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
  ldr(target, FieldMemOperand(target, GlobalObject::kBuiltinsOffset));

  // Load the JavaScript builtin function from the builtins object.
  ldr(r1, FieldMemOperand(target,
                          JSBuiltinsObject::OffsetOfFunctionWithId(id)));

  // Load the code entry point from the builtins object.
  ldr(target, FieldMemOperand(target,
                              JSBuiltinsObject::OffsetOfCodeWithId(id)));
  if (FLAG_debug_code) {
    // Make sure the code objects in the builtins object and in the
    // builtin function are the same.
    push(r1);
    ldr(r1, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
    ldr(r1, FieldMemOperand(r1, SharedFunctionInfo::kCodeOffset));
    cmp(r1, target);
    Assert(eq, "Builtin code object changed");
    pop(r1);
  }
  add(target, target, Operand(Code::kHeaderSize - kHeapObjectTag));
}


void MacroAssembler::SetCounter(StatsCounter* counter, int value,
                                Register scratch1, Register scratch2) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(scratch1, Operand(value));
    mov(scratch2, Operand(ExternalReference(counter)));
    str(scratch1, MemOperand(scratch2));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value,
                                      Register scratch1, Register scratch2) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(scratch2, Operand(ExternalReference(counter)));
    ldr(scratch1, MemOperand(scratch2));
    add(scratch1, scratch1, Operand(value));
    str(scratch1, MemOperand(scratch2));
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value,
                                      Register scratch1, Register scratch2) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(scratch2, Operand(ExternalReference(counter)));
    ldr(scratch1, MemOperand(scratch2));
    sub(scratch1, scratch1, Operand(value));
    str(scratch1, MemOperand(scratch2));
  }
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code)
    Check(cc, msg);
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  b(cc, &L);
  Abort(msg);
  // will not return here
  bind(&L);
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC problems;
  // however, msg is not guaranteed to be aligned properly. Instead, we
  // pass an aligned pointer that is a proper v8 smi, and also pass the
  // alignment difference from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
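  // With the usual kSmiTag == 0 and kSmiTagMask == 1, p0 is simply msg
  // rounded down to an even address (and therefore a valid smi), and p1 - p0
  // is 0 or 1, which the runtime adds back to recover the original pointer.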
1457#ifdef DEBUG
1458 if (msg != NULL) {
1459 RecordComment("Abort message: ");
1460 RecordComment(msg);
1461 }
1462#endif
Steve Blockd0582a62009-12-15 09:54:21 +00001463 // Disable stub call restrictions to always allow calls to abort.
1464 set_allow_stub_calls(true);
1465
Steve Blocka7e24c12009-10-30 11:49:00 +00001466 mov(r0, Operand(p0));
1467 push(r0);
1468 mov(r0, Operand(Smi::FromInt(p1 - p0)));
1469 push(r0);
1470 CallRuntime(Runtime::kAbort, 2);
1471 // will not return here
1472}


void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    ldr(dst, MemOperand(cp, Context::SlotOffset(Context::CLOSURE_INDEX)));
    // Load the function context (which is the incoming, outer context).
    ldr(dst, FieldMemOperand(dst, JSFunction::kContextOffset));
    for (int i = 1; i < context_chain_length; i++) {
      ldr(dst, MemOperand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
      ldr(dst, FieldMemOperand(dst, JSFunction::kContextOffset));
    }
    // The context may be an intermediate context, not a function context.
    ldr(dst, MemOperand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  } else {  // Slot is in the current function context.
    // The context may be an intermediate context, not a function context.
    ldr(dst, MemOperand(cp, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  }
}


void MacroAssembler::JumpIfNotBothSmi(Register reg1,
                                      Register reg2,
                                      Label* on_not_both_smi) {
  ASSERT_EQ(0, kSmiTag);
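  // The second tst below is predicated on eq, so it only executes when reg1
  // is a smi; otherwise the ne result of the first tst survives in the flags
  // and the branch is taken.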
  tst(reg1, Operand(kSmiTagMask));
  tst(reg2, Operand(kSmiTagMask), eq);
  b(ne, on_not_both_smi);
}


void MacroAssembler::JumpIfEitherSmi(Register reg1,
                                     Register reg2,
                                     Label* on_either_smi) {
  ASSERT_EQ(0, kSmiTag);
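  // The second tst is predicated on ne, so it only executes when reg1 is not
  // a smi; the branch is taken as soon as either register is found to have a
  // clear smi tag bit.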
  tst(reg1, Operand(kSmiTagMask));
  tst(reg2, Operand(kSmiTagMask), ne);
  b(eq, on_either_smi);
}


void MacroAssembler::JumpIfNonSmisNotBothSequentialAsciiStrings(
    Register first,
    Register second,
    Register scratch1,
    Register scratch2,
    Label* failure) {
  // Test that both first and second are sequential ASCII strings.
  // Assume that they are non-smis.
  ldr(scratch1, FieldMemOperand(first, HeapObject::kMapOffset));
  ldr(scratch2, FieldMemOperand(second, HeapObject::kMapOffset));
  ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  ldrb(scratch2, FieldMemOperand(scratch2, Map::kInstanceTypeOffset));

  JumpIfBothInstanceTypesAreNotSequentialAscii(scratch1,
                                               scratch2,
                                               scratch1,
                                               scratch2,
                                               failure);
}


void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register first,
                                                         Register second,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         Label* failure) {
  // Check that neither is a smi.
  ASSERT_EQ(0, kSmiTag);
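  // If either operand is a smi its low tag bit is clear, so the tag bit of
  // (first AND second) is clear exactly when at least one of them is a smi.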
  and_(scratch1, first, Operand(second));
  tst(scratch1, Operand(kSmiTagMask));
  b(eq, failure);
  JumpIfNonSmisNotBothSequentialAsciiStrings(first,
                                             second,
                                             scratch1,
                                             scratch2,
                                             failure);
}


// Allocates a heap number or jumps to the gc_required label if the young
// space is full and a scavenge is needed.
void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate an object in the heap for the heap number and tag it as a heap
  // object.
  AllocateInNewSpace(HeapNumber::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Get heap number map and store it in the allocated object.
  LoadRoot(scratch1, Heap::kHeapNumberMapRootIndex);
  str(scratch1, FieldMemOperand(result, HeapObject::kMapOffset));
}
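
// Usage sketch (hypothetical caller): allocate a heap number and fill in its
// value from a VFP register, assuming VFP3 support and that d0 holds the
// value while r4 and r5 are free scratch registers:
//   __ AllocateHeapNumber(r0, r4, r5, &call_runtime);
//   __ sub(r1, r0, Operand(kHeapObjectTag));
//   __ vstr(d0, r1, HeapNumber::kValueOffset);
// Only the map is written here; the value field is left for the caller.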


void MacroAssembler::CountLeadingZeros(Register source,
                                       Register scratch,
                                       Register zeros) {
#ifdef CAN_USE_ARMV5_INSTRUCTIONS
  clz(zeros, source);  // clz is available on ARMv5 and later architectures.
#else
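  // Software fallback: binary search for the most significant set bit.  Each
  // step tests the top half of the remaining window; if it is clear, the
  // count is increased and the window is shifted up.  The add and mov are
  // predicated on eq so they reuse the flags set by the preceding tst.
  // Note: for a zero input this yields 31, whereas clz returns 32; callers
  // are assumed never to pass zero.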
  mov(zeros, Operand(0));
  mov(scratch, source);
  // Top 16.
  tst(scratch, Operand(0xffff0000));
  add(zeros, zeros, Operand(16), LeaveCC, eq);
  mov(scratch, Operand(scratch, LSL, 16), LeaveCC, eq);
  // Top 8.
  tst(scratch, Operand(0xff000000));
  add(zeros, zeros, Operand(8), LeaveCC, eq);
  mov(scratch, Operand(scratch, LSL, 8), LeaveCC, eq);
  // Top 4.
  tst(scratch, Operand(0xf0000000));
  add(zeros, zeros, Operand(4), LeaveCC, eq);
  mov(scratch, Operand(scratch, LSL, 4), LeaveCC, eq);
  // Top 2.
  tst(scratch, Operand(0xc0000000));
  add(zeros, zeros, Operand(2), LeaveCC, eq);
  mov(scratch, Operand(scratch, LSL, 2), LeaveCC, eq);
  // Top bit.
  tst(scratch, Operand(0x80000000u));
  add(zeros, zeros, Operand(1), LeaveCC, eq);
#endif
}


void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialAscii(
    Register first,
    Register second,
    Register scratch1,
    Register scratch2,
    Label* failure) {
  int kFlatAsciiStringMask =
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask;
  int kFlatAsciiStringTag = ASCII_STRING_TYPE;
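  // ASCII_STRING_TYPE is the instance type of a sequential ASCII string, so
  // after masking out all but the string, encoding and representation bits
  // it is exactly the value both operands must match.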
  and_(scratch1, first, Operand(kFlatAsciiStringMask));
  and_(scratch2, second, Operand(kFlatAsciiStringMask));
  cmp(scratch1, Operand(kFlatAsciiStringTag));
  // Ignore second test if first test failed.
  cmp(scratch2, Operand(kFlatAsciiStringTag), eq);
  b(ne, failure);
}


void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(Register type,
                                                            Register scratch,
                                                            Label* failure) {
  int kFlatAsciiStringMask =
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask;
  int kFlatAsciiStringTag = ASCII_STRING_TYPE;
  and_(scratch, type, Operand(kFlatAsciiStringMask));
  cmp(scratch, Operand(kFlatAsciiStringTag));
  b(ne, failure);
}


void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
  int frame_alignment = ActivationFrameAlignment();
  // Up to four simple arguments are passed in registers r0..r3.
  int stack_passed_arguments = (num_arguments <= 4) ? 0 : num_arguments - 4;
  if (frame_alignment > kPointerSize) {
    // Make stack end at alignment and make room for num_arguments - 4 words
    // and the original value of sp.
    mov(scratch, sp);
    sub(sp, sp, Operand((stack_passed_arguments + 1) * kPointerSize));
    ASSERT(IsPowerOf2(frame_alignment));
    and_(sp, sp, Operand(-frame_alignment));
    str(scratch, MemOperand(sp, stack_passed_arguments * kPointerSize));
  } else {
    sub(sp, sp, Operand(stack_passed_arguments * kPointerSize));
  }
}


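// Usage sketch (hypothetical caller): call a C helper taking two word-sized
// arguments, assuming 'ref' is an ExternalReference to the helper and r5 is
// free to use as a scratch register:
//   __ PrepareCallCFunction(2, r5);
//   __ mov(r0, Operand(first_arg));   // first_arg/second_arg are placeholders
//   __ mov(r1, Operand(second_arg));
//   __ CallCFunction(ref, 2);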
void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  mov(ip, Operand(function));
  CallCFunction(ip, num_arguments);
}


void MacroAssembler::CallCFunction(Register function, int num_arguments) {
  // Make sure that the stack is aligned before calling a C function unless
  // running in the simulator. The simulator has its own alignment check which
  // provides more information.
#if defined(V8_HOST_ARCH_ARM)
  if (FLAG_debug_code) {
    int frame_alignment = OS::ActivationFrameAlignment();
    int frame_alignment_mask = frame_alignment - 1;
    if (frame_alignment > kPointerSize) {
      ASSERT(IsPowerOf2(frame_alignment));
      Label alignment_as_expected;
      tst(sp, Operand(frame_alignment_mask));
      b(eq, &alignment_as_expected);
      // Don't use Check here, as it will call Runtime_Abort possibly
      // re-entering here.
      stop("Unexpected alignment");
      bind(&alignment_as_expected);
    }
  }
#endif

  // Just call directly. The function called cannot cause a GC, or
  // allow preemption, so the return address in the link register
  // stays correct.
  Call(function);
  int stack_passed_arguments = (num_arguments <= 4) ? 0 : num_arguments - 4;
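  // Restore the stack pointer.  When PrepareCallCFunction realigned the
  // frame it stored the original sp just above the stack arguments, so
  // reload it from there; otherwise simply drop the argument slots.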
  if (OS::ActivationFrameAlignment() > kPointerSize) {
    ldr(sp, MemOperand(sp, stack_passed_arguments * kPointerSize));
  } else {
    add(sp, sp, Operand(stack_passed_arguments * kPointerSize));
  }
}


#ifdef ENABLE_DEBUGGER_SUPPORT
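// Usage sketch (hypothetical): overwrite a single instruction in already
// generated code; the CodePatcher destructor flushes the instruction cache:
//   CodePatcher patcher(patch_address, 1);
//   patcher.masm()->mov(r0, Operand(0));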
CodePatcher::CodePatcher(byte* address, int instructions)
    : address_(address),
      instructions_(instructions),
      size_(instructions * Assembler::kInstrSize),
      masm_(address, size_ + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate
  // 'size' bytes of instructions without hitting the buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


void CodePatcher::Emit(Instr x) {
  masm()->emit(x);
}


void CodePatcher::Emit(Address addr) {
  masm()->emit(reinterpret_cast<Instr>(addr));
}
#endif  // ENABLE_DEBUGGER_SUPPORT


} }  // namespace v8::internal