// Copyright 2006-2008 Google Inc. All Rights Reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "debug.h"
#include "runtime.h"

namespace v8 { namespace internal {

DECLARE_bool(debug_code);
DECLARE_bool(optimize_locals);


// Give alias names to registers.
Register cp = { 8 };  // JavaScript context pointer
Register pp = { 10 };  // parameter pointer


MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      unresolved_(0),
      generating_stub_(false),
      allow_stub_calls_(true) {
}


// We always generate ARM code, never Thumb code, even if V8 is compiled to
// Thumb, so we require inter-working support.
#if defined(__thumb__) && !defined(__THUMB_INTERWORK__)
#error "flag -mthumb-interwork missing"
#endif


// We do not support Thumb inter-working on ARM architectures below v5t,
// which lack the blx instruction.
#if defined(__THUMB_INTERWORK__)
#if !defined(__ARM_ARCH_5T__) && !defined(__ARM_ARCH_5TE__)
// Add tests for other versions above v5t as required.
#error "for thumb inter-working we require architecture v5t or above"
#endif
#endif


// Using blx may yield better code, so use it when required or when available.
#if defined(__THUMB_INTERWORK__) || defined(__ARM_ARCH_5__)
#define USE_BLX 1
#endif

// Using bx does not yield better code, so use it only when required.
#if defined(__THUMB_INTERWORK__)
#define USE_BX 1
#endif


void MacroAssembler::Jump(Register target, Condition cond) {
#if USE_BX
  bx(target, cond);
#else
  mov(pc, Operand(target), LeaveCC, cond);
#endif
}


void MacroAssembler::Jump(intptr_t target, RelocMode rmode, Condition cond) {
#if USE_BX
  mov(ip, Operand(target, rmode), LeaveCC, cond);
  bx(ip, cond);
#else
  mov(pc, Operand(target, rmode), LeaveCC, cond);
#endif
}


void MacroAssembler::Jump(byte* target, RelocMode rmode, Condition cond) {
  ASSERT(!is_code_target(rmode));
  Jump(reinterpret_cast<intptr_t>(target), rmode, cond);
}


void MacroAssembler::Jump(Handle<Code> code, RelocMode rmode, Condition cond) {
  ASSERT(is_code_target(rmode));
  // 'code' is always generated ARM code, never THUMB code.
  Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond);
}


void MacroAssembler::Call(Register target, Condition cond) {
#if USE_BLX
  blx(target, cond);
#else
  // Set lr for return at current pc + 8.
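  // (Reading pc in ARM state yields the address of the current instruction
  // plus 8, so lr ends up pointing just past the mov to pc below.)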
  mov(lr, Operand(pc), LeaveCC, cond);
  mov(pc, Operand(target), LeaveCC, cond);
#endif
}


void MacroAssembler::Call(intptr_t target, RelocMode rmode, Condition cond) {
#if !defined(__arm__)
  if (rmode == runtime_entry) {
    mov(r2, Operand(target, rmode), LeaveCC, cond);
    // Set lr for return at current pc + 8.
    mov(lr, Operand(pc), LeaveCC, cond);
    // Notify the simulator of the transition to C code.
    swi(assembler::arm::call_rt_r2);
  } else {
    // Set lr for return at current pc + 8.
    mov(lr, Operand(pc), LeaveCC, cond);
    // Emit a ldr<cond> pc, [pc + offset of target in constant pool].
    mov(pc, Operand(target, rmode), LeaveCC, cond);
  }
#else
  // Set lr for return at current pc + 8.
  mov(lr, Operand(pc), LeaveCC, cond);
  // Emit a ldr<cond> pc, [pc + offset of target in constant pool].
  mov(pc, Operand(target, rmode), LeaveCC, cond);
#endif  // !defined(__arm__)
  // If USE_BLX is defined, we could emit a 'mov ip, target', followed by a
  // 'blx ip'; however, the code would not be shorter than the above sequence
  // and the target address of the call would be referenced by the first
  // instruction rather than the second one, which would make it harder to
  // patch (two instructions before the return address, instead of one).
  ASSERT(kTargetAddrToReturnAddrDist == sizeof(Instr));
}


void MacroAssembler::Call(byte* target, RelocMode rmode, Condition cond) {
  ASSERT(!is_code_target(rmode));
  Call(reinterpret_cast<intptr_t>(target), rmode, cond);
}


void MacroAssembler::Call(Handle<Code> code, RelocMode rmode, Condition cond) {
  ASSERT(is_code_target(rmode));
  // 'code' is always generated ARM code, never THUMB code.
  Call(reinterpret_cast<intptr_t>(code.location()), rmode, cond);
}


void MacroAssembler::Ret() {
#if USE_BX
  bx(lr);
#else
  mov(pc, Operand(lr));
#endif
}


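// The code generated by these Push/Pop helpers keeps the current top of
// stack cached in r0 (see the "TOS is r0" and "flush TOS" comments below):
// Push spills the old TOS to memory and makes 'src' the new TOS; Pop hands
// the TOS in r0 to 'dst' and reloads r0 from memory.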
void MacroAssembler::Push(const Operand& src) {
  push(r0);
  mov(r0, src);
}


void MacroAssembler::Push(const MemOperand& src) {
  push(r0);
  ldr(r0, src);
}


void MacroAssembler::Pop(Register dst) {
  mov(dst, Operand(r0));
  pop(r0);
}


void MacroAssembler::Pop(const MemOperand& dst) {
  str(r0, dst);
  pop(r0);
}


// Will clobber 4 registers: object, offset, scratch, ip.  The
// register 'object' contains a heap object pointer.  The heap object
// tag is shifted away.
void MacroAssembler::RecordWrite(Register object, Register offset,
                                 Register scratch) {
  // This is how much we shift the remembered set bit offset to get the
  // offset of the word in the remembered set.  We divide by kBitsPerInt (32,
  // shift right 5) and then multiply by kIntSize (4, shift left 2).
  const int kRSetWordShift = 3;
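  // Worked example (illustrative only): a bit offset of 70 lands in rset
  // word 70 / 32 = 2, i.e. at byte offset 2 * 4 = 8, which is exactly
  // (70 & ~31) >> 3 as computed at the 'fast' label below.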

  Label fast, done;

  // First, test that the object is not in the new space.  We cannot set
  // remembered set bits in the new space.
  // object: heap object pointer (with tag)
  // offset: offset to store location from the object
  and_(scratch, object, Operand(Heap::NewSpaceMask()));
  cmp(scratch, Operand(ExternalReference::new_space_start()));
  b(eq, &done);

  // Compute the bit offset in the remembered set.
  // object: heap object pointer (with tag)
  // offset: offset to store location from the object
  mov(ip, Operand(Page::kPageAlignmentMask));  // load mask only once
  and_(scratch, object, Operand(ip));  // offset into page of the object
  add(offset, scratch, Operand(offset));  // add offset into the object
  mov(offset, Operand(offset, LSR, kObjectAlignmentBits));

  // Compute the page address from the heap object pointer.
  // object: heap object pointer (with tag)
  // offset: bit offset of store position in the remembered set
  bic(object, object, Operand(ip));

  // If the bit offset lies beyond the normal remembered set range, it is in
  // the extra remembered set area of a large object.
  // object: page start
  // offset: bit offset of store position in the remembered set
  cmp(offset, Operand(Page::kPageSize / kPointerSize));
  b(lt, &fast);

  // Adjust the bit offset to be relative to the start of the extra
  // remembered set and the start address to be the address of the extra
  // remembered set.
  sub(offset, offset, Operand(Page::kPageSize / kPointerSize));
  // Load the array length into 'scratch' and multiply by four to get the
  // size in bytes of the elements.
  ldr(scratch, MemOperand(object, Page::kObjectStartOffset
                                  + FixedArray::kLengthOffset));
  mov(scratch, Operand(scratch, LSL, kObjectAlignmentBits));
  // Add the page header (including remembered set), array header, and array
  // body size to the page address.
  add(object, object, Operand(Page::kObjectStartOffset
                              + Array::kHeaderSize));
  add(object, object, Operand(scratch));

  bind(&fast);
  // Get the address of the rset word.
  // object: start of the remembered set (page start for the fast case)
  // offset: bit offset of store position in the remembered set
  bic(scratch, offset, Operand(kBitsPerInt - 1));  // clear the bit offset
  add(object, object, Operand(scratch, LSR, kRSetWordShift));
  // Get the bit offset in the rset word.
  // object: address of remembered set word
  // offset: bit offset of store position
  and_(offset, offset, Operand(kBitsPerInt - 1));

  ldr(scratch, MemOperand(object));
  mov(ip, Operand(1));
  orr(scratch, scratch, Operand(ip, LSL, offset));
  str(scratch, MemOperand(object));

  bind(&done);
}


void MacroAssembler::EnterJSFrame(int argc) {
  // Generate code entering a JS function called from a JS function
  // stack: receiver, arguments
  // r0: number of arguments (not including function, nor receiver)
  // r1: preserved
  // sp: stack pointer
  // fp: frame pointer
  // cp: callee's context
  // pp: caller's parameter pointer
  // lr: return address

  // Compute the parameter pointer before making changes:
  // ip = sp + kPointerSize * (args_len + 1);  // +1 for receiver
  add(ip, sp, Operand(r0, LSL, kPointerSizeLog2));
  add(ip, ip, Operand(kPointerSize));
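  // For example (illustrative only): with two actual arguments (r0 == 2),
  // ip = sp + 2 * 4 + 4, i.e. the caller's stack pointer from before the
  // receiver and the arguments were pushed.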

  // Push extra parameters if we don't have enough
  // (this can only happen if argc > 0 to begin with).
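  // For example (illustrative only): a call that passes r0 == 0 actual
  // arguments into code compiled for argc == 2 pushes two undefined
  // values in the loop below.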
  if (argc > 0) {
    Label loop, done;

    // Assume enough arguments; this is the most common case.
    sub(r2, r0, Operand(argc), SetCC);  // number of missing arguments
    b(ge, &done);  // enough arguments

    // Not enough arguments.
    mov(r3, Operand(Factory::undefined_value()));
    bind(&loop);
    push(r3);
    add(r2, r2, Operand(1), SetCC);
    b(lt, &loop);

    bind(&done);
  }

  mov(r3, Operand(r0));  // args_len to be saved
  mov(r2, Operand(cp));  // context to be saved

  // Push in reverse order: context (r2), args_len (r3), caller_pp, caller_fp,
  // sp_on_exit (ip == pp, may be patched on exit), return address.
  stm(db_w, sp, r2.bit() | r3.bit() | pp.bit() | fp.bit() |
      ip.bit() | lr.bit());

  // Setup new frame pointer.
  add(fp, sp, Operand(-StandardFrameConstants::kContextOffset));
  mov(pp, Operand(ip));  // setup new parameter pointer
  mov(r0, Operand(0));  // spare slot to store caller code object during GC
  // r0: TOS (code slot == 0)
  // r1: preserved
}


void MacroAssembler::ExitJSFrame(ExitJSFlag flag) {
  // r0: result
  // sp: stack pointer
  // fp: frame pointer
  // pp: parameter pointer

  if (flag == DO_NOT_RETURN) {
    // Restore sp as caller_sp (not as pp).
    add(r3, fp, Operand(JavaScriptFrameConstants::kSavedRegistersOffset));
    str(r3, MemOperand(fp, JavaScriptFrameConstants::kSPOnExitOffset));
  }

  if (flag == DO_NOT_RETURN && generating_stub()) {
    // If we're generating a stub, we need to preserve the link
    // register to be able to return to the place the stub was called
    // from.
    mov(ip, Operand(lr));
  }

  mov(sp, Operand(fp));  // respect ABI stack constraint
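  // The single ldm below restores caller_pp, caller_fp, and sp (sp from the
  // sp_on_exit slot, which may have been patched above) and, for RETURN,
  // loads the return address directly into pc.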
  ldm(ia, sp, pp.bit() | fp.bit() | sp.bit() |
      ((flag == RETURN) ? pc.bit() : lr.bit()));

  if (flag == DO_NOT_RETURN && generating_stub()) {
    // Return to the place where the stub was called without
    // clobbering the value of the link register.
    mov(pc, Operand(ip));
  }

  // r0: result
  // sp: points to function arg (if return) or to last arg (if no return)
  // fp: restored frame pointer
  // pp: restored parameter pointer
}


void MacroAssembler::SaveRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of registers to memory locations.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      mov(ip, Operand(ExternalReference(Debug_Address::Register(i))));
      str(reg, MemOperand(ip));
    }
  }
}


void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of memory locations to registers.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      mov(ip, Operand(ExternalReference(Debug_Address::Register(i))));
      ldr(reg, MemOperand(ip));
    }
  }
}


void MacroAssembler::CopyRegistersFromMemoryToStack(Register base,
                                                    RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of the memory locations to the stack and adjust base.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      mov(ip, Operand(ExternalReference(Debug_Address::Register(i))));
      ldr(ip, MemOperand(ip));
      str(ip, MemOperand(base, 4, NegPreIndex));
    }
  }
}


void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
                                                    Register scratch,
                                                    RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of the stack to the memory locations and adjust base.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      mov(ip, Operand(ExternalReference(Debug_Address::Register(i))));
      ldr(scratch, MemOperand(base, 4, PostIndex));
      str(scratch, MemOperand(ip));
    }
  }
}


void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  ASSERT(StackHandlerConstants::kSize == 6 * kPointerSize);  // adjust this code
  // The pc (return address) is passed in register lr.
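  // A sketch of the resulting handler frame (ascending addresses), matching
  // the six-word size asserted above; the code word is the new TOS value,
  // flushed to the stack immediately in the JS-entry case and lazily via
  // the r0 TOS cache in the JavaScript case:
  //   [code] [next handler sp] [state] [pp] [fp] [return address]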
  if (try_location == IN_JAVASCRIPT) {
    mov(r0, Operand(Smi::FromInt(StackHandler::kCodeNotPresent)));  // new TOS
    stm(db_w, sp, pp.bit() | fp.bit() | lr.bit());
    if (type == TRY_CATCH_HANDLER) {
      mov(r3, Operand(StackHandler::TRY_CATCH));
    } else {
      mov(r3, Operand(StackHandler::TRY_FINALLY));
    }
    push(r3);  // state
    mov(r3, Operand(ExternalReference(Top::k_handler_address)));
    ldr(r1, MemOperand(r3));
    push(r1);  // next sp
    str(sp, MemOperand(r3));  // chain handler
    // TOS is r0
  } else {
    // Must preserve r0-r3; r5-r7 are available.
    ASSERT(try_location == IN_JS_ENTRY);
    // The parameter pointer is meaningless here and fp does not point to a JS
    // frame, so we save NULL for both pp and fp.  We expect the code throwing
    // an exception to check fp before dereferencing it to restore the context.
    mov(r5, Operand(Smi::FromInt(StackHandler::kCodeNotPresent)));  // new TOS
    mov(pp, Operand(0));  // set pp to NULL
    mov(ip, Operand(0));  // to save a NULL fp
    stm(db_w, sp, pp.bit() | ip.bit() | lr.bit());
    mov(r6, Operand(StackHandler::ENTRY));
    push(r6);  // state
    mov(r7, Operand(ExternalReference(Top::k_handler_address)));
    ldr(r6, MemOperand(r7));
    push(r6);  // next sp
    str(sp, MemOperand(r7));  // chain handler
    push(r5);  // flush TOS
  }
}


Register MacroAssembler::CheckMaps(JSObject* object, Register object_reg,
                                   JSObject* holder, Register holder_reg,
                                   Register scratch,
                                   Label* miss) {
  // Make sure there's no overlap between scratch and the other
  // registers.
  ASSERT(!scratch.is(object_reg) && !scratch.is(holder_reg));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 1;

  // Check the maps in the prototype chain: traverse the chain from the
  // object and do a map check at each step.
  while (object != holder) {
    depth++;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(object->IsJSGlobalObject() || !object->IsAccessCheckNeeded());

    // Get the map of the current object.
    ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
    cmp(scratch, Operand(Handle<Map>(object->map())));

    // Branch on the result of the map check.
    b(ne, miss);

    // Check access rights to the global object.  This has to happen
    // after the map check so that we know that the object is
    // actually a global object.
    if (object->IsJSGlobalObject()) {
      CheckAccessGlobal(reg, scratch, miss);
      // Restore the scratch register to be the map of the object.  In the
      // new space case below, we load the prototype from the map in
      // the scratch register.
      ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
    }

    reg = holder_reg;  // from now on the object is in holder_reg
    JSObject* prototype = JSObject::cast(object->GetPrototype());
    if (Heap::InNewSpace(prototype)) {
      // The prototype is in new space; we cannot store a reference
      // to it in the code.  Load it from the map.
      ldr(reg, FieldMemOperand(scratch, Map::kPrototypeOffset));
    } else {
      // The prototype is in old space; load it directly.
      mov(reg, Operand(Handle<JSObject>(prototype)));
    }

    // Go to the next object in the prototype chain.
    object = prototype;
  }

  // Check the holder map.
  ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
  cmp(scratch, Operand(Handle<Map>(object->map())));
  b(ne, miss);

  // Log the check depth.
  LOG(IntEvent("check-maps-depth", depth));

  // Perform a security check for access to the global object and return
  // the holder register.
  ASSERT(object == holder);
  ASSERT(object->IsJSGlobalObject() || !object->IsAccessCheckNeeded());
  if (object->IsJSGlobalObject()) {
    CheckAccessGlobal(reg, scratch, miss);
  }
  return reg;
}


void MacroAssembler::CheckAccessGlobal(Register holder_reg,
                                       Register scratch,
                                       Label* miss) {
  ASSERT(!holder_reg.is(scratch));

  // Load the security context.
  mov(scratch, Operand(Top::security_context_address()));
  ldr(scratch, MemOperand(scratch));
  // In debug mode, make sure the security context is set.
  if (kDebug) {
    cmp(scratch, Operand(0));
    Check(ne, "we should not have an empty security context");
  }

  // Load the global object of the security context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  ldr(scratch, FieldMemOperand(scratch, offset));
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  ldr(scratch, FieldMemOperand(scratch, JSGlobalObject::kSecurityTokenOffset));
  ldr(ip, FieldMemOperand(holder_reg, JSGlobalObject::kSecurityTokenOffset));
  cmp(scratch, Operand(ip));
  b(ne, miss);
}


void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // stub calls are not allowed in some stubs
  Call(stub->GetCode(), code_target);
}


void MacroAssembler::CallJSExitStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // stub calls are not allowed in some stubs
  Call(stub->GetCode(), exit_js_frame);
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  if (argc > 1)
    add(sp, sp, Operand((argc - 1) * kPointerSize));
  Ret();
}


void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  ASSERT(num_arguments >= 1);  // must have receiver for call

  if (f->nargs < 0) {
    // The number of arguments is not constant for this call, or we don't
    // have an entry stub that pushes the value.  Push it before the call.
    push(r0);
    // The receiver does not count as an argument.
    mov(r0, Operand(num_arguments - 1));
  } else {
    ASSERT(f->nargs == num_arguments);
    // TODO(1236192): Most runtime routines don't need the number of
    // arguments passed in because it is constant.  At some point we
    // should remove this need and make the runtime routine entry code
    // smarter.

    // The number of arguments is fixed for this call.
    // Set r0 correspondingly.
    push(r0);
    mov(r0, Operand(f->nargs - 1));  // receiver does not count as an argument
  }

  RuntimeStub stub((Runtime::FunctionId) f->stub_id);
  CallStub(&stub);
}
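
// Example usage (illustrative): Abort() below calls
// CallRuntime(Runtime::kAbort, 2).  Either branch above pushes the cached
// TOS in r0 and leaves the argument count, excluding the receiver, in r0
// before calling through the runtime stub.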


void MacroAssembler::CallRuntime(Runtime::FunctionId fid, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(fid), num_arguments);
}


void MacroAssembler::TailCallRuntime(Runtime::Function* f) {
  JumpToBuiltin(ExternalReference(f));  // tail call to runtime routine
}


void MacroAssembler::JumpToBuiltin(const ExternalReference& builtin) {
#if defined(__thumb__)
  // Thumb mode builtin.
  ASSERT((reinterpret_cast<intptr_t>(builtin.address()) & 1) == 1);
#endif
  mov(r1, Operand(builtin));
  CEntryStub stub;
  Jump(stub.GetCode(), code_target);
}


void MacroAssembler::InvokeBuiltin(const char* name,
                                   int argc,
                                   InvokeJSFlags flags) {
  Handle<String> symbol = Factory::LookupAsciiSymbol(name);
  Object* object = Top::security_context_builtins()->GetProperty(*symbol);
  bool unresolved = true;
  Code* code = Builtins::builtin(Builtins::Illegal);

  if (object->IsJSFunction()) {
    Handle<JSFunction> function(JSFunction::cast(object));
    if (function->is_compiled() || CompileLazy(function, CLEAR_EXCEPTION)) {
      code = function->code();
      unresolved = false;
    }
  }

  if (flags == CALL_JS) {
    Call(Handle<Code>(code), code_target);
  } else {
    ASSERT(flags == JUMP_JS);
    Jump(Handle<Code>(code), code_target);
  }

  if (unresolved) {
    uint32_t flags =
        Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
        Bootstrapper::FixupFlagsIsPCRelative::encode(false);
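    // Record the code offset of the instruction that targets the builtin
    // (pc_offset() minus one instruction) so the bootstrapper can patch
    // the reference once the real builtin has been compiled.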
    Unresolved entry = { pc_offset() - sizeof(Instr), flags, name };
    unresolved_.Add(entry);
  }
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code)
    Check(cc, msg);
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  b(cc, &L);
  Abort(msg);
  // will not return here
  bind(&L);
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string as a smi to avoid GC problems;
  // however, msg is not guaranteed to be properly aligned.  Instead,
  // we pass an aligned pointer that is a proper v8 smi, and also pass
  // the alignment difference from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
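  // Worked example (illustrative only, assuming a one-bit smi tag of 0):
  // a msg pointer ending in ...5 has its low bit cleared to form p0, and
  // the difference p1 - p0 == 1 lets the runtime reconstruct msg.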
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  push(r0);
  mov(r0, Operand(p0));
  push(r0);
  mov(r0, Operand(Smi::FromInt(p1 - p0)));
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
}

} }  // namespace v8::internal