// Copyright 2006-2008 Google Inc. All Rights Reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "debug.h"
#include "runtime.h"

namespace v8 { namespace internal {

DECLARE_bool(debug_code);
DECLARE_bool(optimize_locals);


// Give alias names to registers
Register cp = { 8 };  // JavaScript context pointer
Register pp = { 10 };  // parameter pointer
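// (The brace initializers are register codes: cp aliases r8 and pp aliases
// r10 in the ARM register file.)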


MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      unresolved_(0),
      generating_stub_(false) {
}


// We always generate ARM code, never Thumb code, even if V8 is compiled to
// Thumb, so we require interworking support.
#if defined(__thumb__) && !defined(__THUMB_INTERWORK__)
#error "flag -mthumb-interwork missing"
#endif


// We do not support Thumb interworking on ARM architectures that lack the
// blx instruction (i.e., below v5t).
#if defined(__THUMB_INTERWORK__)
#if !defined(__ARM_ARCH_5T__) && !defined(__ARM_ARCH_5TE__)
// add tests for other versions above v5t as required
#error "for thumb inter-working we require architecture v5t or above"
#endif
#endif


// Using blx may yield better code, so use it when required or when available
#if defined(__THUMB_INTERWORK__) || defined(__ARM_ARCH_5__)
#define USE_BLX 1
#endif

// Using bx does not yield better code, so use it only when required
#if defined(__THUMB_INTERWORK__)
#define USE_BX 1
#endif


void MacroAssembler::Jump(Register target, Condition cond) {
#if USE_BX
  bx(target, cond);
#else
  mov(pc, Operand(target), LeaveCC, cond);
#endif
}


void MacroAssembler::Jump(intptr_t target, RelocMode rmode, Condition cond) {
#if USE_BX
  mov(ip, Operand(target, rmode), LeaveCC, cond);
  bx(ip, cond);
#else
  mov(pc, Operand(target, rmode), LeaveCC, cond);
#endif
}


void MacroAssembler::Jump(byte* target, RelocMode rmode, Condition cond) {
  ASSERT(!is_code_target(rmode));
  Jump(reinterpret_cast<intptr_t>(target), rmode, cond);
}


void MacroAssembler::Jump(Handle<Code> code, RelocMode rmode, Condition cond) {
  ASSERT(is_code_target(rmode));
  // 'code' is always generated ARM code, never THUMB code
  Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond);
}


void MacroAssembler::Call(Register target, Condition cond) {
#if USE_BLX
  blx(target, cond);
#else
  // set lr for return at current pc + 8
  mov(lr, Operand(pc), LeaveCC, cond);
  mov(pc, Operand(target), LeaveCC, cond);
#endif
}


void MacroAssembler::Call(intptr_t target, RelocMode rmode, Condition cond) {
#if !defined(__arm__)
  if (rmode == runtime_entry) {
    mov(r2, Operand(target, rmode), LeaveCC, cond);
    // Set lr for return at current pc + 8.
    mov(lr, Operand(pc), LeaveCC, cond);
    // Notify the simulator of the transition to C code.
    swi(assembler::arm::call_rt_r2);
  } else {
    // set lr for return at current pc + 8
    mov(lr, Operand(pc), LeaveCC, cond);
    // emit a ldr<cond> pc, [pc + offset of target in constant pool]
    mov(pc, Operand(target, rmode), LeaveCC, cond);
  }
#else
  // Set lr for return at current pc + 8.
  mov(lr, Operand(pc), LeaveCC, cond);
  // Emit a ldr<cond> pc, [pc + offset of target in constant pool].
  mov(pc, Operand(target, rmode), LeaveCC, cond);
#endif  // !defined(__arm__)
  // If USE_BLX is defined, we could emit a 'mov ip, target', followed by a
  // 'blx ip'; however, the code would not be shorter than the above sequence
  // and the target address of the call would be referenced by the first
  // instruction rather than the second one, which would make it harder to
  // patch (two instructions before the return address, instead of one).
  ASSERT(kTargetAddrToReturnAddrDist == sizeof(Instr));
}


void MacroAssembler::Call(byte* target, RelocMode rmode, Condition cond) {
  ASSERT(!is_code_target(rmode));
  Call(reinterpret_cast<intptr_t>(target), rmode, cond);
}


void MacroAssembler::Call(Handle<Code> code, RelocMode rmode, Condition cond) {
  ASSERT(is_code_target(rmode));
  // 'code' is always generated ARM code, never THUMB code
  Call(reinterpret_cast<intptr_t>(code.location()), rmode, cond);
}


void MacroAssembler::Ret() {
#if USE_BX
  bx(lr);
#else
  mov(pc, Operand(lr));
#endif
}


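// Note: the Push and Pop helpers below assume the code generator's
// convention of caching the top of the expression stack in r0: Push spills
// the cached top to memory before moving the new value into r0, and Pop
// moves the cached top out of r0 before reloading r0 from the stack.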
void MacroAssembler::Push(const Operand& src) {
  push(r0);
  mov(r0, src);
}


void MacroAssembler::Push(const MemOperand& src) {
  push(r0);
  ldr(r0, src);
}


void MacroAssembler::Pop(Register dst) {
  mov(dst, Operand(r0));
  pop(r0);
}


void MacroAssembler::Pop(const MemOperand& dst) {
  str(r0, dst);
  pop(r0);
}


// Will clobber 4 registers: object, offset, scratch, ip.  The
// register 'object' contains a heap object pointer.  The heap object
// tag is shifted away.
void MacroAssembler::RecordWrite(Register object, Register offset,
                                 Register scratch) {
  // This is how much we shift the remembered set bit offset to get the
  // offset of the word in the remembered set.  We divide by kBitsPerInt (32,
  // shift right 5) and then multiply by kIntSize (4, shift left 2).
  const int kRSetWordShift = 3;

  Label fast, done;

  // First, test that the start address is not in the new space.  We cannot
  // set remembered set bits in the new space.
  and_(scratch, object, Operand(Heap::NewSpaceMask()));
  cmp(scratch, Operand(ExternalReference::new_space_start()));
  b(eq, &done);

  mov(ip, Operand(Page::kPageAlignmentMask));  // load mask only once
  // Compute the bit offset in the remembered set.
  and_(scratch, object, Operand(ip));
  add(offset, scratch, Operand(offset));
  mov(offset, Operand(offset, LSR, kObjectAlignmentBits));

  // Compute the page address from the heap object pointer.
  bic(object, object, Operand(ip));

  // If the bit offset lies beyond the normal remembered set range, it is in
  // the extra remembered set area of a large object.
  cmp(offset, Operand(Page::kPageSize / kPointerSize));
  b(lt, &fast);

  // Adjust the bit offset to be relative to the start of the extra
  // remembered set and the start address to be the address of the extra
  // remembered set.
  sub(offset, offset, Operand(Page::kPageSize / kPointerSize));
  // Load the array length into 'scratch' and multiply by four to get the
  // size in bytes of the elements.
  ldr(scratch, MemOperand(object, Page::kObjectStartOffset
                                  + FixedArray::kLengthOffset));
  mov(scratch, Operand(scratch, LSL, kObjectAlignmentBits));
  // Add the page header (including remembered set), array header, and array
  // body size to the page address.
  add(object, object, Operand(Page::kObjectStartOffset
                              + Array::kHeaderSize));
  add(object, object, Operand(scratch));

  bind(&fast);
  // Now object is the address of the start of the remembered set and offset
  // is the bit offset from that start.
  // Get address of the rset word.
  add(object, object, Operand(offset, LSR, kRSetWordShift));
  // Get bit offset in the word.
  and_(offset, offset, Operand(kBitsPerInt - 1));

  ldr(scratch, MemOperand(object));
  mov(ip, Operand(1));
  orr(scratch, scratch, Operand(ip, LSL, offset));
  str(scratch, MemOperand(object));

  bind(&done);
}
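// Example use (hypothetical registers): after a pointer has been stored
// into the object whose untagged address is in r1, at byte offset r2,
// 'RecordWrite(r1, r2, r3)' sets the corresponding remembered set bit,
// clobbering r1, r2, r3 and ip.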


void MacroAssembler::EnterJSFrame(int argc, RegList callee_saved) {
  // Generate code entering a JS function called from a JS function
  // stack: receiver, arguments
  // r0: number of arguments (not including function, nor receiver)
  // r1: preserved
  // sp: stack pointer
  // fp: frame pointer
  // cp: callee's context
  // pp: caller's parameter pointer
  // lr: return address

  // compute parameter pointer before making changes
  // ip = sp + kPointerSize*(args_len+1);  // +1 for receiver
  add(ip, sp, Operand(r0, LSL, kPointerSizeLog2));
  add(ip, ip, Operand(kPointerSize));

  // push extra parameters if we don't have enough
  // (this can only happen if argc > 0 to begin with)
  if (argc > 0) {
    Label loop, done;

    // assume enough arguments to be the most common case
    sub(r2, r0, Operand(argc), SetCC);  // r2 = actual - expected arguments
    b(ge, &done);  // enough arguments

    // not enough arguments
    mov(r3, Operand(Factory::undefined_value()));
    bind(&loop);
    push(r3);
    add(r2, r2, Operand(1), SetCC);
    b(lt, &loop);

    bind(&done);
  }

  mov(r3, Operand(r0));  // args_len to be saved
  mov(r2, Operand(cp));  // context to be saved

  // Make sure there are no instructions between the two stm instructions,
  // because the callee_saved list is obtained during stack unwinding by
  // decoding the first stmdb instruction, which is found (or not) at a
  // constant offset from the pc saved by the second stmdb instruction.
  if (callee_saved != 0) {
    stm(db_w, sp, callee_saved);
  }

  // push in reverse order: context (r2), args_len (r3), caller_pp, caller_fp,
  // sp_on_exit (ip == pp, may be patched on exit), return address, prolog_pc
  stm(db_w, sp, r2.bit() | r3.bit() | pp.bit() | fp.bit() |
                ip.bit() | lr.bit() | pc.bit());

  // Set up new frame pointer.
  add(fp, sp, Operand(-StandardFrameConstants::kContextOffset));
  mov(pp, Operand(ip));  // set up new parameter pointer
  mov(r0, Operand(0));  // spare slot to store caller code object during GC
  // r0: TOS (code slot == 0)
  // r1: preserved
}
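// (After this prologue the frame holds, from lower to higher addresses:
// context, args_len, caller_pp, caller_fp, sp_on_exit, return address and
// prolog pc, with the code slot cached in r0 rather than stored in memory.)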


void MacroAssembler::ExitJSFrame(ExitJSFlag flag, RegList callee_saved) {
  // r0: result
  // sp: stack pointer
  // fp: frame pointer
  // pp: parameter pointer

  if (callee_saved != 0 || flag == DO_NOT_RETURN) {
    add(r3, fp, Operand(JavaScriptFrameConstants::kSavedRegistersOffset));
  }

  if (callee_saved != 0) {
    ldm(ia_w, r3, callee_saved);
  }

  if (flag == DO_NOT_RETURN) {
    // restore sp as caller_sp (not as pp)
    str(r3, MemOperand(fp, JavaScriptFrameConstants::kSPOnExitOffset));
  }

  if (flag == DO_NOT_RETURN && generating_stub()) {
    // If we're generating a stub, we need to preserve the link
    // register to be able to return to the place the stub was called
    // from.
    mov(ip, Operand(lr));
  }

  mov(sp, Operand(fp));  // respect ABI stack constraint
  ldm(ia, sp, pp.bit() | fp.bit() | sp.bit() |
              ((flag == RETURN) ? pc.bit() : lr.bit()));

  if (flag == DO_NOT_RETURN && generating_stub()) {
    // Return to the place where the stub was called without
    // clobbering the value of the link register.
    mov(pc, Operand(ip));
  }

  // r0: result
  // sp: points to function arg (if return) or to last arg (if no return)
  // fp: restored frame pointer
  // pp: restored parameter pointer
}


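// The four helpers below copy the JS caller-saved registers between the
// CPU and per-register memory locations (Debug_Address::Register); they
// appear to be used by the debugger to spill and restore these registers
// around a break.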
void MacroAssembler::SaveRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of registers to memory location.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      mov(ip, Operand(ExternalReference(Debug_Address::Register(i))));
      str(reg, MemOperand(ip));
    }
  }
}


void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of memory location to registers.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      mov(ip, Operand(ExternalReference(Debug_Address::Register(i))));
      ldr(reg, MemOperand(ip));
    }
  }
}


void MacroAssembler::CopyRegistersFromMemoryToStack(Register base,
                                                    RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of the memory location to the stack and adjust base.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      mov(ip, Operand(ExternalReference(Debug_Address::Register(i))));
      ldr(ip, MemOperand(ip));
      str(ip, MemOperand(base, 4, NegPreIndex));
    }
  }
}


void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
                                                    Register scratch,
                                                    RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of the stack to the memory location and adjust base.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      mov(ip, Operand(ExternalReference(Debug_Address::Register(i))));
      ldr(scratch, MemOperand(base, 4, PostIndex));
      str(scratch, MemOperand(ip));
    }
  }
}


void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  ASSERT(StackHandlerConstants::kSize == 6 * kPointerSize);  // adjust this code
  // The pc (return address) is passed in register lr.
  if (try_location == IN_JAVASCRIPT) {
    mov(r0, Operand(Smi::FromInt(StackHandler::kCodeNotPresent)));  // new TOS
    stm(db_w, sp, pp.bit() | fp.bit() | lr.bit());
    if (type == TRY_CATCH_HANDLER) {
      mov(r3, Operand(StackHandler::TRY_CATCH));
    } else {
      mov(r3, Operand(StackHandler::TRY_FINALLY));
    }
    push(r3);  // state
    mov(r3, Operand(ExternalReference(Top::k_handler_address)));
    ldr(r1, MemOperand(r3));
    push(r1);  // next sp
    str(sp, MemOperand(r3));  // chain handler
    // TOS is r0
  } else {
    // Must preserve r0-r3, r5-r7 are available.
    ASSERT(try_location == IN_JS_ENTRY);
    // The parameter pointer is meaningless here and fp does not point to a JS
    // frame.  So we save NULL for both pp and fp.  We expect the code throwing
    // an exception to check fp before dereferencing it to restore the context.
    mov(r5, Operand(Smi::FromInt(StackHandler::kCodeNotPresent)));  // new TOS
    mov(pp, Operand(0));  // set pp to NULL
    mov(ip, Operand(0));  // to save a NULL fp
    stm(db_w, sp, pp.bit() | ip.bit() | lr.bit());
    mov(r6, Operand(StackHandler::ENTRY));
    push(r6);  // state
    mov(r7, Operand(ExternalReference(Top::k_handler_address)));
    ldr(r6, MemOperand(r7));
    push(r6);  // next sp
    str(sp, MemOperand(r7));  // chain handler
    push(r5);  // flush TOS
  }
}
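// (Both cases above build a handler of StackHandlerConstants::kSize ==
// 6 * kPointerSize words: code, next sp, state, pp, fp and return address.
// In the JavaScript case the code word stays cached in r0 as the TOS
// instead of being stored to memory.)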


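// Walks the prototype chain from 'object' up to 'holder', checking the map
// of each object against its expected map and jumping to 'miss' on a
// mismatch.  Returns the register that ends up holding the holder.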
Register MacroAssembler::CheckMaps(JSObject* object, Register object_reg,
                                   JSObject* holder, Register holder_reg,
                                   Register scratch,
                                   Label* miss) {
  // Make sure there's no overlap between scratch and the other
  // registers.
  ASSERT(!scratch.is(object_reg) && !scratch.is(holder_reg));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 1;

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  while (object != holder) {
    depth++;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(object->IsJSGlobalObject() || !object->IsAccessCheckNeeded());

    // Get the map of the current object.
    ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
    cmp(scratch, Operand(Handle<Map>(object->map())));

    // Branch on the result of the map check.
    b(ne, miss);

    // Check access rights to the global object.  This has to happen
    // after the map check so that we know that the object is
    // actually a global object.
    if (object->IsJSGlobalObject()) {
      CheckAccessGlobal(reg, scratch, miss);
      // Restore scratch register to be the map of the object.  In the
      // new space case below, we load the prototype from the map in
      // the scratch register.
      ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
    }

    reg = holder_reg;  // from now on the object is in holder_reg
    JSObject* prototype = JSObject::cast(object->GetPrototype());
    if (Heap::InNewSpace(prototype)) {
      // The prototype is in new space; we cannot store a reference
      // to it in the code.  Load it from the map.
      ldr(reg, FieldMemOperand(scratch, Map::kPrototypeOffset));
    } else {
      // The prototype is in old space; load it directly.
      mov(reg, Operand(Handle<JSObject>(prototype)));
    }

    // Go to the next object in the prototype chain.
    object = prototype;
  }

  // Check the holder map.
  ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
  cmp(scratch, Operand(Handle<Map>(object->map())));
  b(ne, miss);

  // Log the check depth.
  LOG(IntEvent("check-maps-depth", depth));

  // Perform security check for access to the global object and return
  // the holder register.
  ASSERT(object == holder);
  ASSERT(object->IsJSGlobalObject() || !object->IsAccessCheckNeeded());
  if (object->IsJSGlobalObject()) {
    CheckAccessGlobal(reg, scratch, miss);
  }
  return reg;
}


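// Jumps to 'miss' unless the security token of the current security
// context's global object matches the security token of the global object
// in holder_reg.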
void MacroAssembler::CheckAccessGlobal(Register holder_reg,
                                       Register scratch,
                                       Label* miss) {
  ASSERT(!holder_reg.is(scratch));

  // Load the security context.
  mov(scratch, Operand(Top::security_context_address()));
  ldr(scratch, MemOperand(scratch));
  // In debug mode, make sure the security context is set.
  if (kDebug) {
    cmp(scratch, Operand(0));
    Check(ne, "we should not have an empty security context");
  }

  // Load the global object of the security context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  ldr(scratch, FieldMemOperand(scratch, offset));
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  ldr(scratch, FieldMemOperand(scratch, JSGlobalObject::kSecurityTokenOffset));
  ldr(ip, FieldMemOperand(holder_reg, JSGlobalObject::kSecurityTokenOffset));
  cmp(scratch, Operand(ip));
  b(ne, miss);
}


void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(!generating_stub());  // stub calls are not allowed in stubs
  Call(stub->GetCode(), code_target);
}


void MacroAssembler::CallJSExitStub(CodeStub* stub) {
  ASSERT(!generating_stub());  // stub calls are not allowed in stubs
  Call(stub->GetCode(), exit_js_frame);
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  if (argc > 1)
    add(sp, sp, Operand((argc - 1) * kPointerSize));
  Ret();
}


void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  ASSERT(num_arguments >= 1);  // must have receiver for call

  if (f->nargs < 0) {
    // The number of arguments is not constant for this call, or we don't
    // have an entry stub that pushes the value.  Push it before the call.
    push(r0);
    // Receiver does not count as an argument.
    mov(r0, Operand(num_arguments - 1));
  } else {
    ASSERT(f->nargs == num_arguments);
  }

  RuntimeStub stub((Runtime::FunctionId) f->stub_id);
  CallStub(&stub);
}


void MacroAssembler::CallRuntime(Runtime::FunctionId fid, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(fid), num_arguments);
}


void MacroAssembler::TailCallRuntime(Runtime::Function* f) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant.  At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  if (f->nargs >= 0) {
    // The number of arguments is fixed for this call.
    // Set r0 correspondingly.
    push(r0);
    mov(r0, Operand(f->nargs - 1));  // receiver does not count as an argument
  }
  JumpToBuiltin(ExternalReference(f));  // tail call to runtime routine
}


void MacroAssembler::JumpToBuiltin(const ExternalReference& builtin) {
#if defined(__thumb__)
  // Thumb mode builtin.
  ASSERT((reinterpret_cast<intptr_t>(builtin.address()) & 1) == 1);
#endif
  mov(r1, Operand(builtin));
  CEntryStub stub;
  Jump(stub.GetCode(), code_target);
}


void MacroAssembler::InvokeBuiltin(const char* name,
                                   int argc,
                                   InvokeJSFlags flags) {
  Handle<String> symbol = Factory::LookupAsciiSymbol(name);
  Object* object = Top::security_context_builtins()->GetProperty(*symbol);
  bool unresolved = true;
  Code* code = Builtins::builtin(Builtins::Illegal);

  if (object->IsJSFunction()) {
    Handle<JSFunction> function(JSFunction::cast(object));
    if (function->is_compiled() || CompileLazy(function, CLEAR_EXCEPTION)) {
      code = function->code();
      unresolved = false;
    }
  }

  if (flags == CALL_JS) {
    Call(Handle<Code>(code), code_target);
  } else {
    ASSERT(flags == JUMP_JS);
    Jump(Handle<Code>(code), code_target);
  }

  if (unresolved) {
    uint32_t flags =
        Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
        Bootstrapper::FixupFlagsIsPCRelative::encode(false);
    Unresolved entry = { pc_offset() - sizeof(Instr), flags, name };
    unresolved_.Add(entry);
  }
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code)
    Check(cc, msg);
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  b(cc, &L);
  Abort(msg);
  // will not return here
  bind(&L);
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC problems;
  // however, msg is not guaranteed to be properly aligned.  Instead, we
  // pass an aligned pointer that is a proper v8 smi, and we also pass the
  // alignment difference from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  push(r0);
  mov(r0, Operand(p0));
  push(r0);
  mov(r0, Operand(Smi::FromInt(p1 - p0)));
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
}

} }  // namespace v8::internal