// Copyright 2006-2008 Google Inc. All Rights Reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "debug.h"
#include "runtime.h"

namespace v8 { namespace internal {

DECLARE_bool(debug_code);
DECLARE_bool(optimize_locals);


// Give alias names to registers.
Register cp = { 8 };   // JavaScript context pointer
Register pp = { 10 };  // parameter pointer


MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      unresolved_(0),
      generating_stub_(false),
      allow_stub_calls_(true) {
}


// We always generate ARM code, never Thumb code, even if V8 is compiled to
// Thumb, so we require interworking support.
#if defined(__thumb__) && !defined(__THUMB_INTERWORK__)
#error "flag -mthumb-interwork missing"
#endif


// We do not support Thumb interworking with an ARM architecture that does
// not support the blx instruction (below v5t).
#if defined(__THUMB_INTERWORK__)
#if !defined(__ARM_ARCH_5T__) && !defined(__ARM_ARCH_5TE__)
// Add tests for other versions above v5t as required.
#error "for thumb inter-working we require architecture v5t or above"
#endif
#endif


// Using blx may yield better code, so use it when required or when available.
#if defined(__THUMB_INTERWORK__) || defined(__ARM_ARCH_5__)
#define USE_BLX 1
#endif

// Using bx does not yield better code, so use it only when required.
#if defined(__THUMB_INTERWORK__)
#define USE_BX 1
#endif


void MacroAssembler::Jump(Register target, Condition cond) {
#if USE_BX
  bx(target, cond);
#else
  mov(pc, Operand(target), LeaveCC, cond);
#endif
}


void MacroAssembler::Jump(intptr_t target, RelocMode rmode, Condition cond) {
#if USE_BX
  mov(ip, Operand(target, rmode), LeaveCC, cond);
  bx(ip, cond);
#else
  mov(pc, Operand(target, rmode), LeaveCC, cond);
#endif
}


void MacroAssembler::Jump(byte* target, RelocMode rmode, Condition cond) {
  ASSERT(!is_code_target(rmode));
  Jump(reinterpret_cast<intptr_t>(target), rmode, cond);
}


void MacroAssembler::Jump(Handle<Code> code, RelocMode rmode, Condition cond) {
  ASSERT(is_code_target(rmode));
  // 'code' is always generated ARM code, never Thumb code.
  Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond);
}


void MacroAssembler::Call(Register target, Condition cond) {
#if USE_BLX
  blx(target, cond);
#else
  // Set lr for return at current pc + 8.
  mov(lr, Operand(pc), LeaveCC, cond);
  mov(pc, Operand(target), LeaveCC, cond);
#endif
}


void MacroAssembler::Call(intptr_t target, RelocMode rmode, Condition cond) {
#if !defined(__arm__)
  if (rmode == runtime_entry) {
    mov(r2, Operand(target, rmode), LeaveCC, cond);
    // Set lr for return at current pc + 8.
    mov(lr, Operand(pc), LeaveCC, cond);
    // Notify the simulator of the transition to C code.
    swi(assembler::arm::call_rt_r2);
  } else {
    // Set lr for return at current pc + 8.
    mov(lr, Operand(pc), LeaveCC, cond);
    // Emit a ldr<cond> pc, [pc + offset of target in constant pool].
    mov(pc, Operand(target, rmode), LeaveCC, cond);
  }
#else
  // Set lr for return at current pc + 8.
  mov(lr, Operand(pc), LeaveCC, cond);
  // Emit a ldr<cond> pc, [pc + offset of target in constant pool].
  mov(pc, Operand(target, rmode), LeaveCC, cond);
#endif  // !defined(__arm__)
  // If USE_BLX is defined, we could emit a 'mov ip, target', followed by a
  // 'blx ip'; however, the code would not be shorter than the above sequence
  // and the target address of the call would be referenced by the first
  // instruction rather than the second one, which would make it harder to
  // patch (two instructions before the return address, instead of one).
  ASSERT(kTargetAddrToReturnAddrDist == sizeof(Instr));
}


void MacroAssembler::Call(byte* target, RelocMode rmode, Condition cond) {
  ASSERT(!is_code_target(rmode));
  Call(reinterpret_cast<intptr_t>(target), rmode, cond);
}


void MacroAssembler::Call(Handle<Code> code, RelocMode rmode, Condition cond) {
  ASSERT(is_code_target(rmode));
  // 'code' is always generated ARM code, never Thumb code.
  Call(reinterpret_cast<intptr_t>(code.location()), rmode, cond);
}


void MacroAssembler::Ret() {
#if USE_BX
  bx(lr);
#else
  mov(pc, Operand(lr));
#endif
}


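// The Push/Pop helpers below maintain the invariant, visible in their
// bodies, that the top of the expression stack (TOS) is cached in r0:
// pushing spills the old TOS to memory before loading the new value into
// r0, and popping hands out the cached value before reloading r0 from the
// stack.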
void MacroAssembler::Push(const Operand& src) {
  push(r0);
  mov(r0, src);
}


void MacroAssembler::Push(const MemOperand& src) {
  push(r0);
  ldr(r0, src);
}


void MacroAssembler::Pop(Register dst) {
  mov(dst, Operand(r0));
  pop(r0);
}


void MacroAssembler::Pop(const MemOperand& dst) {
  str(r0, dst);
  pop(r0);
}


// Will clobber 4 registers: object, offset, scratch, ip. The
// register 'object' contains a heap object pointer. The heap object
// tag is shifted away.
void MacroAssembler::RecordWrite(Register object, Register offset,
                                 Register scratch) {
  // This is how much we shift the remembered set bit offset to get the
  // offset of the word in the remembered set. We divide by kBitsPerInt (32,
  // shift right 5) and then multiply by kIntSize (4, shift left 2).
  const int kRSetWordShift = 3;
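  // Illustrative arithmetic only: a store whose remembered set bit offset is
  // 37 lands in word (37 >> 5) == 1 of the rset, i.e. at byte offset
  // ((37 & ~(kBitsPerInt - 1)) >> kRSetWordShift) == 4, at bit position
  // (37 & (kBitsPerInt - 1)) == 5 within that word; this matches the
  // bic/add/and_ sequence below.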

  Label fast, done;

  // First, test that the object is not in the new space. We cannot set
  // remembered set bits in the new space.
  // object: heap object pointer (with tag)
  // offset: offset to store location from the object
  and_(scratch, object, Operand(Heap::NewSpaceMask()));
  cmp(scratch, Operand(ExternalReference::new_space_start()));
  b(eq, &done);

  // Compute the bit offset in the remembered set.
  // object: heap object pointer (with tag)
  // offset: offset to store location from the object
  mov(ip, Operand(Page::kPageAlignmentMask));  // load mask only once
  and_(scratch, object, Operand(ip));  // offset into page of the object
  add(offset, scratch, Operand(offset));  // add offset into the object
  mov(offset, Operand(offset, LSR, kObjectAlignmentBits));

  // Compute the page address from the heap object pointer.
  // object: heap object pointer (with tag)
  // offset: bit offset of store position in the remembered set
  bic(object, object, Operand(ip));

  // If the bit offset lies beyond the normal remembered set range, it is in
  // the extra remembered set area of a large object.
  // object: page start
  // offset: bit offset of store position in the remembered set
  cmp(offset, Operand(Page::kPageSize / kPointerSize));
  b(lt, &fast);

  // Adjust the bit offset to be relative to the start of the extra
  // remembered set and the start address to be the address of the extra
  // remembered set.
  sub(offset, offset, Operand(Page::kPageSize / kPointerSize));
  // Load the array length into 'scratch' and multiply by four to get the
  // size in bytes of the elements.
  ldr(scratch, MemOperand(object, Page::kObjectStartOffset
                                  + FixedArray::kLengthOffset));
  mov(scratch, Operand(scratch, LSL, kObjectAlignmentBits));
  // Add the page header (including remembered set), array header, and array
  // body size to the page address.
  add(object, object, Operand(Page::kObjectStartOffset
                              + Array::kHeaderSize));
  add(object, object, Operand(scratch));

  bind(&fast);
  // Get address of the rset word.
  // object: start of the remembered set (page start for the fast case)
  // offset: bit offset of store position in the remembered set
  bic(scratch, offset, Operand(kBitsPerInt - 1));  // clear the bit offset
  add(object, object, Operand(scratch, LSR, kRSetWordShift));
  // Get bit offset in the rset word.
  // object: address of remembered set word
  // offset: bit offset of store position
  and_(offset, offset, Operand(kBitsPerInt - 1));

  ldr(scratch, MemOperand(object));
  mov(ip, Operand(1));
  orr(scratch, scratch, Operand(ip, LSL, offset));
  str(scratch, MemOperand(object));

  bind(&done);
}
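
// A typical use, sketched here for illustration: after storing a pointer
// with 'str(value, MemOperand(object, offset))', a caller moves the byte
// offset into a register and calls RecordWrite(object, offset_reg, scratch),
// accepting that object, offset_reg, scratch, and ip are all clobbered.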


void MacroAssembler::EnterJSFrame(int argc) {
  // Generate code entering a JS function called from a JS function
  // stack: receiver, arguments
  // r0: number of arguments (not including function, nor receiver)
  // r1: preserved
  // sp: stack pointer
  // fp: frame pointer
  // cp: callee's context
  // pp: caller's parameter pointer
  // lr: return address

  // Compute parameter pointer before making changes:
  // ip = sp + kPointerSize * (args_len + 1);  // +1 for receiver
  add(ip, sp, Operand(r0, LSL, kPointerSizeLog2));
  add(ip, ip, Operand(kPointerSize));
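  // For instance, with two actual arguments (r0 == 2), ip ends up at
  // sp + 3 * kPointerSize, i.e. just past the receiver slot.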

  // Push extra parameters if we don't have enough
  // (this can only happen if argc > 0 to begin with).
  if (argc > 0) {
    Label loop, done;

    // Assume enough arguments to be the most common case.
    sub(r2, r0, Operand(argc), SetCC);  // number of missing arguments
    b(ge, &done);  // enough arguments

    // Not enough arguments.
    mov(r3, Operand(Factory::undefined_value()));
    bind(&loop);
    push(r3);
    add(r2, r2, Operand(1), SetCC);
    b(lt, &loop);

    bind(&done);
  }

  mov(r3, Operand(r0));  // args_len to be saved
  mov(r2, Operand(cp));  // context to be saved

  // Push in reverse order: context (r2), args_len (r3), caller_pp, caller_fp,
  // sp_on_exit (ip == pp, may be patched on exit), return address.
  stm(db_w, sp, r2.bit() | r3.bit() | pp.bit() | fp.bit() |
      ip.bit() | lr.bit());

  // Setup new frame pointer.
  add(fp, sp, Operand(-StandardFrameConstants::kContextOffset));
  mov(pp, Operand(ip));  // setup new parameter pointer
  mov(r0, Operand(0));  // spare slot to store caller code object during GC
  // r0: TOS (code slot == 0)
  // r1: preserved
}


void MacroAssembler::ExitJSFrame(ExitJSFlag flag) {
  // r0: result
  // sp: stack pointer
  // fp: frame pointer
  // pp: parameter pointer

  if (flag == DO_NOT_RETURN) {
    // Restore sp as caller_sp (not as pp).
    add(r3, fp, Operand(JavaScriptFrameConstants::kSavedRegistersOffset));
    str(r3, MemOperand(fp, JavaScriptFrameConstants::kSPOnExitOffset));
  }

  if (flag == DO_NOT_RETURN && generating_stub()) {
    // If we're generating a stub, we need to preserve the link
    // register to be able to return to the place the stub was called
    // from.
    mov(ip, Operand(lr));
  }

  mov(sp, Operand(fp));  // respect ABI stack constraint
  ldm(ia, sp, pp.bit() | fp.bit() | sp.bit() |
      ((flag == RETURN) ? pc.bit() : lr.bit()));

  if (flag == DO_NOT_RETURN && generating_stub()) {
    // Return to the place where the stub was called without
    // clobbering the value of the link register.
    mov(pc, Operand(ip));
  }

  // r0: result
  // sp: points to function arg (if return) or to last arg (if no return)
  // fp: restored frame pointer
  // pp: restored parameter pointer
}


void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    Register code_reg,
                                    Label* done,
                                    InvokeFlag flag) {
  if (actual.is_immediate()) {
    // Pass the number of arguments in r0.
    mov(r0, Operand(actual.immediate()));
  } else if (!actual.reg().is(r0)) {
    mov(r0, Operand(actual.reg()));
  }
}


void MacroAssembler::InvokeCode(Register code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag) {
  Label done;

  InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
  if (flag == CALL_FUNCTION) {
    Call(code);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    Jump(code);
  }

  // Continue here if InvokePrologue handled the invocation itself because
  // of mismatched parameter counts.
  bind(&done);
}


void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocMode rmode,
                                InvokeFlag flag) {
  Label done;

  InvokePrologue(expected, actual, code, no_reg, &done, flag);
  if (flag == CALL_FUNCTION) {
    Call(code, rmode);
  } else {
    Jump(code, rmode);
  }

  // Continue here if InvokePrologue handled the invocation itself because
  // of mismatched parameter counts.
  bind(&done);
}


void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  // Contract with called JS functions requires that function is passed in r1.
  ASSERT(fun.is(r1));

  Register code_reg = r3;
  Register expected_reg = r2;

  // Make sure that the code and expected registers do not collide with the
  // actual register being passed in.
  if (actual.is_reg()) {
    if (actual.reg().is(code_reg)) {
      code_reg = r4;
    } else if (actual.reg().is(expected_reg)) {
      expected_reg = r4;
    }
  }

  ldr(code_reg, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
  ldr(expected_reg,
      FieldMemOperand(code_reg,
                      SharedFunctionInfo::kFormalParameterCountOffset));
  ldr(code_reg,
      MemOperand(code_reg, SharedFunctionInfo::kCodeOffset - kHeapObjectTag));
  add(code_reg, code_reg, Operand(Code::kHeaderSize - kHeapObjectTag));

  ParameterCount expected(expected_reg);
  InvokeCode(code_reg, expected, actual, flag);
}


void MacroAssembler::SaveRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of registers to memory location.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      mov(ip, Operand(ExternalReference(Debug_Address::Register(i))));
      str(reg, MemOperand(ip));
    }
  }
}


void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of memory location to registers.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      mov(ip, Operand(ExternalReference(Debug_Address::Register(i))));
      ldr(reg, MemOperand(ip));
    }
  }
}


void MacroAssembler::CopyRegistersFromMemoryToStack(Register base,
                                                    RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of the memory location to the stack and adjust base.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      mov(ip, Operand(ExternalReference(Debug_Address::Register(i))));
      ldr(ip, MemOperand(ip));
      str(ip, MemOperand(base, 4, NegPreIndex));
    }
  }
}


void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
                                                    Register scratch,
                                                    RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of the stack to the memory location and adjust base.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      mov(ip, Operand(ExternalReference(Debug_Address::Register(i))));
      ldr(scratch, MemOperand(base, 4, PostIndex));
      str(scratch, MemOperand(ip));
    }
  }
}


void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  ASSERT(StackHandlerConstants::kSize == 6 * kPointerSize);  // adjust this code
  // The pc (return address) is passed in register lr.
  if (try_location == IN_JAVASCRIPT) {
    mov(r0, Operand(Smi::FromInt(StackHandler::kCodeNotPresent)));  // new TOS
    stm(db_w, sp, pp.bit() | fp.bit() | lr.bit());
    if (type == TRY_CATCH_HANDLER) {
      mov(r3, Operand(StackHandler::TRY_CATCH));
    } else {
      mov(r3, Operand(StackHandler::TRY_FINALLY));
    }
    push(r3);  // state
    mov(r3, Operand(ExternalReference(Top::k_handler_address)));
    ldr(r1, MemOperand(r3));
    push(r1);  // next sp
    str(sp, MemOperand(r3));  // chain handler
    // TOS is r0
  } else {
    // Must preserve r0-r3; r5-r7 are available.
    ASSERT(try_location == IN_JS_ENTRY);
    // The parameter pointer is meaningless here and fp does not point to a JS
    // frame. So we save NULL for both pp and fp. We expect the code throwing
    // an exception to check fp before dereferencing it to restore the context.
    mov(r5, Operand(Smi::FromInt(StackHandler::kCodeNotPresent)));  // new TOS
    mov(pp, Operand(0));  // set pp to NULL
    mov(ip, Operand(0));  // to save a NULL fp
    stm(db_w, sp, pp.bit() | ip.bit() | lr.bit());
    mov(r6, Operand(StackHandler::ENTRY));
    push(r6);  // state
    mov(r7, Operand(ExternalReference(Top::k_handler_address)));
    ldr(r6, MemOperand(r7));
    push(r6);  // next sp
    str(sp, MemOperand(r7));  // chain handler
    push(r5);  // flush TOS
  }
}


Register MacroAssembler::CheckMaps(JSObject* object, Register object_reg,
                                   JSObject* holder, Register holder_reg,
                                   Register scratch,
                                   Label* miss) {
  // Make sure there's no overlap between scratch and the other
  // registers.
  ASSERT(!scratch.is(object_reg) && !scratch.is(holder_reg));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 1;

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  while (object != holder) {
    depth++;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(object->IsJSGlobalObject() || !object->IsAccessCheckNeeded());

    // Get the map of the current object.
    ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
    cmp(scratch, Operand(Handle<Map>(object->map())));

    // Branch on the result of the map check.
    b(ne, miss);

    // Check access rights to the global object. This has to happen
    // after the map check so that we know that the object is
    // actually a global object.
    if (object->IsJSGlobalObject()) {
      CheckAccessGlobal(reg, scratch, miss);
      // Restore scratch register to be the map of the object. In the
      // new space case below, we load the prototype from the map in
      // the scratch register.
      ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
    }

    reg = holder_reg;  // from now on the object is in holder_reg
    JSObject* prototype = JSObject::cast(object->GetPrototype());
    if (Heap::InNewSpace(prototype)) {
      // The prototype is in new space; we cannot store a reference
      // to it in the code. Load it from the map.
      ldr(reg, FieldMemOperand(scratch, Map::kPrototypeOffset));
    } else {
      // The prototype is in old space; load it directly.
      mov(reg, Operand(Handle<JSObject>(prototype)));
    }

    // Go to the next object in the prototype chain.
    object = prototype;
  }

  // Check the holder map.
  ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
  cmp(scratch, Operand(Handle<Map>(object->map())));
  b(ne, miss);

  // Log the check depth.
  LOG(IntEvent("check-maps-depth", depth));

  // Perform security check for access to the global object and return
  // the holder register.
  ASSERT(object == holder);
  ASSERT(object->IsJSGlobalObject() || !object->IsAccessCheckNeeded());
  if (object->IsJSGlobalObject()) {
    CheckAccessGlobal(reg, scratch, miss);
  }
  return reg;
}


void MacroAssembler::CheckAccessGlobal(Register holder_reg,
                                       Register scratch,
                                       Label* miss) {
  ASSERT(!holder_reg.is(scratch));

  // Load the security context.
  mov(scratch, Operand(Top::security_context_address()));
  ldr(scratch, MemOperand(scratch));
  // In debug mode, make sure the security context is set.
  if (kDebug) {
    cmp(scratch, Operand(0));
    Check(ne, "we should not have an empty security context");
  }

  // Load the global object of the security context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  ldr(scratch, FieldMemOperand(scratch, offset));
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  ldr(scratch, FieldMemOperand(scratch, JSGlobalObject::kSecurityTokenOffset));
  ldr(ip, FieldMemOperand(holder_reg, JSGlobalObject::kSecurityTokenOffset));
  cmp(scratch, Operand(ip));
  b(ne, miss);
}


void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // stub calls are not allowed in some stubs
  Call(stub->GetCode(), code_target);
}


void MacroAssembler::CallJSExitStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // stub calls are not allowed in some stubs
  Call(stub->GetCode(), exit_js_frame);
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  if (argc > 1)
    add(sp, sp, Operand((argc - 1) * kPointerSize));
  Ret();
}


void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  ASSERT(num_arguments >= 1);  // must have receiver for call

  if (f->nargs < 0) {
    // The number of arguments is not constant for this call, or we don't
    // have an entry stub that pushes the value. Push it before the call.
    push(r0);
    // Receiver does not count as an argument.
    mov(r0, Operand(num_arguments - 1));
  } else {
    ASSERT(f->nargs == num_arguments);
    // TODO(1236192): Most runtime routines don't need the number of
    // arguments passed in because it is constant. At some point we
    // should remove this need and make the runtime routine entry code
    // smarter.

    // The number of arguments is fixed for this call.
    // Set r0 correspondingly.
    push(r0);
    mov(r0, Operand(f->nargs - 1));  // receiver does not count as an argument
  }

  RuntimeStub stub((Runtime::FunctionId) f->stub_id);
  CallStub(&stub);
}


void MacroAssembler::CallRuntime(Runtime::FunctionId fid, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(fid), num_arguments);
}


void MacroAssembler::TailCallRuntime(Runtime::Function* f) {
  JumpToBuiltin(ExternalReference(f));  // tail call to runtime routine
}


void MacroAssembler::JumpToBuiltin(const ExternalReference& builtin) {
#if defined(__thumb__)
  // Thumb mode builtin.
  ASSERT((reinterpret_cast<intptr_t>(builtin.address()) & 1) == 1);
#endif
  mov(r1, Operand(builtin));
  CEntryStub stub;
  Jump(stub.GetCode(), code_target);
}


void MacroAssembler::InvokeBuiltin(const char* name,
                                   int argc,
                                   InvokeJSFlags flags) {
  Handle<String> symbol = Factory::LookupAsciiSymbol(name);
  Object* object = Top::security_context_builtins()->GetProperty(*symbol);
  bool unresolved = true;
  Code* code = Builtins::builtin(Builtins::Illegal);

  if (object->IsJSFunction()) {
    Handle<JSFunction> function(JSFunction::cast(object));
    if (function->is_compiled() || CompileLazy(function, CLEAR_EXCEPTION)) {
      code = function->code();
      unresolved = false;
    }
  }

  if (flags == CALL_JS) {
    Call(Handle<Code>(code), code_target);
  } else {
    ASSERT(flags == JUMP_JS);
    Jump(Handle<Code>(code), code_target);
  }

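  // If the builtin could not be resolved to compiled code above, record a
  // fixup entry (consumed during bootstrapping, as the Bootstrapper::
  // FixupFlags encoding suggests) so the call site can be patched once the
  // builtin is installed; pc_offset() - sizeof(Instr) points back at the
  // instruction referencing the placeholder target.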
  if (unresolved) {
    uint32_t flags =
        Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
        Bootstrapper::FixupFlagsIsPCRelative::encode(false);
    Unresolved entry = { pc_offset() - sizeof(Instr), flags, name };
    unresolved_.Add(entry);
  }
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code)
    Check(cc, msg);
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  b(cc, &L);
  Abort(msg);
  // Will not return here.
  bind(&L);
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
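  // Illustration (assuming the usual kSmiTagMask == 1 and kSmiTag == 0):
  // for msg == 0x1003, p0 becomes 0x1002, which looks like a smi, and the
  // difference p1 - p0 == 1 is passed along as Smi::FromInt(1) so the
  // runtime can reconstruct the original pointer.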
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  push(r0);
  mov(r0, Operand(p0));
  push(r0);
  mov(r0, Operand(Smi::FromInt(p1 - p0)));
  CallRuntime(Runtime::kAbort, 2);
  // Will not return here.
}

} }  // namespace v8::internal