// Copyright 2006-2008 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "debug.h"
#include "runtime.h"

namespace v8 { namespace internal {

// Give alias names to registers.
Register cp = { 8 };   // JavaScript context pointer
Register pp = { 10 };  // parameter pointer


MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      unresolved_(0),
      generating_stub_(false),
      allow_stub_calls_(true) {
}

// We always generate ARM code, never Thumb code, even if V8 is compiled to
// Thumb, so we require inter-working support.
#if defined(__thumb__) && !defined(__THUMB_INTERWORK__)
#error "flag -mthumb-interwork missing"
#endif


// We do not support Thumb inter-working on ARM architectures that do not
// support the blx instruction (below v5t).
#if defined(__THUMB_INTERWORK__)
#if !defined(__ARM_ARCH_5T__) && !defined(__ARM_ARCH_5TE__)
// Add tests for other versions above v5t as required.
#error "for thumb inter-working we require architecture v5t or above"
#endif
#endif


// Using blx may yield better code, so use it when required or when available.
#if defined(__THUMB_INTERWORK__) || defined(__ARM_ARCH_5__)
#define USE_BLX 1
#endif

// Using bx does not yield better code, so use it only when required.
#if defined(__THUMB_INTERWORK__)
#define USE_BX 1
#endif

void MacroAssembler::Jump(Register target, Condition cond) {
#if USE_BX
  bx(target, cond);
#else
  mov(pc, Operand(target), LeaveCC, cond);
#endif
}


void MacroAssembler::Jump(intptr_t target, RelocMode rmode, Condition cond) {
#if USE_BX
  mov(ip, Operand(target, rmode), LeaveCC, cond);
  bx(ip, cond);
#else
  mov(pc, Operand(target, rmode), LeaveCC, cond);
#endif
}


void MacroAssembler::Jump(byte* target, RelocMode rmode, Condition cond) {
  ASSERT(!is_code_target(rmode));
  Jump(reinterpret_cast<intptr_t>(target), rmode, cond);
}


void MacroAssembler::Jump(Handle<Code> code, RelocMode rmode, Condition cond) {
  ASSERT(is_code_target(rmode));
  // 'code' is always generated ARM code, never THUMB code.
  Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond);
}

void MacroAssembler::Call(Register target, Condition cond) {
#if USE_BLX
  blx(target, cond);
#else
  // Set lr for return at current pc + 8.
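  // (Reading pc on ARM yields the address of the current instruction plus 8,
  // so lr ends up pointing just past the mov of pc below.)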
  mov(lr, Operand(pc), LeaveCC, cond);
  mov(pc, Operand(target), LeaveCC, cond);
#endif
}

void MacroAssembler::Call(intptr_t target, RelocMode rmode, Condition cond) {
#if !defined(__arm__)
  if (rmode == runtime_entry) {
    mov(r2, Operand(target, rmode), LeaveCC, cond);
    // Set lr for return at current pc + 8.
    mov(lr, Operand(pc), LeaveCC, cond);
    // Instead of a ldr<cond> pc, emit a swi to notify the simulator of the
    // transition to C code; the simulator calls the runtime entry in r2.
    swi(assembler::arm::call_rt_r2);
  } else {
    // Set lr for return at current pc + 8.
    mov(lr, Operand(pc), LeaveCC, cond);
    // Emit a ldr<cond> pc, [pc + offset of target in constant pool].
    mov(pc, Operand(target, rmode), LeaveCC, cond);
  }
#else
  // Set lr for return at current pc + 8.
  mov(lr, Operand(pc), LeaveCC, cond);
  // Emit a ldr<cond> pc, [pc + offset of target in constant pool].
  mov(pc, Operand(target, rmode), LeaveCC, cond);
#endif  // !defined(__arm__)
  // If USE_BLX is defined, we could emit a 'mov ip, target', followed by a
  // 'blx ip'; however, the code would not be shorter than the above sequence
  // and the target address of the call would be referenced by the first
  // instruction rather than the second one, which would make it harder to
  // patch (two instructions before the return address, instead of one).
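  // The rejected alternative would have looked like this:
  //   mov ip, Operand(target, rmode)  // target referenced by 1st instruction
  //   blx ip                          // return address set by blx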
  ASSERT(kTargetAddrToReturnAddrDist == sizeof(Instr));
}

void MacroAssembler::Call(byte* target, RelocMode rmode, Condition cond) {
  ASSERT(!is_code_target(rmode));
  Call(reinterpret_cast<intptr_t>(target), rmode, cond);
}


void MacroAssembler::Call(Handle<Code> code, RelocMode rmode, Condition cond) {
  ASSERT(is_code_target(rmode));
  // 'code' is always generated ARM code, never THUMB code.
  Call(reinterpret_cast<intptr_t>(code.location()), rmode, cond);
}


void MacroAssembler::Ret() {
#if USE_BX
  bx(lr);
#else
  mov(pc, Operand(lr));
#endif
}

// Will clobber 4 registers: object, offset, scratch, ip. The
// register 'object' contains a heap object pointer. The heap object
// tag is shifted away.
void MacroAssembler::RecordWrite(Register object, Register offset,
                                 Register scratch) {
  // This is how much we shift the remembered set bit offset to get the
  // offset of the word in the remembered set. We divide by kBitsPerInt (32,
  // shift right 5) and then multiply by kIntSize (4, shift left 2).
  const int kRSetWordShift = 3;
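  // For example, bit offset 256 lives in word 256 / kBitsPerInt == 8 of the
  // remembered set, i.e. at byte offset 8 * kIntSize == 32, which is exactly
  // 256 >> kRSetWordShift.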

  Label fast, done;

  // First, test that the object is not in the new space. We cannot set
  // remembered set bits in the new space.
  // object: heap object pointer (with tag)
  // offset: offset to store location from the object
  and_(scratch, object, Operand(Heap::NewSpaceMask()));
  cmp(scratch, Operand(ExternalReference::new_space_start()));
  b(eq, &done);

  // Compute the bit offset in the remembered set.
  // object: heap object pointer (with tag)
  // offset: offset to store location from the object
  mov(ip, Operand(Page::kPageAlignmentMask));  // load mask only once
  and_(scratch, object, Operand(ip));  // offset into page of the object
  add(offset, scratch, Operand(offset));  // add offset into the object
  mov(offset, Operand(offset, LSR, kObjectAlignmentBits));
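  // offset now holds the word index of the store location within its page,
  // which is also its bit index in the remembered set.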

  // Compute the page address from the heap object pointer.
  // object: heap object pointer (with tag)
  // offset: bit offset of store position in the remembered set
  bic(object, object, Operand(ip));

  // If the bit offset lies beyond the normal remembered set range, it is in
  // the extra remembered set area of a large object.
  // object: page start
  // offset: bit offset of store position in the remembered set
  cmp(offset, Operand(Page::kPageSize / kPointerSize));
  b(lt, &fast);

  // Adjust the bit offset to be relative to the start of the extra
  // remembered set and the start address to be the address of the extra
  // remembered set.
  sub(offset, offset, Operand(Page::kPageSize / kPointerSize));
  // Load the array length into 'scratch' and multiply by four to get the
  // size in bytes of the elements.
  ldr(scratch, MemOperand(object, Page::kObjectStartOffset
                                  + FixedArray::kLengthOffset));
  mov(scratch, Operand(scratch, LSL, kObjectAlignmentBits));
  // Add the page header (including remembered set), array header, and array
  // body size to the page address.
  add(object, object, Operand(Page::kObjectStartOffset
                              + Array::kHeaderSize));
  add(object, object, Operand(scratch));

  bind(&fast);
  // Get address of the rset word.
  // object: start of the remembered set (page start for the fast case)
  // offset: bit offset of store position in the remembered set
  bic(scratch, offset, Operand(kBitsPerInt - 1));  // clear the bit offset
  add(object, object, Operand(scratch, LSR, kRSetWordShift));
  // Get bit offset in the rset word.
  // object: address of remembered set word
  // offset: bit offset of store position
  and_(offset, offset, Operand(kBitsPerInt - 1));

  ldr(scratch, MemOperand(object));
  mov(ip, Operand(1));
  orr(scratch, scratch, Operand(ip, LSL, offset));
  str(scratch, MemOperand(object));

  bind(&done);
}


void MacroAssembler::EnterInternalFrame() {
  // r0-r3: preserved
  int type = StackFrame::INTERNAL;

  stm(db_w, sp, cp.bit() | fp.bit() | lr.bit());
  mov(ip, Operand(Smi::FromInt(type)));
  push(ip);
  mov(ip, Operand(0));
  push(ip);  // Push an empty code cache slot.
  add(fp, sp, Operand(3 * kPointerSize));  // Adjust FP to point to saved FP.
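  // The frame now holds, from low to high address: code slot, frame type,
  // cp, the saved fp, and lr, with the new fp pointing at the saved fp.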
}


void MacroAssembler::ExitInternalFrame() {
  // r0: preserved
  // r1: preserved
  // r2: preserved

  // Drop the execution stack down to the frame pointer and restore the caller
  // frame pointer and return address.
  mov(sp, fp);
  ldm(ia_w, sp, fp.bit() | lr.bit());
}

void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    Register code_reg,
                                    Label* done,
                                    InvokeFlag flag) {
  bool definitely_matches = false;
  Label regular_invoke;

  // Check whether the expected and actual argument counts match. If not,
  // set up registers according to the contract with ArgumentsAdaptorTrampoline:
  //   r0: actual arguments count
  //   r1: function (passed through to callee)
  //   r2: expected arguments count
  //   r3: callee code entry

  // The code below is made a lot easier because the calling code already sets
  // up actual and expected registers according to the contract if values are
  // passed in registers.
  ASSERT(actual.is_immediate() || actual.reg().is(r0));
  ASSERT(expected.is_immediate() || expected.reg().is(r2));
  ASSERT((!code_constant.is_null() && code_reg.is(no_reg)) || code_reg.is(r3));

  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(r0, Operand(actual.immediate()));
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaptation code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        mov(r2, Operand(expected.immediate()));
      }
    }
  } else {
    if (actual.is_immediate()) {
      cmp(expected.reg(), Operand(actual.immediate()));
      b(eq, &regular_invoke);
      mov(r0, Operand(actual.immediate()));
    } else {
      cmp(expected.reg(), Operand(actual.reg()));
      b(eq, &regular_invoke);
    }
  }

  if (!definitely_matches) {
    if (!code_constant.is_null()) {
      mov(r3, Operand(code_constant));
      add(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag));
    }

    Handle<Code> adaptor =
        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
    if (flag == CALL_FUNCTION) {
      Call(adaptor, code_target);
      b(done);
    } else {
      Jump(adaptor, code_target);
    }
    bind(&regular_invoke);
  }
}


void MacroAssembler::InvokeCode(Register code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag) {
  Label done;

  InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
  if (flag == CALL_FUNCTION) {
    Call(code);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    Jump(code);
  }

  // Continue here if InvokePrologue handled the invocation itself because
  // of mismatched parameter counts.
  bind(&done);
}


void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocMode rmode,
                                InvokeFlag flag) {
  Label done;

  InvokePrologue(expected, actual, code, no_reg, &done, flag);
  if (flag == CALL_FUNCTION) {
    Call(code, rmode);
  } else {
    Jump(code, rmode);
  }

  // Continue here if InvokePrologue handled the invocation itself because
  // of mismatched parameter counts.
  bind(&done);
}


void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  // Contract with called JS functions requires that function is passed in r1.
  ASSERT(fun.is(r1));

  Register expected_reg = r2;
  Register code_reg = r3;

  ldr(code_reg, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
  ldr(expected_reg,
      FieldMemOperand(code_reg,
                      SharedFunctionInfo::kFormalParameterCountOffset));
  ldr(code_reg,
      MemOperand(code_reg, SharedFunctionInfo::kCodeOffset - kHeapObjectTag));
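  // Subtracting kHeapObjectTag untags the pointer for the load; this is the
  // same adjustment FieldMemOperand applies.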
  add(code_reg, code_reg, Operand(Code::kHeaderSize - kHeapObjectTag));

  ParameterCount expected(expected_reg);
  InvokeCode(code_reg, expected, actual, flag);
}

void MacroAssembler::SaveRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the contents of the registers to their memory locations.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      mov(ip, Operand(ExternalReference(Debug_Address::Register(i))));
      str(reg, MemOperand(ip));
    }
  }
}


void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the contents of the memory locations back to the registers.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      mov(ip, Operand(ExternalReference(Debug_Address::Register(i))));
      ldr(reg, MemOperand(ip));
    }
  }
}

void MacroAssembler::CopyRegistersFromMemoryToStack(Register base,
                                                    RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the contents of the memory locations to the stack and adjust base.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      mov(ip, Operand(ExternalReference(Debug_Address::Register(i))));
      ldr(ip, MemOperand(ip));
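      // NegPreIndex decrements base by one word before each store, so base
      // always tracks the top of the copied block.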
      str(ip, MemOperand(base, 4, NegPreIndex));
    }
  }
}

void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
                                                    Register scratch,
                                                    RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the contents of the stack to the memory locations and adjust base.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      mov(ip, Operand(ExternalReference(Debug_Address::Register(i))));
      ldr(scratch, MemOperand(base, 4, PostIndex));
      str(scratch, MemOperand(ip));
    }
  }
}

void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  // Adjust this code if the handler layout changes.
  ASSERT(StackHandlerConstants::kSize == 6 * kPointerSize);
  // The pc (return address) is passed in register lr.
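  // Once pushed, a handler occupies the six words asserted above, from low
  // to high address: code (TOS), next handler address, state, pp, fp, lr.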
  if (try_location == IN_JAVASCRIPT) {
    stm(db_w, sp, pp.bit() | fp.bit() | lr.bit());
    if (type == TRY_CATCH_HANDLER) {
      mov(r3, Operand(StackHandler::TRY_CATCH));
    } else {
      mov(r3, Operand(StackHandler::TRY_FINALLY));
    }
    push(r3);  // state
    mov(r3, Operand(ExternalReference(Top::k_handler_address)));
    ldr(r1, MemOperand(r3));
    push(r1);  // next sp
    str(sp, MemOperand(r3));  // chain handler
    mov(r0, Operand(Smi::FromInt(StackHandler::kCodeNotPresent)));  // new TOS
    push(r0);
  } else {
    // Must preserve r0-r4; r5-r7 are available.
    ASSERT(try_location == IN_JS_ENTRY);
    // The parameter pointer is meaningless here and fp does not point to a JS
    // frame, so we save NULL for both pp and fp. We expect the code throwing
    // an exception to check fp before dereferencing it to restore the context.
    mov(pp, Operand(0));  // set pp to NULL
    mov(ip, Operand(0));  // to save a NULL fp
    stm(db_w, sp, pp.bit() | ip.bit() | lr.bit());
    mov(r6, Operand(StackHandler::ENTRY));
    push(r6);  // state
    mov(r7, Operand(ExternalReference(Top::k_handler_address)));
    ldr(r6, MemOperand(r7));
    push(r6);  // next sp
    str(sp, MemOperand(r7));  // chain handler
    mov(r5, Operand(Smi::FromInt(StackHandler::kCodeNotPresent)));  // new TOS
    push(r5);  // flush TOS
  }
}


Register MacroAssembler::CheckMaps(JSObject* object, Register object_reg,
                                   JSObject* holder, Register holder_reg,
                                   Register scratch,
                                   Label* miss) {
  // Make sure there's no overlap between scratch and the other
  // registers.
  ASSERT(!scratch.is(object_reg) && !scratch.is(holder_reg));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 1;

  // Traverse the prototype chain from the object, checking maps along
  // the way.
  while (object != holder) {
    depth++;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(object->IsJSGlobalObject() || !object->IsAccessCheckNeeded());

    // Get the map of the current object.
    ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
    cmp(scratch, Operand(Handle<Map>(object->map())));

    // Branch on the result of the map check.
    b(ne, miss);

    // Check access rights to the global object. This has to happen
    // after the map check so that we know that the object is
    // actually a global object.
    if (object->IsJSGlobalObject()) {
      CheckAccessGlobal(reg, scratch, miss);
      // Restore scratch register to be the map of the object. In the
      // new space case below, we load the prototype from the map in
      // the scratch register.
      ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
    }

    reg = holder_reg;  // From now on the object is in holder_reg.
    JSObject* prototype = JSObject::cast(object->GetPrototype());
    if (Heap::InNewSpace(prototype)) {
      // The prototype is in new space; we cannot store a reference
      // to it in the code. Load it from the map.
      ldr(reg, FieldMemOperand(scratch, Map::kPrototypeOffset));
    } else {
      // The prototype is in old space; load it directly.
      mov(reg, Operand(Handle<JSObject>(prototype)));
    }

    // Go to the next object in the prototype chain.
    object = prototype;
  }

  // Check the holder map.
  ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
  cmp(scratch, Operand(Handle<Map>(object->map())));
  b(ne, miss);

  // Log the check depth.
  LOG(IntEvent("check-maps-depth", depth));

  // Perform a security check for access to the global object and return
  // the holder register.
  ASSERT(object == holder);
  ASSERT(object->IsJSGlobalObject() || !object->IsAccessCheckNeeded());
  if (object->IsJSGlobalObject()) {
    CheckAccessGlobal(reg, scratch, miss);
  }
  return reg;
}

void MacroAssembler::CheckAccessGlobal(Register holder_reg,
                                       Register scratch,
                                       Label* miss) {
  ASSERT(!holder_reg.is(scratch));

  // Load the security context.
  mov(scratch, Operand(Top::security_context_address()));
  ldr(scratch, MemOperand(scratch));
  // In debug mode, make sure the security context is set.
  if (kDebug) {
    cmp(scratch, Operand(0));
    Check(ne, "we should not have an empty security context");
  }

  // Load the global object of the security context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  ldr(scratch, FieldMemOperand(scratch, offset));
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  ldr(scratch, FieldMemOperand(scratch, JSGlobalObject::kSecurityTokenOffset));
  ldr(ip, FieldMemOperand(holder_reg, JSGlobalObject::kSecurityTokenOffset));
  cmp(scratch, Operand(ip));
  b(ne, miss);
}

void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // stub calls are not allowed in some stubs
  Call(stub->GetCode(), code_target);
}


void MacroAssembler::CallJSExitStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // stub calls are not allowed in some stubs
  Call(stub->GetCode(), exit_js_frame);
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  if (argc > 1)
    add(sp, sp, Operand((argc - 1) * kPointerSize));
  Ret();
}

void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // All parameters are on the stack. r0 has the return value after the call.

  // Either the expected number of arguments is unknown, or the actual
  // number of arguments matches the expectation.
  ASSERT(f->nargs < 0 || f->nargs == num_arguments);

  Runtime::FunctionId function_id =
      static_cast<Runtime::FunctionId>(f->stub_id);
  RuntimeStub stub(function_id, num_arguments);
  CallStub(&stub);
}


void MacroAssembler::CallRuntime(Runtime::FunctionId fid, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(fid), num_arguments);
}


void MacroAssembler::TailCallRuntime(const ExternalReference& ext,
                                     int num_arguments) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  mov(r0, Operand(num_arguments));
  JumpToBuiltin(ext);
}


void MacroAssembler::JumpToBuiltin(const ExternalReference& builtin) {
#if defined(__thumb__)
  // Thumb mode builtin.
  ASSERT((reinterpret_cast<intptr_t>(builtin.address()) & 1) == 1);
#endif
  mov(r1, Operand(builtin));
  CEntryStub stub;
  Jump(stub.GetCode(), code_target);
}

Handle<Code> MacroAssembler::ResolveBuiltin(Builtins::JavaScript id,
                                            bool* resolved) {
  // Contract with compiled functions is that the function is passed in r1.
  int builtins_offset =
      JSBuiltinsObject::kJSBuiltinsOffset + (id * kPointerSize);
  ldr(r1, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
  ldr(r1, FieldMemOperand(r1, GlobalObject::kBuiltinsOffset));
  ldr(r1, FieldMemOperand(r1, builtins_offset));

  return Builtins::GetCode(id, resolved);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                   InvokeJSFlags flags) {
  bool resolved;
  Handle<Code> code = ResolveBuiltin(id, &resolved);

  if (flags == CALL_JS) {
    Call(code, code_target);
  } else {
    ASSERT(flags == JUMP_JS);
    Jump(code, code_target);
  }

  if (!resolved) {
    const char* name = Builtins::GetName(id);
    int argc = Builtins::GetArgumentsCount(id);
    uint32_t flags =
        Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
        Bootstrapper::FixupFlagsIsPCRelative::encode(true);
    Unresolved entry = { pc_offset() - sizeof(Instr), flags, name };
    unresolved_.Add(entry);
  }
}

void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  bool resolved;
  Handle<Code> code = ResolveBuiltin(id, &resolved);

  mov(target, Operand(code));
  if (!resolved) {
    const char* name = Builtins::GetName(id);
    int argc = Builtins::GetArgumentsCount(id);
    uint32_t flags =
        Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
        Bootstrapper::FixupFlagsIsPCRelative::encode(true);
    Unresolved entry = { pc_offset() - sizeof(Instr), flags, name };
    unresolved_.Add(entry);
  }
}

void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code)
    Check(cc, msg);
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  b(cc, &L);
  Abort(msg);
  // Will not return here.
  bind(&L);
}

void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string as a smi to avoid GC problems; however,
  // msg is not guaranteed to be properly aligned. Instead, we pass an
  // aligned pointer that is a proper v8 smi, and we also pass the alignment
  // difference from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
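  // For example, assuming the usual kSmiTag == 0 and kSmiTagMask == 1
  // encoding, a msg pointer of 0x20003 is passed as p0 == 0x20002 (a valid
  // smi) plus Smi::FromInt(1); the runtime adds the two to recover msg.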
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  mov(r0, Operand(p0));
  push(r0);
  mov(r0, Operand(Smi::FromInt(p1 - p0)));
  push(r0);
  CallRuntime(Runtime::kAbort, 2);
  // Will not return here.
}

} }  // namespace v8::internal