// Copyright 2006-2008 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "debug.h"
#include "runtime.h"

namespace v8 {
namespace internal {

// Give alias names to registers
Register cp = { 8 };  // JavaScript context pointer
Register pp = { 10 };  // parameter pointer


MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      unresolved_(0),
      generating_stub_(false),
      allow_stub_calls_(true),
      code_object_(Heap::undefined_value()) {
}


// We always generate arm code, never thumb code, even if V8 is compiled to
// thumb, so we require inter-working support
#if defined(__thumb__) && !defined(__THUMB_INTERWORK__)
#error "flag -mthumb-interwork missing"
#endif


// We do not support thumb inter-working with an arm architecture not supporting
// the blx instruction (below v5t)
#if defined(__THUMB_INTERWORK__)
#if !defined(__ARM_ARCH_5T__) && \
    !defined(__ARM_ARCH_5TE__) && \
    !defined(__ARM_ARCH_7A__) && \
    !defined(__ARM_ARCH_7__)
// add tests for other versions above v5t as required
#error "for thumb inter-working we require architecture v5t or above"
#endif
#endif


// Using blx may yield better code, so use it when required or when available
#if defined(__THUMB_INTERWORK__) || defined(__ARM_ARCH_5__)
#define USE_BLX 1
#endif

// Using bx does not yield better code, so use it only when required
#if defined(__THUMB_INTERWORK__)
#define USE_BX 1
#endif


void MacroAssembler::Jump(Register target, Condition cond) {
#if USE_BX
  bx(target, cond);
#else
  mov(pc, Operand(target), LeaveCC, cond);
#endif
}


void MacroAssembler::Jump(intptr_t target, RelocInfo::Mode rmode,
                          Condition cond) {
#if USE_BX
  mov(ip, Operand(target, rmode), LeaveCC, cond);
  bx(ip, cond);
#else
  mov(pc, Operand(target, rmode), LeaveCC, cond);
#endif
}


void MacroAssembler::Jump(byte* target, RelocInfo::Mode rmode,
                          Condition cond) {
  ASSERT(!RelocInfo::IsCodeTarget(rmode));
  Jump(reinterpret_cast<intptr_t>(target), rmode, cond);
}


void MacroAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
                          Condition cond) {
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  // 'code' is always generated ARM code, never THUMB code
  Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond);
}


void MacroAssembler::Call(Register target, Condition cond) {
#if USE_BLX
  blx(target, cond);
#else
  // set lr for return at current pc + 8
  mov(lr, Operand(pc), LeaveCC, cond);
  mov(pc, Operand(target), LeaveCC, cond);
#endif
}


void MacroAssembler::Call(intptr_t target, RelocInfo::Mode rmode,
                          Condition cond) {
#if !defined(__arm__)
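  // Not compiling for real ARM hardware: this code runs on the simulator,
  // which recognizes transitions to C code for runtime entries by an swi
  // instruction instead of a plain branch.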
  if (rmode == RelocInfo::RUNTIME_ENTRY) {
    mov(r2, Operand(target, rmode), LeaveCC, cond);
    // Set lr for return at current pc + 8.
    mov(lr, Operand(pc), LeaveCC, cond);
    // Emit a ldr<cond> pc, [pc + offset of target in constant pool].
    // Notify the simulator of the transition to C code.
    swi(assembler::arm::call_rt_r2);
  } else {
    // set lr for return at current pc + 8
    mov(lr, Operand(pc), LeaveCC, cond);
    // emit a ldr<cond> pc, [pc + offset of target in constant pool]
    mov(pc, Operand(target, rmode), LeaveCC, cond);
  }
#else
  // Set lr for return at current pc + 8.
  mov(lr, Operand(pc), LeaveCC, cond);
  // Emit a ldr<cond> pc, [pc + offset of target in constant pool].
  mov(pc, Operand(target, rmode), LeaveCC, cond);
#endif  // !defined(__arm__)
  // If USE_BLX is defined, we could emit a 'mov ip, target', followed by a
  // 'blx ip'; however, the code would not be shorter than the above sequence
  // and the target address of the call would be referenced by the first
  // instruction rather than the second one, which would make it harder to patch
  // (two instructions before the return address, instead of one).
  ASSERT(kTargetAddrToReturnAddrDist == sizeof(Instr));
}


void MacroAssembler::Call(byte* target, RelocInfo::Mode rmode,
                          Condition cond) {
  ASSERT(!RelocInfo::IsCodeTarget(rmode));
  Call(reinterpret_cast<intptr_t>(target), rmode, cond);
}


void MacroAssembler::Call(Handle<Code> code, RelocInfo::Mode rmode,
                          Condition cond) {
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  // 'code' is always generated ARM code, never THUMB code
  Call(reinterpret_cast<intptr_t>(code.location()), rmode, cond);
}


void MacroAssembler::Ret(Condition cond) {
#if USE_BX
  bx(lr, cond);
#else
  mov(pc, Operand(lr), LeaveCC, cond);
#endif
}

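// Emits a jump table dispatch on a smi-tagged index. The add to pc below
// relies on reading pc yielding the address of the current instruction plus
// 8, which is exactly where the table of branches starts once the alignment
// nop has been emitted.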
void MacroAssembler::SmiJumpTable(Register index, Vector<Label*> targets) {
  // Empty the const pool.
  CheckConstPool(true, true);
  add(pc, pc, Operand(index,
                      LSL,
                      assembler::arm::Instr::kInstrSizeLog2 - kSmiTagSize));
  BlockConstPoolBefore(pc_offset() + (targets.length() + 1) * sizeof(Instr));
  nop();  // Jump table alignment.
  for (int i = 0; i < targets.length(); i++) {
    b(targets[i]);
  }
}


// Will clobber 4 registers: object, offset, scratch, ip. The
// register 'object' contains a heap object pointer. The heap object
// tag is shifted away.
void MacroAssembler::RecordWrite(Register object, Register offset,
                                 Register scratch) {
  // This is how much we shift the remembered set bit offset to get the
  // offset of the word in the remembered set. We divide by kBitsPerInt (32,
  // shift right 5) and then multiply by kIntSize (4, shift left 2).
  const int kRSetWordShift = 3;

  Label fast, done;

  // First, test that the object is not in the new space. We cannot set
  // remembered set bits in the new space.
  // object: heap object pointer (with tag)
  // offset: offset to store location from the object
  and_(scratch, object, Operand(Heap::NewSpaceMask()));
  cmp(scratch, Operand(ExternalReference::new_space_start()));
  b(eq, &done);

  // Compute the bit offset in the remembered set.
  // object: heap object pointer (with tag)
  // offset: offset to store location from the object
  mov(ip, Operand(Page::kPageAlignmentMask));  // load mask only once
  and_(scratch, object, Operand(ip));  // offset into page of the object
  add(offset, scratch, Operand(offset));  // add offset into the object
  mov(offset, Operand(offset, LSR, kObjectAlignmentBits));

  // Compute the page address from the heap object pointer.
  // object: heap object pointer (with tag)
  // offset: bit offset of store position in the remembered set
  bic(object, object, Operand(ip));

  // If the bit offset lies beyond the normal remembered set range, it is in
  // the extra remembered set area of a large object.
  // object: page start
  // offset: bit offset of store position in the remembered set
  cmp(offset, Operand(Page::kPageSize / kPointerSize));
  b(lt, &fast);

  // Adjust the bit offset to be relative to the start of the extra
  // remembered set and the start address to be the address of the extra
  // remembered set.
  sub(offset, offset, Operand(Page::kPageSize / kPointerSize));
  // Load the array length into 'scratch' and multiply by four to get the
  // size in bytes of the elements.
  ldr(scratch, MemOperand(object, Page::kObjectStartOffset
                                  + FixedArray::kLengthOffset));
  mov(scratch, Operand(scratch, LSL, kObjectAlignmentBits));
  // Add the page header (including remembered set), array header, and array
  // body size to the page address.
  add(object, object, Operand(Page::kObjectStartOffset + Array::kHeaderSize));
  add(object, object, Operand(scratch));

  bind(&fast);
  // Get address of the rset word.
  // object: start of the remembered set (page start for the fast case)
  // offset: bit offset of store position in the remembered set
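  // Note that (offset & ~(kBitsPerInt - 1)) >> kRSetWordShift equals
  // (offset / kBitsPerInt) * kIntSize, so clearing the low bits and doing a
  // single shift yields the byte offset of the rset word directly.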
  bic(scratch, offset, Operand(kBitsPerInt - 1));  // clear the bit offset
  add(object, object, Operand(scratch, LSR, kRSetWordShift));
  // Get bit offset in the rset word.
  // object: address of remembered set word
  // offset: bit offset of store position
  and_(offset, offset, Operand(kBitsPerInt - 1));

  ldr(scratch, MemOperand(object));
  mov(ip, Operand(1));
  orr(scratch, scratch, Operand(ip, LSL, offset));
  str(scratch, MemOperand(object));

  bind(&done);
}


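// After EnterFrame the stack looks like this (lowest address first):
//   [sp + 0 * kPointerSize]  code object
//   [sp + 1 * kPointerSize]  frame type marker (as a Smi)
//   [sp + 2 * kPointerSize]  saved cp
//   [sp + 3 * kPointerSize]  saved fp   <- fp points here
//   [sp + 4 * kPointerSize]  saved lr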
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  // r0-r3: preserved
  stm(db_w, sp, cp.bit() | fp.bit() | lr.bit());
  mov(ip, Operand(Smi::FromInt(type)));
  push(ip);
  mov(ip, Operand(CodeObject()));
  push(ip);
  add(fp, sp, Operand(3 * kPointerSize));  // Adjust FP to point to saved FP.
}


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  // r0: preserved
  // r1: preserved
  // r2: preserved

  // Drop the execution stack down to the frame pointer and restore
  // the caller frame pointer and return address.
  mov(sp, fp);
  ldm(ia_w, sp, fp.bit() | lr.bit());
}


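// After this prologue the callee-saved registers hold:
//   r4: the number of arguments (copied from r0)
//   r5: the builtin function to call (copied from r1)
//   r6: the argv pointer computed from sp and the argument count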
void MacroAssembler::EnterExitFrame(StackFrame::Type type) {
  ASSERT(type == StackFrame::EXIT || type == StackFrame::EXIT_DEBUG);

  // Compute the argv pointer and keep it in a callee-saved register.
  // r0 is argc.
  add(r6, sp, Operand(r0, LSL, kPointerSizeLog2));
  sub(r6, r6, Operand(kPointerSize));

  // Compute the parameter pointer before making changes and save it as the
  // ip register so that it is restored as the sp register on exit, thereby
  // popping the args.

  // ip = sp + kPointerSize * #args;
  add(ip, sp, Operand(r0, LSL, kPointerSizeLog2));

  // Align the stack at this point. After this point we have 5 pushes,
  // so in fact we have to unalign here! See also the assert on the
  // alignment immediately below.
  if (OS::ActivationFrameAlignment() != kPointerSize) {
    // This code needs to be made more general if this assert doesn't hold.
    ASSERT(OS::ActivationFrameAlignment() == 2 * kPointerSize);
    mov(r7, Operand(Smi::FromInt(0)));
    tst(sp, Operand(OS::ActivationFrameAlignment() - 1));
    push(r7, eq);  // Conditional push instruction.
  }

  // Push in reverse order: caller_fp, sp_on_exit, and caller_pc.
  stm(db_w, sp, fp.bit() | ip.bit() | lr.bit());
  mov(fp, Operand(sp));  // Set up the new frame pointer.

  // Push the debug marker.
  mov(ip, Operand(type == StackFrame::EXIT_DEBUG ? 1 : 0));
  push(ip);

  // Save the frame pointer and the context in top.
  mov(ip, Operand(ExternalReference(Top::k_c_entry_fp_address)));
  str(fp, MemOperand(ip));
  mov(ip, Operand(ExternalReference(Top::k_context_address)));
  str(cp, MemOperand(ip));

  // Set up argc and the builtin function in callee-saved registers.
  mov(r4, Operand(r0));
  mov(r5, Operand(r1));


#ifdef ENABLE_DEBUGGER_SUPPORT
  // Save the state of all registers to the stack from the memory
  // location. This is needed to allow nested break points.
  if (type == StackFrame::EXIT_DEBUG) {
    // Use sp as base to push.
    CopyRegistersFromMemoryToStack(sp, kJSCallerSaved);
  }
#endif
}


void MacroAssembler::LeaveExitFrame(StackFrame::Type type) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Restore the memory copy of the registers by digging them out from
  // the stack. This is needed to allow nested break points.
  if (type == StackFrame::EXIT_DEBUG) {
    // This code intentionally clobbers r2 and r3.
    const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
    const int kOffset = ExitFrameConstants::kDebugMarkOffset - kCallerSavedSize;
    add(r3, fp, Operand(kOffset));
    CopyRegistersFromStackToMemory(r3, r2, kJSCallerSaved);
  }
#endif

  // Clear the top frame.
  mov(r3, Operand(0));
  mov(ip, Operand(ExternalReference(Top::k_c_entry_fp_address)));
  str(r3, MemOperand(ip));

  // Restore the current context from top and clear it in debug mode.
  mov(ip, Operand(ExternalReference(Top::k_context_address)));
  ldr(cp, MemOperand(ip));
#ifdef DEBUG
  str(r3, MemOperand(ip));
#endif

  // Pop the arguments, restore registers, and return.
  mov(sp, Operand(fp));  // Respect the ABI stack constraint.
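  // fp, sp and pc are restored by a single ldm: the saved sp (stored as ip in
  // EnterExitFrame) points past the arguments, so restoring it pops them, and
  // loading pc from the saved lr performs the return.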
  ldm(ia, sp, fp.bit() | sp.bit() | pc.bit());
}


void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    Register code_reg,
                                    Label* done,
                                    InvokeFlag flag) {
  bool definitely_matches = false;
  Label regular_invoke;

  // Check whether the expected and actual arguments count match. If not,
  // set up registers according to contract with ArgumentsAdaptorTrampoline:
  // r0: actual arguments count
  // r1: function (passed through to callee)
  // r2: expected arguments count
  // r3: callee code entry

  // The code below is made a lot easier because the calling code already sets
  // up actual and expected registers according to the contract if values are
  // passed in registers.
  ASSERT(actual.is_immediate() || actual.reg().is(r0));
  ASSERT(expected.is_immediate() || expected.reg().is(r2));
  ASSERT((!code_constant.is_null() && code_reg.is(no_reg)) || code_reg.is(r3));

  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(r0, Operand(actual.immediate()));
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaptation code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        mov(r2, Operand(expected.immediate()));
      }
    }
  } else {
    if (actual.is_immediate()) {
      cmp(expected.reg(), Operand(actual.immediate()));
      b(eq, &regular_invoke);
      mov(r0, Operand(actual.immediate()));
    } else {
      cmp(expected.reg(), Operand(actual.reg()));
      b(eq, &regular_invoke);
    }
  }

  if (!definitely_matches) {
    if (!code_constant.is_null()) {
      mov(r3, Operand(code_constant));
      add(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag));
    }

    Handle<Code> adaptor =
        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
    if (flag == CALL_FUNCTION) {
      Call(adaptor, RelocInfo::CODE_TARGET);
      b(done);
    } else {
      Jump(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&regular_invoke);
  }
}


void MacroAssembler::InvokeCode(Register code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag) {
  Label done;

  InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
  if (flag == CALL_FUNCTION) {
    Call(code);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    Jump(code);
  }

  // Control also arrives here if InvokePrologue handled the invocation itself
  // through the arguments adaptor because of mismatched parameter counts.
  bind(&done);
}


void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag) {
  Label done;

  InvokePrologue(expected, actual, code, no_reg, &done, flag);
  if (flag == CALL_FUNCTION) {
    Call(code, rmode);
  } else {
    Jump(code, rmode);
  }

  // Control also arrives here if InvokePrologue handled the invocation itself
  // through the arguments adaptor because of mismatched parameter counts.
  bind(&done);
}


void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  // Contract with called JS functions requires that function is passed in r1.
  ASSERT(fun.is(r1));

  Register expected_reg = r2;
  Register code_reg = r3;

  ldr(code_reg, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
  ldr(expected_reg,
      FieldMemOperand(code_reg,
                      SharedFunctionInfo::kFormalParameterCountOffset));
  ldr(code_reg,
      MemOperand(code_reg, SharedFunctionInfo::kCodeOffset - kHeapObjectTag));
  add(code_reg, code_reg, Operand(Code::kHeaderSize - kHeapObjectTag));

  ParameterCount expected(expected_reg);
  InvokeCode(code_reg, expected, actual, flag);
}


#ifdef ENABLE_DEBUGGER_SUPPORT
void MacroAssembler::SaveRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of registers to memory location.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      mov(ip, Operand(ExternalReference(Debug_Address::Register(i))));
      str(reg, MemOperand(ip));
    }
  }
}


void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of memory location to registers.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      mov(ip, Operand(ExternalReference(Debug_Address::Register(i))));
      ldr(reg, MemOperand(ip));
    }
  }
}


void MacroAssembler::CopyRegistersFromMemoryToStack(Register base,
                                                    RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of the memory location to the stack and adjust base.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      mov(ip, Operand(ExternalReference(Debug_Address::Register(i))));
      ldr(ip, MemOperand(ip));
      str(ip, MemOperand(base, 4, NegPreIndex));
    }
  }
}


void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
                                                    Register scratch,
                                                    RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of the stack to the memory location and adjust base.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      mov(ip, Operand(ExternalReference(Debug_Address::Register(i))));
      ldr(scratch, MemOperand(base, 4, PostIndex));
      str(scratch, MemOperand(ip));
    }
  }
}
#endif

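// Pushes a new try handler (StackHandlerConstants::kSize bytes: the TOS code
// slot, the next handler's address, the handler state, and the saved pp, fp
// and lr) and links it into the handler chain kept in Top::k_handler_address.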
void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  ASSERT(StackHandlerConstants::kSize == 6 * kPointerSize);  // adjust this code
  // The pc (return address) is passed in register lr.
  if (try_location == IN_JAVASCRIPT) {
    stm(db_w, sp, pp.bit() | fp.bit() | lr.bit());
    if (type == TRY_CATCH_HANDLER) {
      mov(r3, Operand(StackHandler::TRY_CATCH));
    } else {
      mov(r3, Operand(StackHandler::TRY_FINALLY));
    }
    push(r3);  // state
    mov(r3, Operand(ExternalReference(Top::k_handler_address)));
    ldr(r1, MemOperand(r3));
    push(r1);  // next sp
    str(sp, MemOperand(r3));  // chain handler
    mov(r0, Operand(Smi::FromInt(StackHandler::kCodeNotPresent)));  // new TOS
    push(r0);
  } else {
    // Must preserve r0-r4; r5-r7 are available.
    ASSERT(try_location == IN_JS_ENTRY);
    // The parameter pointer is meaningless here and fp does not point to a JS
    // frame. So we save NULL for both pp and fp. We expect the code throwing an
    // exception to check fp before dereferencing it to restore the context.
    mov(pp, Operand(0));  // set pp to NULL
    mov(ip, Operand(0));  // to save a NULL fp
    stm(db_w, sp, pp.bit() | ip.bit() | lr.bit());
    mov(r6, Operand(StackHandler::ENTRY));
    push(r6);  // state
    mov(r7, Operand(ExternalReference(Top::k_handler_address)));
    ldr(r6, MemOperand(r7));
    push(r6);  // next sp
    str(sp, MemOperand(r7));  // chain handler
    mov(r5, Operand(Smi::FromInt(StackHandler::kCodeNotPresent)));  // new TOS
    push(r5);  // flush TOS
  }
}


Register MacroAssembler::CheckMaps(JSObject* object, Register object_reg,
                                   JSObject* holder, Register holder_reg,
                                   Register scratch,
                                   Label* miss) {
  // Make sure there's no overlap between scratch and the other
  // registers.
  ASSERT(!scratch.is(object_reg) && !scratch.is(holder_reg));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 1;

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  while (object != holder) {
    depth++;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

    // Get the map of the current object.
    ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
    cmp(scratch, Operand(Handle<Map>(object->map())));

    // Branch on the result of the map check.
    b(ne, miss);

    // Check access rights to the global object. This has to happen
    // after the map check so that we know that the object is
    // actually a global object.
    if (object->IsJSGlobalProxy()) {
      CheckAccessGlobalProxy(reg, scratch, miss);
      // Restore scratch register to be the map of the object. In the
      // new space case below, we load the prototype from the map in
      // the scratch register.
      ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
    }

    reg = holder_reg;  // from now the object is in holder_reg
    JSObject* prototype = JSObject::cast(object->GetPrototype());
    if (Heap::InNewSpace(prototype)) {
      // The prototype is in new space; we cannot store a reference
      // to it in the code. Load it from the map.
      ldr(reg, FieldMemOperand(scratch, Map::kPrototypeOffset));
    } else {
      // The prototype is in old space; load it directly.
      mov(reg, Operand(Handle<JSObject>(prototype)));
    }

    // Go to the next object in the prototype chain.
    object = prototype;
  }

  // Check the holder map.
  ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
  cmp(scratch, Operand(Handle<Map>(object->map())));
  b(ne, miss);

  // Log the check depth.
  LOG(IntEvent("check-maps-depth", depth));

  // Perform security check for access to the global object and return
  // the holder register.
  ASSERT(object == holder);
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
  if (object->IsJSGlobalProxy()) {
    CheckAccessGlobalProxy(reg, scratch, miss);
  }
  return reg;
}


void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));
  ASSERT(!holder_reg.is(ip));
  ASSERT(!scratch.is(ip));

  // Load current lexical context from the stack frame.
  ldr(scratch, MemOperand(fp, StandardFrameConstants::kContextOffset));
  // In debug mode, make sure the lexical context is set.
#ifdef DEBUG
  cmp(scratch, Operand(0));
  Check(ne, "we should not have an empty lexical context");
#endif

  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  ldr(scratch, FieldMemOperand(scratch, offset));
  ldr(scratch, FieldMemOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check that the context is a global context.
  if (FLAG_debug_code) {
    // TODO(119): avoid push(holder_reg)/pop(holder_reg)
    // Cannot use ip as a temporary in this verification code, because ip is
    // clobbered as part of cmp with an object Operand.
    push(holder_reg);  // Temporarily save holder on the stack.
    // Read the first word and compare to the global_context_map.
    ldr(holder_reg, FieldMemOperand(scratch, HeapObject::kMapOffset));
    cmp(holder_reg, Operand(Factory::global_context_map()));
    Check(eq, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);  // Restore holder.
  }

  // Check if both contexts are the same.
  ldr(ip, FieldMemOperand(holder_reg, JSGlobalProxy::kContextOffset));
  cmp(scratch, Operand(ip));
  b(eq, &same_contexts);

  // Check that the context is a global context.
  if (FLAG_debug_code) {
    // TODO(119): avoid push(holder_reg)/pop(holder_reg)
    // Cannot use ip as a temporary in this verification code, because ip is
    // clobbered as part of cmp with an object Operand.
    push(holder_reg);  // Temporarily save holder on the stack.
    mov(holder_reg, ip);  // Move ip to its holding place.
    cmp(holder_reg, Operand(Factory::null_value()));
    Check(ne, "JSGlobalProxy::context() should not be null.");

    ldr(holder_reg, FieldMemOperand(holder_reg, HeapObject::kMapOffset));
    cmp(holder_reg, Operand(Factory::global_context_map()));
    Check(eq, "JSGlobalObject::global_context should be a global context.");
    // Restoring ip is not needed; ip is reloaded below.
    pop(holder_reg);  // Restore holder.
    // Restore ip to the holder's context.
    ldr(ip, FieldMemOperand(holder_reg, JSGlobalProxy::kContextOffset));
  }

  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;

  ldr(scratch, FieldMemOperand(scratch, token_offset));
  ldr(ip, FieldMemOperand(ip, token_offset));
  cmp(scratch, Operand(ip));
  b(ne, miss);

  bind(&same_contexts);
}


void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // stub calls are not allowed in some stubs
  Call(stub->GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  if (argc > 1)
    add(sp, sp, Operand((argc - 1) * kPointerSize));
  Ret();
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    add(sp, sp, Operand(num_arguments * kPointerSize));
  }
  mov(r0, Operand(Factory::undefined_value()));
}


void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // All parameters are on the stack. r0 has the return value after call.

  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  Runtime::FunctionId function_id =
      static_cast<Runtime::FunctionId>(f->stub_id);
  RuntimeStub stub(function_id, num_arguments);
  CallStub(&stub);
}


void MacroAssembler::CallRuntime(Runtime::FunctionId fid, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(fid), num_arguments);
}


void MacroAssembler::TailCallRuntime(const ExternalReference& ext,
                                     int num_arguments) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  mov(r0, Operand(num_arguments));
  JumpToBuiltin(ext);
}


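// Tail call a C++ runtime routine through the CEntryStub: r1 receives the
// external reference of the routine to run, while r0 is expected to already
// hold the argument count (set by callers such as TailCallRuntime above).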
void MacroAssembler::JumpToBuiltin(const ExternalReference& builtin) {
#if defined(__thumb__)
  // Thumb mode builtin.
  ASSERT((reinterpret_cast<intptr_t>(builtin.address()) & 1) == 1);
#endif
  mov(r1, Operand(builtin));
  CEntryStub stub;
  Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
}


Handle<Code> MacroAssembler::ResolveBuiltin(Builtins::JavaScript id,
                                            bool* resolved) {
  // Contract with compiled functions is that the function is passed in r1.
  int builtins_offset =
      JSBuiltinsObject::kJSBuiltinsOffset + (id * kPointerSize);
  ldr(r1, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
  ldr(r1, FieldMemOperand(r1, GlobalObject::kBuiltinsOffset));
  ldr(r1, FieldMemOperand(r1, builtins_offset));

  return Builtins::GetCode(id, resolved);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                   InvokeJSFlags flags) {
  bool resolved;
  Handle<Code> code = ResolveBuiltin(id, &resolved);

  if (flags == CALL_JS) {
    Call(code, RelocInfo::CODE_TARGET);
  } else {
    ASSERT(flags == JUMP_JS);
    Jump(code, RelocInfo::CODE_TARGET);
  }

  if (!resolved) {
    const char* name = Builtins::GetName(id);
    int argc = Builtins::GetArgumentsCount(id);
    uint32_t flags =
        Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
        Bootstrapper::FixupFlagsIsPCRelative::encode(true) |
        Bootstrapper::FixupFlagsUseCodeObject::encode(false);
    Unresolved entry = { pc_offset() - sizeof(Instr), flags, name };
    unresolved_.Add(entry);
  }
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  bool resolved;
  Handle<Code> code = ResolveBuiltin(id, &resolved);

  mov(target, Operand(code));
  if (!resolved) {
    const char* name = Builtins::GetName(id);
    int argc = Builtins::GetArgumentsCount(id);
    uint32_t flags =
        Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
        Bootstrapper::FixupFlagsIsPCRelative::encode(true) |
        Bootstrapper::FixupFlagsUseCodeObject::encode(true);
    Unresolved entry = { pc_offset() - sizeof(Instr), flags, name };
    unresolved_.Add(entry);
  }

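  // Convert the code object into the code entry address by skipping the Code
  // header (and removing the heap object tag).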
  add(target, target, Operand(Code::kHeaderSize - kHeapObjectTag));
}


void MacroAssembler::SetCounter(StatsCounter* counter, int value,
                                Register scratch1, Register scratch2) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(scratch1, Operand(value));
    mov(scratch2, Operand(ExternalReference(counter)));
    str(scratch1, MemOperand(scratch2));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value,
                                      Register scratch1, Register scratch2) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(scratch2, Operand(ExternalReference(counter)));
    ldr(scratch1, MemOperand(scratch2));
    add(scratch1, scratch1, Operand(value));
    str(scratch1, MemOperand(scratch2));
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value,
                                      Register scratch1, Register scratch2) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(scratch2, Operand(ExternalReference(counter)));
    ldr(scratch1, MemOperand(scratch2));
    sub(scratch1, scratch1, Operand(value));
    str(scratch1, MemOperand(scratch2));
  }
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code)
    Check(cc, msg);
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  b(cc, &L);
  Abort(msg);
  // will not return here
  bind(&L);
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems; however, msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  mov(r0, Operand(p0));
  push(r0);
  mov(r0, Operand(Smi::FromInt(p1 - p0)));
  push(r0);
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
}

} }  // namespace v8::internal