// Copyright 2006-2008 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "debug.h"
#include "runtime.h"

namespace v8 { namespace internal {

// Give alias names to registers
Register cp = { 8 };  // JavaScript context pointer
Register pp = { 10 };  // parameter pointer


MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      unresolved_(0),
      generating_stub_(false),
      allow_stub_calls_(true),
      code_object_(Heap::undefined_value()) {
}


// We always generate arm code, never thumb code, even if V8 is compiled to
// thumb, so we require inter-working support
#if defined(__thumb__) && !defined(__THUMB_INTERWORK__)
#error "flag -mthumb-interwork missing"
#endif


// We do not support thumb inter-working with an arm architecture not supporting
// the blx instruction (below v5t)
#if defined(__THUMB_INTERWORK__)
#if !defined(__ARM_ARCH_5T__) && !defined(__ARM_ARCH_5TE__)
// add tests for other versions above v5t as required
#error "for thumb inter-working we require architecture v5t or above"
#endif
#endif


// Using blx may yield better code, so use it when required or when available
#if defined(__THUMB_INTERWORK__) || defined(__ARM_ARCH_5__)
#define USE_BLX 1
#endif

// Using bx does not yield better code, so use it only when required
#if defined(__THUMB_INTERWORK__)
#define USE_BX 1
#endif


void MacroAssembler::Jump(Register target, Condition cond) {
#if USE_BX
  bx(target, cond);
#else
  mov(pc, Operand(target), LeaveCC, cond);
#endif
}


void MacroAssembler::Jump(intptr_t target, RelocInfo::Mode rmode,
                          Condition cond) {
#if USE_BX
  mov(ip, Operand(target, rmode), LeaveCC, cond);
  bx(ip, cond);
#else
  mov(pc, Operand(target, rmode), LeaveCC, cond);
#endif
}


void MacroAssembler::Jump(byte* target, RelocInfo::Mode rmode,
                          Condition cond) {
  ASSERT(!RelocInfo::IsCodeTarget(rmode));
  Jump(reinterpret_cast<intptr_t>(target), rmode, cond);
}


void MacroAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
                          Condition cond) {
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  // 'code' is always generated ARM code, never THUMB code
  Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond);
}


void MacroAssembler::Call(Register target, Condition cond) {
#if USE_BLX
  blx(target, cond);
#else
  // set lr for return at current pc + 8
  mov(lr, Operand(pc), LeaveCC, cond);
  mov(pc, Operand(target), LeaveCC, cond);
#endif
}


void MacroAssembler::Call(intptr_t target, RelocInfo::Mode rmode,
                          Condition cond) {
#if !defined(__arm__)
  if (rmode == RelocInfo::RUNTIME_ENTRY) {
    mov(r2, Operand(target, rmode), LeaveCC, cond);
    // Set lr for return at current pc + 8.
    mov(lr, Operand(pc), LeaveCC, cond);
    // Emit a ldr<cond> pc, [pc + offset of target in constant pool].
    // Notify the simulator of the transition to C code.
    swi(assembler::arm::call_rt_r2);
  } else {
    // set lr for return at current pc + 8
    mov(lr, Operand(pc), LeaveCC, cond);
    // emit a ldr<cond> pc, [pc + offset of target in constant pool]
    mov(pc, Operand(target, rmode), LeaveCC, cond);
  }
#else
  // Set lr for return at current pc + 8.
  mov(lr, Operand(pc), LeaveCC, cond);
  // Emit a ldr<cond> pc, [pc + offset of target in constant pool].
  mov(pc, Operand(target, rmode), LeaveCC, cond);
#endif  // !defined(__arm__)
  // If USE_BLX is defined, we could emit a 'mov ip, target', followed by a
  // 'blx ip'; however, the code would not be shorter than the above sequence
  // and the target address of the call would be referenced by the first
  // instruction rather than the second one, which would make it harder to patch
  // (two instructions before the return address, instead of one).
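  // Roughly, the sequence emitted above is
  //   mov lr, pc                @ lr <- address of this mov + 8, i.e. the
  //                             @ instruction after the ldr (the return address)
  //   ldr pc, [pc, #<offset>]   @ load the call target from the constant pool
  // so the instruction referencing the target address is the one immediately
  // before the return address, which is what the assertion below relies on.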
  ASSERT(kTargetAddrToReturnAddrDist == sizeof(Instr));
}


void MacroAssembler::Call(byte* target, RelocInfo::Mode rmode,
                          Condition cond) {
  ASSERT(!RelocInfo::IsCodeTarget(rmode));
  Call(reinterpret_cast<intptr_t>(target), rmode, cond);
}


void MacroAssembler::Call(Handle<Code> code, RelocInfo::Mode rmode,
                          Condition cond) {
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  // 'code' is always generated ARM code, never THUMB code
  Call(reinterpret_cast<intptr_t>(code.location()), rmode, cond);
}


void MacroAssembler::Ret() {
#if USE_BX
  bx(lr);
#else
  mov(pc, Operand(lr));
#endif
}


void MacroAssembler::SmiJumpTable(Register index, Vector<Label*> targets) {
  // Empty the const pool.
  CheckConstPool(true, true);
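  // A sketch of the dispatch below: reading pc in the add yields the address
  // of the add plus 8, which (skipping the alignment nop) is the first table
  // entry; shifting the smi index by kInstrSizeLog2 - kSmiTagSize turns the
  // already once-shifted smi into a byte offset of one instruction per entry.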
  add(pc, pc, Operand(index,
                      LSL,
                      assembler::arm::Instr::kInstrSizeLog2 - kSmiTagSize));
  BlockConstPoolBefore(pc_offset() + (targets.length() + 1) * sizeof(Instr));
  nop();  // Jump table alignment.
  for (int i = 0; i < targets.length(); i++) {
    b(targets[i]);
  }
}


// Will clobber 4 registers: object, offset, scratch, ip.  The
// register 'object' contains a heap object pointer.  The heap object
// tag is shifted away.
void MacroAssembler::RecordWrite(Register object, Register offset,
                                 Register scratch) {
  // This is how much we shift the remembered set bit offset to get the
  // offset of the word in the remembered set.  We divide by kBitsPerInt (32,
  // shift right 5) and then multiply by kIntSize (4, shift left 2).
  const int kRSetWordShift = 3;
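  // For example, a bit offset of 70 lands in remembered set word 70 / 32 = 2,
  // i.e. at byte offset 2 * 4 = 8, which is exactly 70 >> kRSetWordShift.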

  Label fast, done;

  // First, test that the object is not in the new space.  We cannot set
  // remembered set bits in the new space.
  // object: heap object pointer (with tag)
  // offset: offset to store location from the object
  and_(scratch, object, Operand(Heap::NewSpaceMask()));
  cmp(scratch, Operand(ExternalReference::new_space_start()));
  b(eq, &done);

  // Compute the bit offset in the remembered set.
  // object: heap object pointer (with tag)
  // offset: offset to store location from the object
  mov(ip, Operand(Page::kPageAlignmentMask));  // load mask only once
  and_(scratch, object, Operand(ip));  // offset into page of the object
  add(offset, scratch, Operand(offset));  // add offset into the object
  mov(offset, Operand(offset, LSR, kObjectAlignmentBits));

  // Compute the page address from the heap object pointer.
  // object: heap object pointer (with tag)
  // offset: bit offset of store position in the remembered set
  bic(object, object, Operand(ip));

  // If the bit offset lies beyond the normal remembered set range, it is in
  // the extra remembered set area of a large object.
  // object: page start
  // offset: bit offset of store position in the remembered set
  cmp(offset, Operand(Page::kPageSize / kPointerSize));
  b(lt, &fast);

  // Adjust the bit offset to be relative to the start of the extra
  // remembered set and the start address to be the address of the extra
  // remembered set.
  sub(offset, offset, Operand(Page::kPageSize / kPointerSize));
  // Load the array length into 'scratch' and multiply by four to get the
  // size in bytes of the elements.
  ldr(scratch, MemOperand(object, Page::kObjectStartOffset
                                  + FixedArray::kLengthOffset));
  mov(scratch, Operand(scratch, LSL, kObjectAlignmentBits));
  // Add the page header (including remembered set), array header, and array
  // body size to the page address.
  add(object, object, Operand(Page::kObjectStartOffset
                              + Array::kHeaderSize));
  add(object, object, Operand(scratch));

  bind(&fast);
  // Get address of the rset word.
  // object: start of the remembered set (page start for the fast case)
  // offset: bit offset of store position in the remembered set
  bic(scratch, offset, Operand(kBitsPerInt - 1));  // clear the bit offset
  add(object, object, Operand(scratch, LSR, kRSetWordShift));
  // Get bit offset in the rset word.
  // object: address of remembered set word
  // offset: bit offset of store position
  and_(offset, offset, Operand(kBitsPerInt - 1));

  ldr(scratch, MemOperand(object));
  mov(ip, Operand(1));
  orr(scratch, scratch, Operand(ip, LSL, offset));
  str(scratch, MemOperand(object));

  bind(&done);
}


void MacroAssembler::EnterFrame(StackFrame::Type type) {
  // r0-r3: preserved
  stm(db_w, sp, cp.bit() | fp.bit() | lr.bit());
  mov(ip, Operand(Smi::FromInt(type)));
  push(ip);
  mov(ip, Operand(CodeObject()));
  push(ip);
  add(fp, sp, Operand(3 * kPointerSize));  // Adjust FP to point to saved FP.
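  // Resulting layout, as a sketch derived from the pushes above:
  //   [fp + 4]  return address (lr)
  //   [fp]      caller's frame pointer
  //   [fp - 4]  context (cp)
  //   [fp - 8]  frame type marker (a Smi)
  //   [fp - 12] code object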
}


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  // r0: preserved
  // r1: preserved
  // r2: preserved

  // Drop the execution stack down to the frame pointer and restore
  // the caller frame pointer and return address.
  mov(sp, fp);
  ldm(ia_w, sp, fp.bit() | lr.bit());
}


void MacroAssembler::EnterExitFrame(StackFrame::Type type) {
  ASSERT(type == StackFrame::EXIT || type == StackFrame::EXIT_DEBUG);
  // Compute parameter pointer before making changes and save it as ip
  // register so that it is restored as sp register on exit, thereby
  // popping the args.

  // ip = sp + kPointerSize * #args;
  add(ip, sp, Operand(r0, LSL, kPointerSizeLog2));

  // Push in reverse order: caller_fp, sp_on_exit, and caller_pc.
  stm(db_w, sp, fp.bit() | ip.bit() | lr.bit());
  mov(fp, Operand(sp));  // setup new frame pointer

  // Push debug marker.
  mov(ip, Operand(type == StackFrame::EXIT_DEBUG ? 1 : 0));
  push(ip);

  // Save the frame pointer and the context in top.
  mov(ip, Operand(ExternalReference(Top::k_c_entry_fp_address)));
  str(fp, MemOperand(ip));
  mov(ip, Operand(ExternalReference(Top::k_context_address)));
  str(cp, MemOperand(ip));

  // Setup argc and the builtin function in callee-saved registers.
  mov(r4, Operand(r0));
  mov(r5, Operand(r1));

  // Compute the argv pointer and keep it in a callee-saved register.
  add(r6, fp, Operand(r4, LSL, kPointerSizeLog2));
  add(r6, r6, Operand(ExitFrameConstants::kPPDisplacement - kPointerSize));

  // Save the state of all registers to the stack from the memory
  // location. This is needed to allow nested break points.
  if (type == StackFrame::EXIT_DEBUG) {
    // Use sp as base to push.
    CopyRegistersFromMemoryToStack(sp, kJSCallerSaved);
  }
}


void MacroAssembler::LeaveExitFrame(StackFrame::Type type) {
  // Restore the memory copy of the registers by digging them out from
  // the stack. This is needed to allow nested break points.
  if (type == StackFrame::EXIT_DEBUG) {
    // This code intentionally clobbers r2 and r3.
    const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
    const int kOffset = ExitFrameConstants::kDebugMarkOffset - kCallerSavedSize;
    add(r3, fp, Operand(kOffset));
    CopyRegistersFromStackToMemory(r3, r2, kJSCallerSaved);
  }

  // Clear top frame.
  mov(r3, Operand(0));
  mov(ip, Operand(ExternalReference(Top::k_c_entry_fp_address)));
  str(r3, MemOperand(ip));

  // Restore current context from top and clear it in debug mode.
  mov(ip, Operand(ExternalReference(Top::k_context_address)));
  ldr(cp, MemOperand(ip));
  if (kDebug) {
    str(r3, MemOperand(ip));
  }

  // Pop the arguments, restore registers, and return.
  mov(sp, Operand(fp));  // respect ABI stack constraint
  ldm(ia, sp, fp.bit() | sp.bit() | pc.bit());
}


void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    Register code_reg,
                                    Label* done,
                                    InvokeFlag flag) {
  bool definitely_matches = false;
  Label regular_invoke;

  // Check whether the expected and actual arguments count match. If not,
  // setup registers according to contract with ArgumentsAdaptorTrampoline:
  //  r0: actual arguments count
  //  r1: function (passed through to callee)
  //  r2: expected arguments count
  //  r3: callee code entry

  // The code below is made a lot easier because the calling code already sets
  // up actual and expected registers according to the contract if values are
  // passed in registers.
  ASSERT(actual.is_immediate() || actual.reg().is(r0));
  ASSERT(expected.is_immediate() || expected.reg().is(r2));
  ASSERT((!code_constant.is_null() && code_reg.is(no_reg)) || code_reg.is(r3));

  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(r0, Operand(actual.immediate()));
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaptation code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        mov(r2, Operand(expected.immediate()));
      }
    }
  } else {
    if (actual.is_immediate()) {
      cmp(expected.reg(), Operand(actual.immediate()));
      b(eq, &regular_invoke);
      mov(r0, Operand(actual.immediate()));
    } else {
      cmp(expected.reg(), Operand(actual.reg()));
      b(eq, &regular_invoke);
    }
  }

  if (!definitely_matches) {
    if (!code_constant.is_null()) {
      mov(r3, Operand(code_constant));
      add(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag));
    }

    Handle<Code> adaptor =
        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
    if (flag == CALL_FUNCTION) {
      Call(adaptor, RelocInfo::CODE_TARGET);
      b(done);
    } else {
      Jump(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&regular_invoke);
  }
}


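// Illustrative use (an assumed call-site sketch, not code from this file): the
// register variants below expect the actual count in r0, the expected count in
// r2 and the code entry in r3, matching the asserts in InvokePrologue, e.g.
//   ParameterCount actual(r0);
//   ParameterCount expected(r2);
//   InvokeCode(r3, expected, actual, CALL_FUNCTION);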
void MacroAssembler::InvokeCode(Register code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag) {
  Label done;

  InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
  if (flag == CALL_FUNCTION) {
    Call(code);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    Jump(code);
  }

  // Continue here if InvokePrologue does handle the invocation due to
  // mismatched parameter counts.
  bind(&done);
}


void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag) {
  Label done;

  InvokePrologue(expected, actual, code, no_reg, &done, flag);
  if (flag == CALL_FUNCTION) {
    Call(code, rmode);
  } else {
    Jump(code, rmode);
  }

  // Continue here if InvokePrologue does handle the invocation due to
  // mismatched parameter counts.
  bind(&done);
}


void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  // Contract with called JS functions requires that function is passed in r1.
  ASSERT(fun.is(r1));

  Register expected_reg = r2;
  Register code_reg = r3;

  ldr(code_reg, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
  ldr(expected_reg,
      FieldMemOperand(code_reg,
                      SharedFunctionInfo::kFormalParameterCountOffset));
  ldr(code_reg,
      MemOperand(code_reg, SharedFunctionInfo::kCodeOffset - kHeapObjectTag));
  add(code_reg, code_reg, Operand(Code::kHeaderSize - kHeapObjectTag));

  ParameterCount expected(expected_reg);
  InvokeCode(code_reg, expected, actual, flag);
}


void MacroAssembler::SaveRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of registers to memory location.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      mov(ip, Operand(ExternalReference(Debug_Address::Register(i))));
      str(reg, MemOperand(ip));
    }
  }
}


void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of memory location to registers.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      mov(ip, Operand(ExternalReference(Debug_Address::Register(i))));
      ldr(reg, MemOperand(ip));
    }
  }
}


void MacroAssembler::CopyRegistersFromMemoryToStack(Register base,
                                                    RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of the memory location to the stack and adjust base.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      mov(ip, Operand(ExternalReference(Debug_Address::Register(i))));
      ldr(ip, MemOperand(ip));
      str(ip, MemOperand(base, 4, NegPreIndex));
    }
  }
}


void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
                                                    Register scratch,
                                                    RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of the stack to the memory location and adjust base.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      mov(ip, Operand(ExternalReference(Debug_Address::Register(i))));
      ldr(scratch, MemOperand(base, 4, PostIndex));
      str(scratch, MemOperand(ip));
    }
  }
}


void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  ASSERT(StackHandlerConstants::kSize == 6 * kPointerSize);  // adjust this code
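  // Both branches below push six words; from higher to lower addresses these
  // are lr (return address), fp, pp, the handler state, the address of the
  // next handler, and a code slot that becomes the new top of stack.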
  // The pc (return address) is passed in register lr.
  if (try_location == IN_JAVASCRIPT) {
    stm(db_w, sp, pp.bit() | fp.bit() | lr.bit());
    if (type == TRY_CATCH_HANDLER) {
      mov(r3, Operand(StackHandler::TRY_CATCH));
    } else {
      mov(r3, Operand(StackHandler::TRY_FINALLY));
    }
    push(r3);  // state
    mov(r3, Operand(ExternalReference(Top::k_handler_address)));
    ldr(r1, MemOperand(r3));
    push(r1);  // next sp
    str(sp, MemOperand(r3));  // chain handler
    mov(r0, Operand(Smi::FromInt(StackHandler::kCodeNotPresent)));  // new TOS
    push(r0);
  } else {
    // Must preserve r0-r4, r5-r7 are available.
    ASSERT(try_location == IN_JS_ENTRY);
    // The parameter pointer is meaningless here and fp does not point to a JS
    // frame. So we save NULL for both pp and fp. We expect the code throwing an
    // exception to check fp before dereferencing it to restore the context.
    mov(pp, Operand(0));  // set pp to NULL
    mov(ip, Operand(0));  // to save a NULL fp
    stm(db_w, sp, pp.bit() | ip.bit() | lr.bit());
    mov(r6, Operand(StackHandler::ENTRY));
    push(r6);  // state
    mov(r7, Operand(ExternalReference(Top::k_handler_address)));
    ldr(r6, MemOperand(r7));
    push(r6);  // next sp
    str(sp, MemOperand(r7));  // chain handler
    mov(r5, Operand(Smi::FromInt(StackHandler::kCodeNotPresent)));  // new TOS
    push(r5);  // flush TOS
  }
}


Register MacroAssembler::CheckMaps(JSObject* object, Register object_reg,
                                   JSObject* holder, Register holder_reg,
                                   Register scratch,
                                   Label* miss) {
  // Make sure there's no overlap between scratch and the other
  // registers.
  ASSERT(!scratch.is(object_reg) && !scratch.is(holder_reg));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 1;

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  while (object != holder) {
    depth++;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

    // Get the map of the current object.
    ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
    cmp(scratch, Operand(Handle<Map>(object->map())));

    // Branch on the result of the map check.
    b(ne, miss);

    // Check access rights to the global object. This has to happen
    // after the map check so that we know that the object is
    // actually a global object.
    if (object->IsJSGlobalProxy()) {
      CheckAccessGlobalProxy(reg, scratch, miss);
      // Restore scratch register to be the map of the object.  In the
      // new space case below, we load the prototype from the map in
      // the scratch register.
      ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
    }

    reg = holder_reg;  // from now the object is in holder_reg
    JSObject* prototype = JSObject::cast(object->GetPrototype());
    if (Heap::InNewSpace(prototype)) {
      // The prototype is in new space; we cannot store a reference
      // to it in the code. Load it from the map.
      ldr(reg, FieldMemOperand(scratch, Map::kPrototypeOffset));
    } else {
      // The prototype is in old space; load it directly.
      mov(reg, Operand(Handle<JSObject>(prototype)));
    }

    // Go to the next object in the prototype chain.
    object = prototype;
  }

  // Check the holder map.
  ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
  cmp(scratch, Operand(Handle<Map>(object->map())));
  b(ne, miss);

  // Log the check depth.
  LOG(IntEvent("check-maps-depth", depth));

  // Perform security check for access to the global object and return
  // the holder register.
  ASSERT(object == holder);
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
  if (object->IsJSGlobalProxy()) {
    CheckAccessGlobalProxy(reg, scratch, miss);
  }
  return reg;
}


void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));
  ASSERT(!holder_reg.is(ip));
  ASSERT(!scratch.is(ip));

  // Load current lexical context from the stack frame.
  ldr(scratch, MemOperand(fp, StandardFrameConstants::kContextOffset));
  // In debug mode, make sure the lexical context is set.
  if (kDebug) {
    cmp(scratch, Operand(0));
    Check(ne, "we should not have an empty lexical context");
  }

  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  ldr(scratch, FieldMemOperand(scratch, offset));
  ldr(scratch, FieldMemOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    // TODO(119): avoid push(holder_reg)/pop(holder_reg)
    // Cannot use ip as a temporary in this verification code, because ip is
    // clobbered as part of cmp with an object Operand.
    push(holder_reg);  // Temporarily save holder on the stack.
    // Read the first word and compare to the global_context_map.
    ldr(holder_reg, FieldMemOperand(scratch, HeapObject::kMapOffset));
    cmp(holder_reg, Operand(Factory::global_context_map()));
    Check(eq, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);  // Restore holder.
  }

  // Check if both contexts are the same.
  ldr(ip, FieldMemOperand(holder_reg, JSGlobalProxy::kContextOffset));
  cmp(scratch, Operand(ip));
  b(eq, &same_contexts);

  // Check the context is a global context.
  if (FLAG_debug_code) {
    // TODO(119): avoid push(holder_reg)/pop(holder_reg)
    // Cannot use ip as a temporary in this verification code, because ip is
    // clobbered as part of cmp with an object Operand.
    push(holder_reg);  // Temporarily save holder on the stack.
    mov(holder_reg, ip);  // Move ip to its holding place.
    cmp(holder_reg, Operand(Factory::null_value()));
    Check(ne, "JSGlobalProxy::context() should not be null.");

    ldr(holder_reg, FieldMemOperand(holder_reg, HeapObject::kMapOffset));
    cmp(holder_reg, Operand(Factory::global_context_map()));
    Check(eq, "JSGlobalObject::global_context should be a global context.");
    // Restoring ip is not needed; ip is reloaded below.
    pop(holder_reg);  // Restore holder.
    // Restore ip to holder's context.
    ldr(ip, FieldMemOperand(holder_reg, JSGlobalProxy::kContextOffset));
  }

  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;

  ldr(scratch, FieldMemOperand(scratch, token_offset));
  ldr(ip, FieldMemOperand(ip, token_offset));
  cmp(scratch, Operand(ip));
  b(ne, miss);

  bind(&same_contexts);
}


void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // stub calls are not allowed in some stubs
  Call(stub->GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  if (argc > 1)
    add(sp, sp, Operand((argc - 1) * kPointerSize));
  Ret();
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    add(sp, sp, Operand(num_arguments * kPointerSize));
  }
  mov(r0, Operand(Factory::undefined_value()));
}


void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // All parameters are on the stack.  r0 has the return value after call.

  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  Runtime::FunctionId function_id =
      static_cast<Runtime::FunctionId>(f->stub_id);
  RuntimeStub stub(function_id, num_arguments);
  CallStub(&stub);
}


void MacroAssembler::CallRuntime(Runtime::FunctionId fid, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(fid), num_arguments);
}
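// Illustrative call-site sketch (assumed, not code from this file): arguments
// are pushed on the stack first and the count is passed explicitly, with any
// result coming back in r0, e.g.
//   push(r0);
//   push(r1);
//   CallRuntime(Runtime::kAbort, 2);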


void MacroAssembler::TailCallRuntime(const ExternalReference& ext,
                                     int num_arguments) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  mov(r0, Operand(num_arguments));
  JumpToBuiltin(ext);
}


void MacroAssembler::JumpToBuiltin(const ExternalReference& builtin) {
#if defined(__thumb__)
  // Thumb mode builtin.
  ASSERT((reinterpret_cast<intptr_t>(builtin.address()) & 1) == 1);
#endif
  mov(r1, Operand(builtin));
  CEntryStub stub;
  Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
}


Handle<Code> MacroAssembler::ResolveBuiltin(Builtins::JavaScript id,
                                            bool* resolved) {
  // Contract with compiled functions is that the function is passed in r1.
  int builtins_offset =
      JSBuiltinsObject::kJSBuiltinsOffset + (id * kPointerSize);
  ldr(r1, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
  ldr(r1, FieldMemOperand(r1, GlobalObject::kBuiltinsOffset));
  ldr(r1, FieldMemOperand(r1, builtins_offset));

  return Builtins::GetCode(id, resolved);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                   InvokeJSFlags flags) {
  bool resolved;
  Handle<Code> code = ResolveBuiltin(id, &resolved);

  if (flags == CALL_JS) {
    Call(code, RelocInfo::CODE_TARGET);
  } else {
    ASSERT(flags == JUMP_JS);
    Jump(code, RelocInfo::CODE_TARGET);
  }

  if (!resolved) {
    const char* name = Builtins::GetName(id);
    int argc = Builtins::GetArgumentsCount(id);
    uint32_t flags =
        Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
        Bootstrapper::FixupFlagsIsPCRelative::encode(true) |
        Bootstrapper::FixupFlagsUseCodeObject::encode(false);
    Unresolved entry = { pc_offset() - sizeof(Instr), flags, name };
    unresolved_.Add(entry);
  }
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  bool resolved;
  Handle<Code> code = ResolveBuiltin(id, &resolved);

  mov(target, Operand(code));
  if (!resolved) {
    const char* name = Builtins::GetName(id);
    int argc = Builtins::GetArgumentsCount(id);
    uint32_t flags =
        Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
        Bootstrapper::FixupFlagsIsPCRelative::encode(true) |
        Bootstrapper::FixupFlagsUseCodeObject::encode(true);
    Unresolved entry = { pc_offset() - sizeof(Instr), flags, name };
    unresolved_.Add(entry);
  }

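  // The raw entry point lies Code::kHeaderSize bytes past the start of the
  // (tagged) code object, hence the tag-adjusted offset added below.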
  add(target, target, Operand(Code::kHeaderSize - kHeapObjectTag));
}


void MacroAssembler::SetCounter(StatsCounter* counter, int value,
                                Register scratch1, Register scratch2) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(scratch1, Operand(value));
    mov(scratch2, Operand(ExternalReference(counter)));
    str(scratch1, MemOperand(scratch2));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value,
                                      Register scratch1, Register scratch2) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(scratch2, Operand(ExternalReference(counter)));
    ldr(scratch1, MemOperand(scratch2));
    add(scratch1, scratch1, Operand(value));
    str(scratch1, MemOperand(scratch2));
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value,
                                      Register scratch1, Register scratch2) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(scratch2, Operand(ExternalReference(counter)));
    ldr(scratch1, MemOperand(scratch2));
    sub(scratch1, scratch1, Operand(value));
    str(scratch1, MemOperand(scratch2));
  }
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code)
    Check(cc, msg);
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  b(cc, &L);
  Abort(msg);
  // will not return here
  bind(&L);
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems; however, msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
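  // For example, assuming a one-bit smi tag, msg at address 0x2001 would be
  // passed as p0 == 0x2000 together with Smi::FromInt(1), letting the runtime
  // reconstruct the original, possibly unaligned, pointer.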
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  mov(r0, Operand(p0));
  push(r0);
  mov(r0, Operand(Smi::FromInt(p1 - p0)));
  push(r0);
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
}

} }  // namespace v8::internal