// Copyright 2006-2008 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "debug.h"
#include "runtime.h"
#include "serialize.h"

namespace v8 { namespace internal {

MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      unresolved_(0),
      generating_stub_(false),
      allow_stub_calls_(true) {
}


static void RecordWriteHelper(MacroAssembler* masm,
                              Register object,
                              Register addr,
                              Register scratch) {
  Label fast;

  // Compute the page address from the heap object pointer, leave it
  // in 'object'.
  masm->and_(object, ~Page::kPageAlignmentMask);

  // Compute the bit address in the remembered set, leave it in 'addr'.
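  // (For instance, assuming pointer-size object alignment on ia32, a slot
  // at page offset 0x40 maps to bit 0x40 >> kObjectAlignmentBits == 16 of
  // the remembered set.)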
  masm->sub(addr, Operand(object));
  masm->shr(addr, kObjectAlignmentBits);

  // If the bit offset lies beyond the normal remembered set range, it is in
  // the extra remembered set area of a large object.
  masm->cmp(addr, Page::kPageSize / kPointerSize);
  masm->j(less, &fast);

  // Adjust 'addr' to be relative to the start of the extra remembered set
  // and the page address in 'object' to be the address of the extra
  // remembered set.
  masm->sub(Operand(addr), Immediate(Page::kPageSize / kPointerSize));
  // Load the array length into 'scratch' and multiply by four to get the
  // size in bytes of the elements.
  masm->mov(scratch, Operand(object, Page::kObjectStartOffset
                                     + FixedArray::kLengthOffset));
  masm->shl(scratch, kObjectAlignmentBits);
  // Add the page header, array header, and array body size to the page
  // address.
  masm->add(Operand(object), Immediate(Page::kObjectStartOffset
                                       + Array::kHeaderSize));
  masm->add(object, Operand(scratch));

  // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
  // to limit code size. We should probably evaluate this decision by
  // measuring the performance of an equivalent implementation using
  // "simpler" instructions.
  masm->bind(&fast);
  masm->bts(Operand(object, 0), addr);
}


class RecordWriteStub : public CodeStub {
 public:
  RecordWriteStub(Register object, Register addr, Register scratch)
      : object_(object), addr_(addr), scratch_(scratch) { }

  void Generate(MacroAssembler* masm);

 private:
  Register object_;
  Register addr_;
  Register scratch_;

  const char* GetName() { return "RecordWriteStub"; }

#ifdef DEBUG
  void Print() {
    PrintF("RecordWriteStub (object reg %d), (addr reg %d), (scratch reg %d)\n",
           object_.code(), addr_.code(), scratch_.code());
  }
#endif

  // Minor key encoding in 12 bits of three registers (object, address and
  // scratch) OOOOAAAASSSS.
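  // For example (hypothetical register assignment): object in edx (code 2),
  // address in ecx (code 1) and scratch in eax (code 0) encode as 0x210
  // (OOOO = 0010, AAAA = 0001, SSSS = 0000).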
  class ScratchBits: public BitField<uint32_t, 0, 4> {};
  class AddressBits: public BitField<uint32_t, 4, 4> {};
  class ObjectBits: public BitField<uint32_t, 8, 4> {};

  Major MajorKey() { return RecordWrite; }

  int MinorKey() {
    // Encode the registers.
    return ObjectBits::encode(object_.code()) |
           AddressBits::encode(addr_.code()) |
           ScratchBits::encode(scratch_.code());
  }
};


void RecordWriteStub::Generate(MacroAssembler* masm) {
  RecordWriteHelper(masm, object_, addr_, scratch_);
  masm->ret(0);
}


// Set the remembered set bit for [object+offset].
// object is the object being stored into, value is the object being stored.
// If offset is zero, then the scratch register contains the array index into
// the elements array represented as a Smi.
// All registers are clobbered by the operation.
void MacroAssembler::RecordWrite(Register object, int offset,
                                 Register value, Register scratch) {
  // First, check if a remembered set write is even needed. The tests below
  // catch stores of Smis and stores into the young generation (which does
  // not have space for the remembered set bits).
  Label done;

  // This optimization cannot survive serialization and deserialization,
  // so we disable it as long as serialization can take place.
  int32_t new_space_start =
      reinterpret_cast<int32_t>(ExternalReference::new_space_start().address());
  if (Serializer::enabled() || new_space_start < 0) {
    // Cannot do the smart bit-twiddling; do two consecutive checks instead.
    // Check for Smi first.
    test(value, Immediate(kSmiTagMask));
    j(zero, &done);
    // Test that the object address is not in the new space. We cannot
    // set remembered set bits in the new space.
    mov(value, Operand(object));
    and_(value, Heap::NewSpaceMask());
    cmp(Operand(value), Immediate(ExternalReference::new_space_start()));
    j(equal, &done);
  } else {
    // Move the value's Smi tag into the sign bit.
    shl(value, 31);
    // Combine the object address with the value's Smi tag.
    or_(value, Operand(object));
    // Remove the uninteresting bits inside the page.
    and_(value, Heap::NewSpaceMask() | (1 << 31));
    // The xor has two effects:
    // - if the value was a smi, the result will be negative
    // - if the object points into the new space area, the page bits will
    //   all be zero
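    // (Concretely: a smi has tag bit 0, so after the shl its sign bit is 0
    // and the xor with (1 << 31) makes the result negative. A heap pointer
    // into new space leaves exactly (1 << 31) plus the new space page bits,
    // which the xor clears to zero. The less_equal below, sign set or zero
    // set, therefore catches both cases.)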
    xor_(value, new_space_start | (1 << 31));
    // Check for both conditions in one branch.
    j(less_equal, &done);
  }

  if ((offset > 0) && (offset < Page::kMaxHeapObjectSize)) {
    // Compute the bit offset in the remembered set, leave it in 'value'.
    mov(value, Operand(object));
    and_(value, Page::kPageAlignmentMask);
    add(Operand(value), Immediate(offset));
    shr(value, kObjectAlignmentBits);

    // Compute the page address from the heap object pointer, leave it in
    // 'object'.
    and_(object, ~Page::kPageAlignmentMask);

    // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
    // to limit code size. We should probably evaluate this decision by
    // measuring the performance of an equivalent implementation using
    // "simpler" instructions.
    bts(Operand(object, 0), value);
  } else {
    Register dst = scratch;
    if (offset != 0) {
      lea(dst, Operand(object, offset));
    } else {
      // Array access: calculate the destination address in the same manner
      // as KeyedStoreIC::GenerateGeneric.
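      // The index in dst is a smi, i.e. the array index shifted left by one
      // tag bit, so scaling it with times_2 produces index * kPointerSize
      // as the byte offset into the elements.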
      lea(dst,
          Operand(object, dst, times_2, Array::kHeaderSize - kHeapObjectTag));
    }
    // If we are already generating a shared stub, calling a record write
    // stub instead of inlining the code would not save us any memory.
    if (generating_stub()) {
      RecordWriteHelper(this, object, dst, value);
    } else {
      RecordWriteStub stub(object, dst, value);
      CallStub(&stub);
    }
  }

  bind(&done);
}


void MacroAssembler::SaveRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the contents of the registers to the corresponding memory locations.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(Operand::StaticVariable(reg_addr), reg);
    }
  }
}


void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the contents of the memory locations back into the registers.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(reg, Operand::StaticVariable(reg_addr));
    }
  }
}


void MacroAssembler::PushRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Push the contents of the memory locations onto the stack.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      push(Operand::StaticVariable(reg_addr));
    }
  }
}


void MacroAssembler::PopRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Pop the contents from the stack back into the memory locations.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      pop(Operand::StaticVariable(reg_addr));
    }
  }
}


void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
                                                    Register scratch,
                                                    RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the contents of the stack to the memory locations and adjust base.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      mov(scratch, Operand(base, 0));
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(Operand::StaticVariable(reg_addr), scratch);
      lea(base, Operand(base, kPointerSize));
    }
  }
}


void MacroAssembler::Set(Register dst, const Immediate& x) {
  if (x.is_zero()) {
    xor_(dst, Operand(dst));  // shorter than mov
  } else {
    mov(Operand(dst), x);
  }
}


void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}


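// Compare the two values on top of the FPU stack, popping both, and move
// the resulting FPU condition codes into EFLAGS (via ax and sahf, with eax
// preserved) so that the ordinary integer condition jumps can be used on
// the result.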
void MacroAssembler::FCmp() {
  fcompp();
  push(eax);
  fnstsw_ax();
  sahf();
  pop(eax);
}


void MacroAssembler::EnterInternalFrame() {
  int type = StackFrame::INTERNAL;

  push(ebp);
  mov(ebp, Operand(esp));
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(0));  // Push an empty code cache slot.
}


void MacroAssembler::LeaveInternalFrame() {
  if (FLAG_debug_code) {
    StackFrame::Type type = StackFrame::INTERNAL;
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, "stack frame types must match");
  }
  leave();
}


void MacroAssembler::EnterExitFrame(StackFrame::Type type) {
  ASSERT(type == StackFrame::EXIT || type == StackFrame::EXIT_DEBUG);

  // Set up the frame structure on the stack.
  ASSERT(ExitFrameConstants::kPPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(ebp);
  mov(ebp, Operand(esp));

  // Reserve room for the entry stack pointer and push the debug marker.
  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  push(Immediate(type == StackFrame::EXIT_DEBUG ? 1 : 0));

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  ExternalReference context_address(Top::k_context_address);
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);

  // Set up argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, Operand(eax));
  lea(esi, Operand(ebp, eax, times_4, offset));
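  // edi now holds argc and esi the argv pointer; both survive the call to
  // the C function because edi and esi are callee-saved in the ia32 C
  // calling convention.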

  // Save the state of all registers to the stack from the memory
  // location. This is needed to allow nested break points.
  if (type == StackFrame::EXIT_DEBUG) {
    // TODO(1243899): This should be symmetric to
    // CopyRegistersFromStackToMemory() but it isn't! esp is assumed
    // correct here, but computed for the other call. Very error
    // prone! FIX THIS. Actually there are deeper problems with
    // register saving than this asymmetry (see the bug report
    // associated with this issue).
    PushRegistersFromMemory(kJSCallerSaved);
  }

  // Reserve space for two arguments: argc and argv.
  sub(Operand(esp), Immediate(2 * kPointerSize));

  // Get the required frame alignment for the OS.
  static const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}


void MacroAssembler::LeaveExitFrame(StackFrame::Type type) {
  // Restore the memory copy of the registers by digging them out from
  // the stack. This is needed to allow nested break points.
  if (type == StackFrame::EXIT_DEBUG) {
    // It's okay to clobber register ebx below because we don't need
    // the function pointer after this.
    const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
    int kOffset = ExitFrameConstants::kDebugMarkOffset - kCallerSavedSize;
    lea(ebx, Operand(ebp, kOffset));
    CopyRegistersFromStackToMemory(ebx, ecx, kJSCallerSaved);
  }

  // Get the return address from the stack and restore the frame pointer.
  mov(ecx, Operand(ebp, 1 * kPointerSize));
  mov(ebp, Operand(ebp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  lea(esp, Operand(esi, 1 * kPointerSize));

  // Restore the current context from top and clear it in debug mode.
  ExternalReference context_address(Top::k_context_address);
  mov(esi, Operand::StaticVariable(context_address));
  if (kDebug) {
    mov(Operand::StaticVariable(context_address), Immediate(0));
  }

  // Push the return address to get ready to return.
  push(ecx);

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}


void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  // Adjust this code if the stack handler size changes.
  ASSERT(StackHandlerConstants::kSize == 6 * kPointerSize);
  // The pc (return address) is already on TOS.
  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      push(Immediate(StackHandler::TRY_CATCH));
    } else {
      push(Immediate(StackHandler::TRY_FINALLY));
    }
    push(Immediate(Smi::FromInt(StackHandler::kCodeNotPresent)));
    push(ebp);
    push(edi);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The parameter pointer is meaningless here and ebp does not
    // point to a JS frame, so we save NULL for both pp and ebp. We
    // expect the code throwing an exception to check ebp before
    // dereferencing it to restore the context.
    push(Immediate(StackHandler::ENTRY));
    push(Immediate(Smi::FromInt(StackHandler::kCodeNotPresent)));
    push(Immediate(0));  // NULL frame pointer.
    push(Immediate(0));  // NULL parameter pointer.
  }
  // Cached TOS.
  mov(eax, Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
  // Link this handler.
  mov(Operand::StaticVariable(ExternalReference(Top::k_handler_address)), esp);
}


Register MacroAssembler::CheckMaps(JSObject* object, Register object_reg,
                                   JSObject* holder, Register holder_reg,
                                   Register scratch,
                                   Label* miss) {
  // Make sure there's no overlap between scratch and the other
  // registers.
  ASSERT(!scratch.is(object_reg) && !scratch.is(holder_reg));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 1;

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  while (object != holder) {
    depth++;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(object->IsJSGlobalObject() || !object->IsAccessCheckNeeded());

    JSObject* prototype = JSObject::cast(object->GetPrototype());
    if (Heap::InNewSpace(prototype)) {
      // Get the map of the current object.
      mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
      cmp(Operand(scratch), Immediate(Handle<Map>(object->map())));
      // Branch on the result of the map check.
      j(not_equal, miss, not_taken);
      // Check access rights to the global object. This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (object->IsJSGlobalObject()) {
        CheckAccessGlobal(reg, scratch, miss);
        // Restore the scratch register to be the map of the object.
        // We load the prototype from the map in the scratch register.
        mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
      }
      // The prototype is in new space; we cannot store a reference
      // to it in the code. Load it from the map.
      reg = holder_reg;  // From now on the object is in holder_reg.
      mov(reg, FieldOperand(scratch, Map::kPrototypeOffset));
    } else {
      // Check the map of the current object.
      cmp(FieldOperand(reg, HeapObject::kMapOffset),
          Immediate(Handle<Map>(object->map())));
      // Branch on the result of the map check.
      j(not_equal, miss, not_taken);
      // Check access rights to the global object. This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (object->IsJSGlobalObject()) {
        CheckAccessGlobal(reg, scratch, miss);
      }
      // The prototype is in old space; load it directly.
      reg = holder_reg;  // From now on the object is in holder_reg.
      mov(reg, Handle<JSObject>(prototype));
    }

    // Go to the next object in the prototype chain.
    object = prototype;
  }

  // Check the holder map.
  cmp(FieldOperand(reg, HeapObject::kMapOffset),
      Immediate(Handle<Map>(holder->map())));
  j(not_equal, miss, not_taken);

  // Log the check depth.
  LOG(IntEvent("check-maps-depth", depth));

  // Perform a security check for access to the global object and return
  // the holder register.
  ASSERT(object == holder);
  ASSERT(object->IsJSGlobalObject() || !object->IsAccessCheckNeeded());
  if (object->IsJSGlobalObject()) {
    CheckAccessGlobal(reg, scratch, miss);
  }
  return reg;
}


void MacroAssembler::CheckAccessGlobal(Register holder_reg,
                                       Register scratch,
                                       Label* miss) {
  ASSERT(!holder_reg.is(scratch));

  // Load the security context.
  ExternalReference security_context =
      ExternalReference(Top::k_security_context_address);
  mov(scratch, Operand::StaticVariable(security_context));
  // When generating debug code, make sure the security context is set.
  if (FLAG_debug_code) {
    cmp(Operand(scratch), Immediate(0));
    Check(not_equal, "we should not have an empty security context");
  }
  // Load the global object of the security context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, offset));
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(scratch, FieldOperand(scratch, JSGlobalObject::kSecurityTokenOffset));
  cmp(scratch, FieldOperand(holder_reg, JSGlobalObject::kSecurityTokenOffset));
  j(not_equal, miss, not_taken);
}


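// In JavaScript, a zero product whose other factor is negative denotes -0
// (for example, -1 * 0 evaluates to -0), and -0 cannot be represented as a
// smi. The helpers below branch to then_label exactly when the result is
// zero and the operand (or the combination of the two operands) has its
// sign bit set, so the caller can fall back to a heap-allocated -0.0.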
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);
  test(op, Operand(op));
  j(sign, then_label, not_taken);
  bind(&ok);
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);
  mov(scratch, Operand(op1));
  or_(scratch, Operand(op2));
  j(sign, then_label, not_taken);
  bind(&ok);
}


void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  call(stub->GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


void MacroAssembler::IllegalOperation() {
  push(Immediate(Factory::undefined_value()));
}


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation();
    return;
  }

  Runtime::FunctionId function_id =
      static_cast<Runtime::FunctionId>(f->stub_id);
  RuntimeStub stub(function_id, num_arguments);
  CallStub(&stub);
}


void MacroAssembler::TailCallRuntime(const ExternalReference& ext,
                                     int num_arguments) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  mov(Operand(eax), Immediate(num_arguments));
  JumpToBuiltin(ext);
}


void MacroAssembler::JumpToBuiltin(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(Operand(ebx), Immediate(ext));
  CEntryStub ces;
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    const Operand& code_operand,
                                    Label* done,
                                    InvokeFlag flag) {
  bool definitely_matches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(eax, actual.immediate());
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip the adaptation code by making
        // it look like we have a match between the expected and the
        // actual number of arguments.
        definitely_matches = true;
      } else {
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in a register, actual is an immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      ASSERT(expected.reg().is(ebx));
      mov(eax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), Operand(actual.reg()));
      j(equal, &invoke);
      ASSERT(actual.reg().is(eax));
      ASSERT(expected.reg().is(ebx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
    if (!code_constant.is_null()) {
      mov(Operand(edx), Immediate(code_constant));
      add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_operand.is_reg(edx)) {
      mov(edx, code_operand);
    }

    if (flag == CALL_FUNCTION) {
      call(adaptor, RelocInfo::CODE_TARGET);
      jmp(done);
    } else {
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}


void MacroAssembler::InvokeCode(const Operand& code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag) {
  Label done;
  InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code);
  }
  bind(&done);
}


void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag) {
  Label done;
  Operand dummy(eax);
  InvokePrologue(expected, actual, code, dummy, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code, rmode);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code, rmode);
  }
  bind(&done);
}


void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(fun.is(edi));
  mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset));
  lea(edx, FieldOperand(edx, Code::kHeaderSize));

  ParameterCount expected(ebx);
  InvokeCode(Operand(edx), expected, actual, flag);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
  bool resolved;
  Handle<Code> code = ResolveBuiltin(id, &resolved);

  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments matches the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  InvokeCode(Handle<Code>(code), expected, expected,
             RelocInfo::CODE_TARGET, flag);

  const char* name = Builtins::GetName(id);
  int argc = Builtins::GetArgumentsCount(id);

  if (!resolved) {
    uint32_t flags =
        Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
        Bootstrapper::FixupFlagsIsPCRelative::encode(true);
    Unresolved entry = { pc_offset() - sizeof(int32_t), flags, name };
    unresolved_.Add(entry);
  }
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  bool resolved;
  Handle<Code> code = ResolveBuiltin(id, &resolved);

  const char* name = Builtins::GetName(id);
  int argc = Builtins::GetArgumentsCount(id);

  mov(Operand(target), Immediate(code));
  if (!resolved) {
    uint32_t flags =
        Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
        Bootstrapper::FixupFlagsIsPCRelative::encode(false);
    Unresolved entry = { pc_offset() - sizeof(int32_t), flags, name };
    unresolved_.Add(entry);
  }
  add(Operand(target), Immediate(Code::kHeaderSize - kHeapObjectTag));
}


Handle<Code> MacroAssembler::ResolveBuiltin(Builtins::JavaScript id,
                                            bool* resolved) {
  // Move the builtin function into the temporary function slot by
  // reading it from the builtins object. NOTE: We should be able to
  // reduce this to two instructions by putting the function table in
  // the global object instead of the "builtins" object and by using a
  // real register for the function.
  mov(edx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  mov(edx, FieldOperand(edx, GlobalObject::kBuiltinsOffset));
  int builtins_offset =
      JSBuiltinsObject::kJSBuiltinsOffset + (id * kPointerSize);
  mov(edi, FieldOperand(edx, builtins_offset));

  return Builtins::GetCode(id, resolved);
}


void MacroAssembler::Ret() {
  ret(0);
}


void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
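      // 'inc' has a shorter instruction encoding than 'add' with an
      // immediate operand (and likewise 'dec' vs. 'sub' below).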
      inc(operand);
    } else {
      add(operand, Immediate(value));
    }
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      dec(operand);
    } else {
      sub(operand, Immediate(value));
    }
  }
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code) Check(cc, msg);
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L, taken);
  Abort(msg);
  // Control will not return here.
  bind(&L);
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be properly
  // aligned. Instead, we pass an aligned pointer that is a
  // proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
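  // (For example, with a hypothetical msg address 0x08a5c3, p0 below
  // becomes 0x08a5c2, whose tag bit is clear and which is therefore a
  // valid smi pattern, and the difference 1 is passed separately as
  // Smi::FromInt(1).)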
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  push(eax);
  push(Immediate(p0));
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
  CallRuntime(Runtime::kAbort, 2);
  // Control will not return here.
}


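// A sketch of typical use (illustration only, assuming the usual masm()
// accessor on CodePatcher):
//   CodePatcher patcher(address, 2);  // Patch two bytes at 'address'.
//   patcher.masm()->int3();
//   patcher.masm()->nop();
// The destructor then flushes the instruction cache for the patched region.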
CodePatcher::CodePatcher(byte* address, int size)
    : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to
  // patch. The size is adjusted by kGap in order for the assembler to
  // generate size bytes of instructions without failing with buffer size
  // constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that the code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


} }  // namespace v8::internal