// Copyright 2006-2008 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "debug.h"
#include "runtime.h"
#include "serialize.h"

namespace v8 { namespace internal {

MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      unresolved_(0),
      generating_stub_(false),
      allow_stub_calls_(true) {
}


static void RecordWriteHelper(MacroAssembler* masm,
                              Register object,
                              Register addr,
                              Register scratch) {
  Label fast;

  // Compute the page address from the heap object pointer, leave it
  // in 'object'.
  masm->and_(object, ~Page::kPageAlignmentMask);

  // Compute the bit address in the remembered set, leave it in 'addr'.
  masm->sub(addr, Operand(object));
  masm->shr(addr, kObjectAlignmentBits);
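  // Illustrative walk-through of the two steps above (assuming 8K pages
  // and 4-byte object alignment, i.e. kPageAlignmentMask == 0x1FFF and
  // kObjectAlignmentBits == 2): for a slot at address 0x2A014,
  //   page = 0x2A014 & ~0x1FFF        == 0x28000
  //   bit  = (0x2A014 - 0x28000) >> 2 == 0x805
  // so the slot's remembered set bit is bit 0x805 of its page.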

  // If the bit offset lies beyond the normal remembered set range, it is in
  // the extra remembered set area of a large object.
  masm->cmp(addr, Page::kPageSize / kPointerSize);
  masm->j(less, &fast);

  // Adjust 'addr' to be relative to the start of the extra remembered set
  // and the page address in 'object' to be the address of the extra
  // remembered set.
  masm->sub(Operand(addr), Immediate(Page::kPageSize / kPointerSize));
  // Load the array length into 'scratch' and multiply by four to get the
  // size in bytes of the elements.
  masm->mov(scratch, Operand(object, Page::kObjectStartOffset
                                     + FixedArray::kLengthOffset));
  masm->shl(scratch, kObjectAlignmentBits);
  // Add the page header, array header, and array body size to the page
  // address.
  masm->add(Operand(object), Immediate(Page::kObjectStartOffset
                                       + Array::kHeaderSize));
  masm->add(object, Operand(scratch));

  // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
  // to limit code size. We should probably evaluate this decision by
  // measuring the performance of an equivalent implementation using
  // "simpler" instructions.
  masm->bind(&fast);
  masm->bts(Operand(object, 0), addr);
}


class RecordWriteStub : public CodeStub {
 public:
  RecordWriteStub(Register object, Register addr, Register scratch)
      : object_(object), addr_(addr), scratch_(scratch) { }

  void Generate(MacroAssembler* masm);

 private:
  Register object_;
  Register addr_;
  Register scratch_;

#ifdef DEBUG
  void Print() {
    PrintF("RecordWriteStub (object reg %d), (addr reg %d), (scratch reg %d)\n",
           object_.code(), addr_.code(), scratch_.code());
  }
#endif

  // Minor key encoding in 12 bits of three registers (object, address and
  // scratch) OOOOAAAASSSS.
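  // For example (illustrative, using the ia32 register codes eax == 0,
  // ecx == 1, edx == 2): a stub with object in eax, address in ecx and
  // scratch in edx gets minor key 0x012, i.e.
  //   ObjectBits::encode(0) | AddressBits::encode(1) |
  //       ScratchBits::encode(2) == (0 << 8) | (1 << 4) | 2.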
  class ScratchBits: public BitField<uint32_t, 0, 4> {};
  class AddressBits: public BitField<uint32_t, 4, 4> {};
  class ObjectBits: public BitField<uint32_t, 8, 4> {};

  Major MajorKey() { return RecordWrite; }

  int MinorKey() {
    // Encode the registers.
    return ObjectBits::encode(object_.code()) |
           AddressBits::encode(addr_.code()) |
           ScratchBits::encode(scratch_.code());
  }
};


void RecordWriteStub::Generate(MacroAssembler* masm) {
  RecordWriteHelper(masm, object_, addr_, scratch_);
  masm->ret(0);
}


// Set the remembered set bit for [object+offset].
// object is the object being stored into, value is the object being stored.
// If offset is zero, then the scratch register contains the array index into
// the elements array represented as a Smi.
// All registers are clobbered by the operation.
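// A typical (illustrative) call site in the code generator, right after
// storing 'value' into a field of 'object' (with the usual
// '#define __ masm->' shorthand):
//   __ mov(FieldOperand(object, offset), value);
//   __ RecordWrite(object, offset, value, scratch);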
void MacroAssembler::RecordWrite(Register object, int offset,
                                 Register value, Register scratch) {
  // First, check if a remembered set write is even needed. The tests below
  // catch stores of Smis and stores into the young generation (which does
  // not have space for the remembered set bits).
  Label done;

  // This optimization cannot survive serialization and deserialization,
  // so we disable it as long as serialization can take place.
  int32_t new_space_start =
      reinterpret_cast<int32_t>(ExternalReference::new_space_start().address());
  if (Serializer::enabled() || new_space_start < 0) {
    // Cannot do smart bit-twiddling. Need to do two consecutive checks.
    // Check for Smi first.
    test(value, Immediate(kSmiTagMask));
    j(zero, &done);
    // Test that the object address is not in the new space. We cannot
    // set remembered set bits in the new space.
    mov(value, Operand(object));
    and_(value, Heap::NewSpaceMask());
    cmp(Operand(value), Immediate(ExternalReference::new_space_start()));
    j(equal, &done);
  } else {
    // Move the value's Smi tag into the sign bit.
    shl(value, 31);
    // Combine the object with the value's Smi tag.
    or_(value, Operand(object));
    // Remove the uninteresting bits inside the page.
    and_(value, Heap::NewSpaceMask() | (1 << 31));
    // The xor below has two effects:
    // - if the value was a smi, the result will be negative
    // - if the object is pointing into the new space area, the page bits
    //   will all be zero
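    // Illustrative walk-through (assuming kSmiTag == 0 and
    // kHeapObjectTag == 1): a smi has bit 0 clear, so shl(value, 31)
    // leaves the sign bit clear and the xor sets it, making the result
    // negative; a heap pointer into new space has bit 0 set and its
    // masked page bits equal to new_space_start, so the xor yields
    // exactly zero. One signed less-or-equal branch covers both cases.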
    xor_(value, new_space_start | (1 << 31));
    // Check for both conditions in one branch.
    j(less_equal, &done);
  }

  if ((offset > 0) && (offset < Page::kMaxHeapObjectSize)) {
    // Compute the bit offset in the remembered set, leave it in 'value'.
    mov(value, Operand(object));
    and_(value, Page::kPageAlignmentMask);
    add(Operand(value), Immediate(offset));
    shr(value, kObjectAlignmentBits);

    // Compute the page address from the heap object pointer, leave it in
    // 'object'.
    and_(object, ~Page::kPageAlignmentMask);

    // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
    // to limit code size. We should probably evaluate this decision by
    // measuring the performance of an equivalent implementation using
    // "simpler" instructions.
    bts(Operand(object, 0), value);
  } else {
    Register dst = scratch;
    if (offset != 0) {
      lea(dst, Operand(object, offset));
    } else {
      // Array access: calculate the destination address in the same manner
      // as KeyedStoreIC::GenerateGeneric.
      lea(dst,
          Operand(object, dst, times_2, Array::kHeaderSize - kHeapObjectTag));
    }
    // If we are already generating a shared stub, not inlining the
    // record write code isn't going to save us any memory.
    if (generating_stub()) {
      RecordWriteHelper(this, object, dst, value);
    } else {
      RecordWriteStub stub(object, dst, value);
      CallStub(&stub);
    }
  }

  bind(&done);
}


void MacroAssembler::SaveRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of registers to memory locations.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(Operand::StaticVariable(reg_addr), reg);
    }
  }
}


void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of memory locations to registers.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(reg, Operand::StaticVariable(reg_addr));
    }
  }
}


void MacroAssembler::PushRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Push the content of the memory locations onto the stack.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      push(Operand::StaticVariable(reg_addr));
    }
  }
}


void MacroAssembler::PopRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Pop the content from the stack into the memory locations.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      pop(Operand::StaticVariable(reg_addr));
    }
  }
}


void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
                                                    Register scratch,
                                                    RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of the stack to the memory locations and adjust base.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      mov(scratch, Operand(base, 0));
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(Operand::StaticVariable(reg_addr), scratch);
      lea(base, Operand(base, kPointerSize));
    }
  }
}


void MacroAssembler::Set(Register dst, const Immediate& x) {
  if (x.is_zero()) {
    xor_(dst, Operand(dst));  // shorter than mov
  } else {
    mov(Operand(dst), x);
  }
}


void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}

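// Compare st(0) with st(1) and pop both, then copy the FPU condition
// bits into EFLAGS so that ordinary conditional jumps can be used. The
// push/pop of eax preserves the caller's eax around fnstsw_ax, which
// clobbers ax with the FPU status word; sahf moves ah into the flags.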
void MacroAssembler::FCmp() {
  fcompp();
  push(eax);
  fnstsw_ax();
  sahf();
  pop(eax);
}


void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, Operand(esp));
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(0));  // Push an empty code cache slot.
}


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (FLAG_debug_code) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, "stack frame types must match");
  }
  leave();
}


void MacroAssembler::EnterExitFrame(StackFrame::Type type) {
  ASSERT(type == StackFrame::EXIT || type == StackFrame::EXIT_DEBUG);

  // Set up the frame structure on the stack.
  ASSERT(ExitFrameConstants::kPPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset ==  0 * kPointerSize);
  push(ebp);
  mov(ebp, Operand(esp));

  // Reserve room for the entry stack pointer and push the debug marker.
  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  push(Immediate(type == StackFrame::EXIT_DEBUG ? 1 : 0));
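  // Sketch of the frame at this point (reconstructed from the offsets
  // asserted above; the stack grows downwards):
  //   ebp + 8: caller's parameter pointer (pp)
  //   ebp + 4: caller's return address (pc)
  //   ebp + 0: caller's frame pointer (fp)
  //   ebp - 4: saved entry sp slot (patched at the end of this function)
  //   ebp - 8: debug marker (1 for EXIT_DEBUG, otherwise 0)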

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  ExternalReference context_address(Top::k_context_address);
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);

  // Set up argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, Operand(eax));
  lea(esi, Operand(ebp, eax, times_4, offset));

  // Save the state of all registers to the stack from the memory
  // location. This is needed to allow nested break points.
  if (type == StackFrame::EXIT_DEBUG) {
    // TODO(1243899): This should be symmetric to
    // CopyRegistersFromStackToMemory() but it isn't! esp is assumed
    // correct here, but computed for the other call. Very error
    // prone! FIX THIS. Actually there are deeper problems with
    // register saving than this asymmetry (see the bug report
    // associated with this issue).
    PushRegistersFromMemory(kJSCallerSaved);
  }

  // Reserve space for two arguments: argc and argv.
  sub(Operand(esp), Immediate(2 * kPointerSize));

  // Get the required frame alignment for the OS.
  static const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}


void MacroAssembler::LeaveExitFrame(StackFrame::Type type) {
  // Restore the memory copy of the registers by digging them out from
  // the stack. This is needed to allow nested break points.
  if (type == StackFrame::EXIT_DEBUG) {
    // It's okay to clobber register ebx below because we don't need
    // the function pointer after this.
    const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
    int kOffset = ExitFrameConstants::kDebugMarkOffset - kCallerSavedSize;
    lea(ebx, Operand(ebp, kOffset));
    CopyRegistersFromStackToMemory(ebx, ecx, kJSCallerSaved);
  }

  // Get the return address from the stack and restore the frame pointer.
  mov(ecx, Operand(ebp, 1 * kPointerSize));
  mov(ebp, Operand(ebp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  lea(esp, Operand(esi, 1 * kPointerSize));

  // Restore the current context from top and clear it in debug mode.
  ExternalReference context_address(Top::k_context_address);
  mov(esi, Operand::StaticVariable(context_address));
  if (kDebug) {
    mov(Operand::StaticVariable(context_address), Immediate(0));
  }

  // Push the return address to get ready to return.
  push(ecx);

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}


void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  ASSERT(StackHandlerConstants::kSize == 6 * kPointerSize);  // Adjust this code if the size changes.
  // The pc (return address) is already on TOS.
  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      push(Immediate(StackHandler::TRY_CATCH));
    } else {
      push(Immediate(StackHandler::TRY_FINALLY));
    }
    push(Immediate(Smi::FromInt(StackHandler::kCodeNotPresent)));
    push(ebp);
    push(edi);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The parameter pointer is meaningless here and ebp does not
    // point to a JS frame. So we save NULL for both pp and ebp. We
    // expect the code throwing an exception to check ebp before
    // dereferencing it to restore the context.
    push(Immediate(StackHandler::ENTRY));
    push(Immediate(Smi::FromInt(StackHandler::kCodeNotPresent)));
    push(Immediate(0));  // NULL frame pointer.
    push(Immediate(0));  // NULL parameter pointer.
  }
  // Cache the previous top handler (the cached TOS) in eax.
  mov(eax, Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
  // Link this handler.
  mov(Operand::StaticVariable(ExternalReference(Top::k_handler_address)), esp);
}

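// Walk the prototype chain from 'object' up to 'holder', comparing the
// map of each object against its compile-time expected map and
// performing access checks for global objects along the way. Returns
// the register holding the holder on success; on any mismatch, control
// jumps to 'miss'.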
Register MacroAssembler::CheckMaps(JSObject* object, Register object_reg,
                                   JSObject* holder, Register holder_reg,
                                   Register scratch,
                                   Label* miss) {
  // Make sure there's no overlap between scratch and the other
  // registers.
  ASSERT(!scratch.is(object_reg) && !scratch.is(holder_reg));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 1;

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  while (object != holder) {
    depth++;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(object->IsJSGlobalObject() || !object->IsAccessCheckNeeded());

    JSObject* prototype = JSObject::cast(object->GetPrototype());
    if (Heap::InNewSpace(prototype)) {
      // Get the map of the current object.
      mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
      cmp(Operand(scratch), Immediate(Handle<Map>(object->map())));
      // Branch on the result of the map check.
      j(not_equal, miss, not_taken);
      // Check access rights to the global object. This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (object->IsJSGlobalObject()) {
        CheckAccessGlobal(reg, scratch, miss);
        // Restore the scratch register to be the map of the object. We
        // load the prototype from the map in the scratch register.
        mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
      }
      // The prototype is in new space; we cannot store a reference
      // to it in the code. Load it from the map.
      reg = holder_reg;  // From now on the object is in holder_reg.
      mov(reg, FieldOperand(scratch, Map::kPrototypeOffset));
    } else {
      // Check the map of the current object.
      cmp(FieldOperand(reg, HeapObject::kMapOffset),
          Immediate(Handle<Map>(object->map())));
      // Branch on the result of the map check.
      j(not_equal, miss, not_taken);
      // Check access rights to the global object. This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (object->IsJSGlobalObject()) {
        CheckAccessGlobal(reg, scratch, miss);
      }
      // The prototype is in old space; load it directly.
      reg = holder_reg;  // From now on the object is in holder_reg.
      mov(reg, Handle<JSObject>(prototype));
    }

    // Go to the next object in the prototype chain.
    object = prototype;
  }

  // Check the holder map.
  cmp(FieldOperand(reg, HeapObject::kMapOffset),
      Immediate(Handle<Map>(holder->map())));
  j(not_equal, miss, not_taken);

  // Log the check depth.
  LOG(IntEvent("check-maps-depth", depth));

  // Perform a security check for access to the global object and return
  // the holder register.
  ASSERT(object == holder);
  ASSERT(object->IsJSGlobalObject() || !object->IsAccessCheckNeeded());
  if (object->IsJSGlobalObject()) {
    CheckAccessGlobal(reg, scratch, miss);
  }
  return reg;
}


void MacroAssembler::CheckAccessGlobal(Register holder_reg,
                                       Register scratch,
                                       Label* miss) {
  ASSERT(!holder_reg.is(scratch));

  // Load the security context.
  ExternalReference security_context =
      ExternalReference(Top::k_security_context_address);
  mov(scratch, Operand::StaticVariable(security_context));
  // When generating debug code, make sure the security context is set.
  if (FLAG_debug_code) {
    cmp(Operand(scratch), Immediate(0));
    Check(not_equal, "we should not have an empty security context");
  }
  // Load the global object of the security context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, offset));
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(scratch, FieldOperand(scratch, JSGlobalObject::kSecurityTokenOffset));
  cmp(scratch, FieldOperand(holder_reg, JSGlobalObject::kSecurityTokenOffset));
  j(not_equal, miss, not_taken);
}

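// After an integer operation whose result is zero, the mathematical
// result may actually be -0 (e.g. -5 * 0): if the zero result was
// produced from a negative operand, jump to 'then_label' so the caller
// can fall back to a representation that can express -0.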
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);
  test(op, Operand(op));
  j(sign, then_label, not_taken);
  bind(&ok);
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);
  mov(scratch, Operand(op1));
  or_(scratch, Operand(op2));
  j(sign, then_label, not_taken);
  bind(&ok);
}


void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss) {
  // Check that the function isn't a smi.
  test(function, Immediate(kSmiTagMask));
  j(zero, miss, not_taken);

  // Check that the function really is a function.
  mov(result, FieldOperand(function, HeapObject::kMapOffset));
  movzx_b(scratch, FieldOperand(result, Map::kInstanceTypeOffset));
  cmp(scratch, JS_FUNCTION_TYPE);
  j(not_equal, miss, not_taken);

  // Make sure that the function has an instance prototype.
  Label non_instance;
  movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
  test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance, not_taken);

  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(Operand(result), Immediate(Factory::the_hole_value()));
  j(equal, miss, not_taken);

  // If the function does not have an initial map, we're done.
  Label done;
  mov(scratch, FieldOperand(result, HeapObject::kMapOffset));
  movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  cmp(scratch, MAP_TYPE);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: fetch the prototype from the constructor
  // field in the initial map.
  bind(&non_instance);
  mov(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}


void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  call(stub->GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    add(Operand(esp), Immediate(num_arguments * kPointerSize));
  }
  mov(Operand(eax), Immediate(Factory::undefined_value()));
}


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  Runtime::FunctionId function_id =
      static_cast<Runtime::FunctionId>(f->stub_id);
  RuntimeStub stub(function_id, num_arguments);
  CallStub(&stub);
}


void MacroAssembler::TailCallRuntime(const ExternalReference& ext,
                                     int num_arguments) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  mov(Operand(eax), Immediate(num_arguments));
  JumpToBuiltin(ext);
}


void MacroAssembler::JumpToBuiltin(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(Operand(ebx), Immediate(ext));
  CEntryStub ces;
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    const Operand& code_operand,
                                    Label* done,
                                    InvokeFlag flag) {
  bool definitely_matches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(eax, actual.immediate());
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip the adaptation code by making it
        // look like we have a match between the expected and actual
        // number of arguments.
        definitely_matches = true;
      } else {
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in a register, actual is an immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      ASSERT(expected.reg().is(ebx));
      mov(eax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), Operand(actual.reg()));
      j(equal, &invoke);
      ASSERT(actual.reg().is(eax));
      ASSERT(expected.reg().is(ebx));
    }
  }

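  // At this point, unless the counts definitely matched, eax holds the
  // actual argument count and ebx the expected count; that is the
  // register contract of the arguments adaptor invoked below.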
  if (!definitely_matches) {
    Handle<Code> adaptor =
        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
    if (!code_constant.is_null()) {
      mov(Operand(edx), Immediate(code_constant));
      add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_operand.is_reg(edx)) {
      mov(edx, code_operand);
    }

    if (flag == CALL_FUNCTION) {
      call(adaptor, RelocInfo::CODE_TARGET);
      jmp(done);
    } else {
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}


void MacroAssembler::InvokeCode(const Operand& code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag) {
  Label done;
  InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code);
  }
  bind(&done);
}


void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag) {
  Label done;
  Operand dummy(eax);
  InvokePrologue(expected, actual, code, dummy, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code, rmode);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code, rmode);
  }
  bind(&done);
}


void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(fun.is(edi));
  mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset));
  lea(edx, FieldOperand(edx, Code::kHeaderSize));

  ParameterCount expected(ebx);
  InvokeCode(Operand(edx), expected, actual, flag);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
  bool resolved;
  Handle<Code> code = ResolveBuiltin(id, &resolved);

  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments matches the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  InvokeCode(Handle<Code>(code), expected, expected,
             RelocInfo::CODE_TARGET, flag);

  const char* name = Builtins::GetName(id);
  int argc = Builtins::GetArgumentsCount(id);

  if (!resolved) {
    uint32_t flags =
        Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
        Bootstrapper::FixupFlagsIsPCRelative::encode(true);
    Unresolved entry = { pc_offset() - sizeof(int32_t), flags, name };
    unresolved_.Add(entry);
  }
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  bool resolved;
  Handle<Code> code = ResolveBuiltin(id, &resolved);

  const char* name = Builtins::GetName(id);
  int argc = Builtins::GetArgumentsCount(id);

  mov(Operand(target), Immediate(code));
  if (!resolved) {
    uint32_t flags =
        Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
        Bootstrapper::FixupFlagsIsPCRelative::encode(false);
    Unresolved entry = { pc_offset() - sizeof(int32_t), flags, name };
    unresolved_.Add(entry);
  }
  add(Operand(target), Immediate(Code::kHeaderSize - kHeapObjectTag));
}


Handle<Code> MacroAssembler::ResolveBuiltin(Builtins::JavaScript id,
                                            bool* resolved) {
  // Move the builtin function into the temporary function slot by
  // reading it from the builtins object. NOTE: We should be able to
  // reduce this to two instructions by putting the function table in
  // the global object instead of the "builtins" object and by using a
  // real register for the function.
  mov(edx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  mov(edx, FieldOperand(edx, GlobalObject::kBuiltinsOffset));
  int builtins_offset =
      JSBuiltinsObject::kJSBuiltinsOffset + (id * kPointerSize);
  mov(edi, FieldOperand(edx, builtins_offset));

  return Builtins::GetCode(id, resolved);
}


void MacroAssembler::Ret() {
  ret(0);
}


void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      inc(operand);
    } else {
      add(operand, Immediate(value));
    }
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      dec(operand);
    } else {
      sub(operand, Immediate(value));
    }
  }
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code) Check(cc, msg);
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L, taken);
  Abort(msg);
  // Control will not return here.
  bind(&L);
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC problems;
  // however, msg is not guaranteed to be aligned properly. Instead, we
  // pass an aligned pointer that is a proper v8 smi, but also pass the
  // alignment difference from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
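  // Illustrative example (assuming kSmiTagMask == 1 and kSmiTag == 0):
  // if msg == 0x2001, then p0 == 0x2000 looks like a valid smi, and
  // Smi::FromInt(p1 - p0) == Smi::FromInt(1) carries the dropped low
  // bit, so the runtime can recover msg as p0 + 1.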
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  push(eax);
  push(Immediate(p0));
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
  CallRuntime(Runtime::kAbort, 2);
  // Control will not return here.
}


CodePatcher::CodePatcher(byte* address, int size)
    : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to
  // patch. The size is adjusted with kGap in order for the assembler to
  // generate size bytes of instructions without failing with buffer size
  // constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
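// A minimal usage sketch (assuming the class declaration exposes the
// wrapped assembler through a masm() accessor, which is not shown in
// this file):
//   CodePatcher patcher(pc, 1);
//   patcher.masm()->int3();  // Overwrite one byte with a breakpoint.
// The destructor below then flushes the instruction cache for the
// patched region.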


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


} } // namespace v8::internal