// Copyright 2006-2008 Google Inc. All Rights Reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "debug.h"
#include "runtime.h"
#include "serialize.h"

namespace v8 { namespace internal {

DECLARE_bool(debug_code);
DEFINE_bool(native_code_counters, false,
            "generate extra code for manipulating stats counters");


MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      unresolved_(0),
      generating_stub_(false) {
}


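// Helper to compute and set the remembered-set bit for an address.
// A worked example of the arithmetic below, with illustrative numbers
// (4-byte object alignment, so kObjectAlignmentBits == 2): for a slot
// 0x10 bytes into its page, 'addr' becomes 0x10 >> 2 == 4 and bts sets
// bit 4 of the remembered set based at the page start. Bit offsets of
// Page::kPageSize / kPointerSize or more can only arise for a large
// object's array elements and are redirected into the extra remembered
// set that follows the array body.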
static void RecordWriteHelper(MacroAssembler* masm,
                              Register object,
                              Register addr,
                              Register scratch) {
  Label fast;

  // Compute the page address from the heap object pointer, leave it
  // in 'object'.
  masm->and_(object, ~Page::kPageAlignmentMask);

  // Compute the bit address in the remembered set, leave it in 'addr'.
  masm->sub(addr, Operand(object));
  masm->shr(addr, kObjectAlignmentBits);

  // If the bit offset lies beyond the normal remembered set range, it is in
  // the extra remembered set area of a large object.
  masm->cmp(addr, Page::kPageSize / kPointerSize);
  masm->j(less, &fast);

  // Adjust 'addr' to be relative to the start of the extra remembered set
  // and the page address in 'object' to be the address of the extra
  // remembered set.
  masm->sub(Operand(addr), Immediate(Page::kPageSize / kPointerSize));
  // Load the array length into 'scratch' and multiply by four to get the
  // size in bytes of the elements.
  masm->mov(scratch, Operand(object, Page::kObjectStartOffset
                                     + FixedArray::kLengthOffset));
  masm->shl(scratch, kObjectAlignmentBits);
  // Add the page header, array header, and array body size to the page
  // address.
  masm->add(Operand(object), Immediate(Page::kObjectStartOffset
                                       + Array::kHeaderSize));
  masm->add(object, Operand(scratch));

  // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
  // to limit code size. We should probably evaluate this decision by
  // measuring the performance of an equivalent implementation using
  // "simpler" instructions.
  masm->bind(&fast);
  masm->bts(Operand(object, 0), addr);
}


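// The record-write slow path as a code stub. Generating it out of line
// lets regular code share a single copy via CallStub; only code that is
// itself a stub (see RecordWrite below) inlines the helper directly.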
class RecordWriteStub : public CodeStub {
 public:
  RecordWriteStub(Register object, Register addr, Register scratch)
      : object_(object), addr_(addr), scratch_(scratch) { }

  void Generate(MacroAssembler* masm);

 private:
  Register object_;
  Register addr_;
  Register scratch_;

  const char* GetName() { return "RecordWriteStub"; }

#ifdef DEBUG
  void Print() {
    PrintF("RecordWriteStub (object reg %d), (addr reg %d), (scratch reg %d)\n",
           object_.code(), addr_.code(), scratch_.code());
  }
#endif

  // Minor key encoding in 12 bits of three registers (object, address and
  // scratch) OOOOAAAASSSS.
  class ScratchBits: public BitField<uint32_t, 0, 4> {};
  class AddressBits: public BitField<uint32_t, 4, 4> {};
  class ObjectBits: public BitField<uint32_t, 8, 4> {};

  Major MajorKey() { return RecordWrite; }

  int MinorKey() {
    // Encode the registers.
    return ObjectBits::encode(object_.code()) |
           AddressBits::encode(addr_.code()) |
           ScratchBits::encode(scratch_.code());
  }
};


void RecordWriteStub::Generate(MacroAssembler* masm) {
  RecordWriteHelper(masm, object_, addr_, scratch_);
  masm->ret(0);
}


// Set the remembered set bit for [object+offset].
// 'object' is the object being stored into, 'value' is the object being
// stored. If offset is zero, then the scratch register contains the array
// index into the elements array represented as a Smi.
// All registers are clobbered by the operation.
void MacroAssembler::RecordWrite(Register object, int offset,
                                 Register value, Register scratch) {
  // First, check if a remembered set write is even needed. The tests below
  // catch stores of Smis and stores into the young generation (which does
  // not have space for the remembered set bits).
  Label done;

  // This optimization cannot survive serialization and deserialization,
  // so we disable it as long as serialization can take place.
  int32_t new_space_start =
      reinterpret_cast<int32_t>(ExternalReference::new_space_start().address());
  if (Serializer::enabled() || new_space_start < 0) {
    // Cannot do smart bit-twiddling. Need to do two consecutive checks.
    // Check for Smi first.
    test(value, Immediate(kSmiTagMask));
    j(zero, &done);
    // Test that the object address is not in the new space. We cannot
    // set remembered set bits in the new space.
    mov(value, Operand(object));
    and_(value, Heap::NewSpaceMask());
    cmp(Operand(value), Immediate(ExternalReference::new_space_start()));
    j(equal, &done);
  } else {
    // Move the value's smi tag into the sign bit.
    shl(value, 31);
    // Combine the object with the value's smi tag.
    or_(value, Operand(object));
    // Remove the uninteresting bits inside the page.
    and_(value, Heap::NewSpaceMask() | (1 << 31));
    // The xor has two effects:
    // - if the value was a smi, the result will be negative;
    // - if the object points into the new space area, the page bits will
    //   all be zero.
    xor_(value, new_space_start | (1 << 31));
    // Check for both conditions in one branch.
    j(less_equal, &done);
  }

  if ((offset > 0) && (offset < Page::kMaxHeapObjectSize)) {
    // Compute the bit offset in the remembered set, leave it in 'value'.
    mov(value, Operand(object));
    and_(value, Page::kPageAlignmentMask);
    add(Operand(value), Immediate(offset));
    shr(value, kObjectAlignmentBits);

    // Compute the page address from the heap object pointer, leave it in
    // 'object'.
    and_(object, ~Page::kPageAlignmentMask);

    // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
    // to limit code size. We should probably evaluate this decision by
    // measuring the performance of an equivalent implementation using
    // "simpler" instructions.
    bts(Operand(object, 0), value);
  } else {
    Register dst = scratch;
    if (offset != 0) {
      lea(dst, Operand(object, offset));
    } else {
      // Array access: calculate the destination address in the same manner
      // as KeyedStoreIC::GenerateGeneric. The scratch register holds a smi
      // (index << 1), so times_2 scaling yields index * kPointerSize.
      lea(dst,
          Operand(object, dst, times_2, Array::kHeaderSize - kHeapObjectTag));
    }
    // If we are already generating a shared stub, not inlining the
    // record write code isn't going to save us any memory.
    if (generating_stub()) {
      RecordWriteHelper(this, object, dst, value);
    } else {
      RecordWriteStub stub(object, dst, value);
      CallStub(&stub);
    }
  }

  bind(&done);
}


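// The five functions below shuttle the JS caller-saved registers between
// the CPU and a per-register save area used by the debugger, addressed
// through Debug_Address::Register(i). In each RegList, bit r stands for
// the register whose code is r.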
void MacroAssembler::SaveRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of registers to memory location.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(Operand::StaticVariable(reg_addr), reg);
    }
  }
}


void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of memory location to registers.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(reg, Operand::StaticVariable(reg_addr));
    }
  }
}


void MacroAssembler::PushRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Push the content of the memory location to the stack.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      push(Operand::StaticVariable(reg_addr));
    }
  }
}


void MacroAssembler::PopRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Pop the content from the stack to the memory location.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      pop(Operand::StaticVariable(reg_addr));
    }
  }
}


void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
                                                    Register scratch,
                                                    RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of the stack to the memory location and adjust base.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      mov(scratch, Operand(base, 0));
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(Operand::StaticVariable(reg_addr), scratch);
      lea(base, Operand(base, kPointerSize));
    }
  }
}


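// Note on the zero case below: on ia32, "xor reg, reg" encodes in two
// bytes whereas "mov reg, imm32" needs five, so clearing through xor
// saves three bytes every time a zero is materialized.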
void MacroAssembler::Set(Register dst, const Immediate& x) {
  if (x.is_zero()) {
    xor_(dst, Operand(dst));  // shorter than mov
  } else {
    mov(Operand(dst), x);
  }
}


void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}


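// Compare and pop the two values on top of the FPU stack and transfer
// the FPU condition codes to the CPU flags: fnstsw_ax stores the FPU
// status word in ax and sahf copies ah into the flags, so ordinary
// conditional jumps can be used afterwards. eax is saved and restored
// around the sequence.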
void MacroAssembler::FCmp() {
  fcompp();
  push(eax);
  fnstsw_ax();
  sahf();
  pop(eax);
}


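// Build a standard non-JavaScript frame below: saved ebp, the context
// register esi, and a smi marking the frame type; INTERNAL frames get an
// additional zero placeholder slot. ExitFrame verifies the marker when
// emitting debug code and tears the frame down with leave().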
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  ASSERT(type != StackFrame::JAVA_SCRIPT);
  push(ebp);
  mov(ebp, Operand(esp));
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  if (type == StackFrame::INTERNAL) {
    push(Immediate(0));
  }
}


void MacroAssembler::ExitFrame(StackFrame::Type type) {
  ASSERT(type != StackFrame::JAVA_SCRIPT);
  if (FLAG_debug_code) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, "stack frame types must match");
  }
  leave();
}


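// Layout of the pushed handler, from the top of the stack down: parameter
// pointer, frame pointer, code (kCodeNotPresent), state, and the return
// address that was already on TOS. eax is left holding the previous top
// handler; judging by the six-word kSize assert, pushing it to complete
// the handler is left to the caller.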
void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  ASSERT(StackHandlerConstants::kSize == 6 * kPointerSize);  // adjust this code
  // The pc (return address) is already on TOS.
  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      push(Immediate(StackHandler::TRY_CATCH));
    } else {
      push(Immediate(StackHandler::TRY_FINALLY));
    }
    push(Immediate(Smi::FromInt(StackHandler::kCodeNotPresent)));
    push(ebp);
    push(edi);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The parameter pointer is meaningless here and ebp does not
    // point to a JS frame. So we save NULL for both pp and ebp. We
    // expect the code throwing an exception to check ebp before
    // dereferencing it to restore the context.
    push(Immediate(StackHandler::ENTRY));
    push(Immediate(Smi::FromInt(StackHandler::kCodeNotPresent)));
    push(Immediate(0));  // NULL frame pointer
    push(Immediate(0));  // NULL parameter pointer
  }
  // Cached TOS.
  mov(eax, Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
  // Link this handler.
  mov(Operand::StaticVariable(ExternalReference(Top::k_handler_address)), esp);
}


Register MacroAssembler::CheckMaps(JSObject* object, Register object_reg,
                                   JSObject* holder, Register holder_reg,
                                   Register scratch,
                                   Label* miss) {
  // Make sure there's no overlap between scratch and the other
  // registers.
  ASSERT(!scratch.is(object_reg) && !scratch.is(holder_reg));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 1;

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  while (object != holder) {
    depth++;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(object->IsJSGlobalObject() || !object->IsAccessCheckNeeded());

    JSObject* prototype = JSObject::cast(object->GetPrototype());
    if (Heap::InNewSpace(prototype)) {
      // Get the map of the current object.
      mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
      cmp(Operand(scratch), Immediate(Handle<Map>(object->map())));
      // Branch on the result of the map check.
      j(not_equal, miss, not_taken);
      // Check access rights to the global object. This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (object->IsJSGlobalObject()) {
        CheckAccessGlobal(reg, scratch, miss);
        // Restore scratch register to be the map of the object. We
        // load the prototype from the map in the scratch register.
        mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
      }
      // The prototype is in new space; we cannot store a reference
      // to it in the code. Load it from the map.
      reg = holder_reg;  // from now the object is in holder_reg
      mov(reg, FieldOperand(scratch, Map::kPrototypeOffset));
    } else {
      // Check the map of the current object.
      cmp(FieldOperand(reg, HeapObject::kMapOffset),
          Immediate(Handle<Map>(object->map())));
      // Branch on the result of the map check.
      j(not_equal, miss, not_taken);
      // Check access rights to the global object. This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (object->IsJSGlobalObject()) {
        CheckAccessGlobal(reg, scratch, miss);
      }
      // The prototype is in old space; load it directly.
      reg = holder_reg;  // from now the object is in holder_reg
      mov(reg, Handle<JSObject>(prototype));
    }

    // Go to the next object in the prototype chain.
    object = prototype;
  }

  // Check the holder map.
  cmp(FieldOperand(reg, HeapObject::kMapOffset),
      Immediate(Handle<Map>(holder->map())));
  j(not_equal, miss, not_taken);

  // Log the check depth.
  LOG(IntEvent("check-maps-depth", depth));

  // Perform security check for access to the global object and return
  // the holder register.
  ASSERT(object == holder);
  ASSERT(object->IsJSGlobalObject() || !object->IsAccessCheckNeeded());
  if (object->IsJSGlobalObject()) {
    CheckAccessGlobal(reg, scratch, miss);
  }
  return reg;
}


void MacroAssembler::CheckAccessGlobal(Register holder_reg,
                                       Register scratch,
                                       Label* miss) {
  ASSERT(!holder_reg.is(scratch));

  // Load the security context.
  ExternalReference security_context =
      ExternalReference(Top::k_security_context_address);
  mov(scratch, Operand::StaticVariable(security_context));
  // When generating debug code, make sure the security context is set.
  if (FLAG_debug_code) {
    cmp(Operand(scratch), Immediate(0));
    Check(not_equal, "we should not have an empty security context");
  }
  // Load the global object of the security context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, offset));
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(scratch, FieldOperand(scratch, JSGlobalObject::kSecurityTokenOffset));
  cmp(scratch, FieldOperand(holder_reg, JSGlobalObject::kSecurityTokenOffset));
  j(not_equal, miss, not_taken);
}


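// Jump to then_label when a zero 'result' should really have been -0:
// the sign of the operand(s) decides. The one-operand form tests the
// sign of 'op'; the two-operand form below or's op1 and op2 together and
// branches if either sign bit is set.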
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);
  test(op, Operand(op));
  j(sign, then_label, not_taken);
  bind(&ok);
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);
  mov(scratch, Operand(op1));
  or_(scratch, Operand(op2));
  j(sign, then_label, not_taken);
  bind(&ok);
}


void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(!generating_stub());  // calls are not allowed in stubs
  call(stub->GetCode(), code_target);
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


void MacroAssembler::IllegalOperation() {
  push(Immediate(Factory::undefined_value()));
}


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  if (num_arguments < 1) {
    // Must have a receiver for the call.
    IllegalOperation();
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.

  if (f->nargs < 0) {
    // The number of arguments is not constant for this call.
    // Receiver does not count as an argument.
    mov(Operand(eax), Immediate(num_arguments - 1));
  } else {
    if (f->nargs != num_arguments) {
      IllegalOperation();
      return;
    }
    // Receiver does not count as an argument.
    mov(Operand(eax), Immediate(f->nargs - 1));
  }

  RuntimeStub stub((Runtime::FunctionId) f->stub_id);
  CallStub(&stub);
}


void MacroAssembler::TailCallRuntime(Runtime::Function* f) {
  JumpToBuiltin(ExternalReference(f));  // tail call to runtime routine
}


void MacroAssembler::JumpToBuiltin(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(Operand(ebx), Immediate(ext));
  CEntryStub ces;
  jmp(ces.GetCode(), code_target);
}


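// Common prologue for the InvokeCode variants below. When the expected
// and actual argument counts are known to match, nothing is emitted.
// Otherwise eax and ebx are loaded with the actual and expected counts
// (unless a runtime comparison proves them equal and jumps to 'invoke')
// and the ArgumentsAdaptorTrampoline builtin is called, or jumped to for
// JUMP_FUNCTION, with edx holding the code to invoke.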
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    const Operand& code_operand,
                                    Label* done,
                                    InvokeFlag flag) {
  bool definitely_matches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(eax, actual.immediate());
      mov(ebx, expected.immediate());
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      ASSERT(expected.reg().is(ebx));
      mov(eax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), Operand(actual.reg()));
      j(equal, &invoke);
      ASSERT(actual.reg().is(eax));
      ASSERT(expected.reg().is(ebx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
    if (!code_constant.is_null()) {
      mov(Operand(edx), Immediate(code_constant));
      add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_operand.is_reg(edx)) {
      mov(edx, code_operand);
    }

    if (flag == CALL_FUNCTION) {
      call(adaptor, code_target);
      jmp(done);
    } else {
      jmp(adaptor, code_target);
    }
    bind(&invoke);
  }
}


void MacroAssembler::InvokeCode(const Operand& code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag) {
  Label done;
  InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code);
  }
  bind(&done);
}


void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocMode rmode,
                                InvokeFlag flag) {
  Label done;
  Operand dummy(eax);
  InvokePrologue(expected, actual, code, dummy, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code, rmode);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code, rmode);
  }
  bind(&done);
}


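// Invoke the JSFunction in edi: edx is loaded with the code entry from
// the shared function info, ebx with the formal parameter count and esi
// with the function's context, then control is handed to InvokeCode.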
void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(fun.is(edi));
  mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset));
  lea(edx, FieldOperand(edx, Code::kHeaderSize));

  ParameterCount expected(ebx);
  InvokeCode(Operand(edx), expected, actual, flag);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
  bool resolved;
  Handle<Code> code = ResolveBuiltin(id, &resolved);

  // Calls are not allowed in stubs.
  ASSERT(flag == JUMP_FUNCTION || !generating_stub());

  // Rely on the assertion to check that the number of provided
  // arguments matches the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  InvokeCode(Handle<Code>(code), expected, expected, code_target, flag);

  const char* name = Builtins::GetName(id);
  int argc = Builtins::GetArgumentsCount(id);

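  // If the builtin could not be resolved, remember where its 32-bit code
  // target was just emitted (the trailing four bytes of the invoke
  // sequence, hence pc_offset() - sizeof(int32_t)) so the bootstrapper
  // can patch the call site once the builtin is compiled.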
  if (!resolved) {
    uint32_t flags =
        Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
        Bootstrapper::FixupFlagsIsPCRelative::encode(true);
    Unresolved entry = { pc_offset() - sizeof(int32_t), flags, name };
    unresolved_.Add(entry);
  }
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  bool resolved;
  Handle<Code> code = ResolveBuiltin(id, &resolved);

  const char* name = Builtins::GetName(id);
  int argc = Builtins::GetArgumentsCount(id);

  mov(Operand(target), Immediate(code));
  if (!resolved) {
    uint32_t flags =
        Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
        Bootstrapper::FixupFlagsIsPCRelative::encode(false);
    Unresolved entry = { pc_offset() - sizeof(int32_t), flags, name };
    unresolved_.Add(entry);
  }
  add(Operand(target), Immediate(Code::kHeaderSize - kHeapObjectTag));
}


Handle<Code> MacroAssembler::ResolveBuiltin(Builtins::JavaScript id,
                                            bool* resolved) {
  // Move the builtin function into the temporary function slot by
  // reading it from the builtins object. NOTE: We should be able to
  // reduce this to two instructions by putting the function table in
  // the global object instead of the "builtins" object and by using a
  // real register for the function.
  mov(edx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  mov(edx, FieldOperand(edx, GlobalObject::kBuiltinsOffset));
  int builtins_offset =
      JSBuiltinsObject::kJSBuiltinsOffset + (id * kPointerSize);
  mov(edi, FieldOperand(edx, builtins_offset));

  Code* code = Builtins::builtin(Builtins::Illegal);
  *resolved = false;

  if (Top::security_context() != NULL) {
    Object* object = Top::security_context_builtins()->javascript_builtin(id);
    if (object->IsJSFunction()) {
      Handle<JSFunction> function(JSFunction::cast(object));
      // Make sure the number of parameters matches the formal parameter count.
      ASSERT(function->shared()->formal_parameter_count() ==
             Builtins::GetArgumentsCount(id));
      if (function->is_compiled() || CompileLazy(function, CLEAR_EXCEPTION)) {
        code = function->code();
        *resolved = true;
      }
    }
  }

  return Handle<Code>(code);
}


void MacroAssembler::Ret() {
  ret(0);
}


void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      inc(operand);
    } else {
      add(operand, Immediate(value));
    }
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      dec(operand);
    } else {
      sub(operand, Immediate(value));
    }
  }
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code) Check(cc, msg);
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L, taken);
  Abort(msg);
  // will not return here
  bind(&L);
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  push(eax);
  push(Immediate(p0));
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
}


CodePatcher::CodePatcher(byte* address, int size)
    : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to
  // patch. The size is adjusted with kGap in order for the assembler to
  // generate size bytes of instructions without failing with buffer size
  // constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


} }  // namespace v8::internal