// Copyright 2006-2008 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "debug.h"
#include "runtime.h"
#include "serialize.h"

namespace v8 { namespace internal {

MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      unresolved_(0),
      generating_stub_(false),
      allow_stub_calls_(true) {
}


static void RecordWriteHelper(MacroAssembler* masm,
                              Register object,
                              Register addr,
                              Register scratch) {
  Label fast;

  // Compute the page address from the heap object pointer, leave it
  // in 'object'.
  masm->and_(object, ~Page::kPageAlignmentMask);

  // Compute the bit address in the remembered set, leave it in 'addr'.
  masm->sub(addr, Operand(object));
  masm->shr(addr, kObjectAlignmentBits);

  // If the bit offset lies beyond the normal remembered set range, it is in
  // the extra remembered set area of a large object.
  masm->cmp(addr, Page::kPageSize / kPointerSize);
  masm->j(less, &fast);

  // Adjust 'addr' to be relative to the start of the extra remembered set
  // and the page address in 'object' to be the address of the extra
  // remembered set.
  masm->sub(Operand(addr), Immediate(Page::kPageSize / kPointerSize));
  // Load the array length into 'scratch' and multiply by four to get the
  // size in bytes of the elements.
  masm->mov(scratch, Operand(object, Page::kObjectStartOffset
                                     + FixedArray::kLengthOffset));
  masm->shl(scratch, kObjectAlignmentBits);
  // Add the page header, array header, and array body size to the page
  // address.
  masm->add(Operand(object), Immediate(Page::kObjectStartOffset
                                       + Array::kHeaderSize));
  masm->add(object, Operand(scratch));

  // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
  // to limit code size. We should probably evaluate this decision by
  // measuring the performance of an equivalent implementation using
  // "simpler" instructions.
  masm->bind(&fast);
  masm->bts(Operand(object, 0), addr);
}


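// Out-of-line record write stub: takes the object, the address of the slot
// written to, and a scratch register, and sets the corresponding remembered
// set bit via RecordWriteHelper above.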
class RecordWriteStub : public CodeStub {
 public:
  RecordWriteStub(Register object, Register addr, Register scratch)
      : object_(object), addr_(addr), scratch_(scratch) { }

  void Generate(MacroAssembler* masm);

 private:
  Register object_;
  Register addr_;
  Register scratch_;

  const char* GetName() { return "RecordWriteStub"; }

#ifdef DEBUG
  void Print() {
    PrintF("RecordWriteStub (object reg %d), (addr reg %d), (scratch reg %d)\n",
           object_.code(), addr_.code(), scratch_.code());
  }
#endif

  // Minor key encoding in 12 bits of three registers (object, address and
  // scratch) OOOOAAAASSSS.
  class ScratchBits: public BitField<uint32_t, 0, 4> {};
  class AddressBits: public BitField<uint32_t, 4, 4> {};
  class ObjectBits: public BitField<uint32_t, 8, 4> {};

  Major MajorKey() { return RecordWrite; }

  int MinorKey() {
    // Encode the registers.
    return ObjectBits::encode(object_.code()) |
           AddressBits::encode(addr_.code()) |
           ScratchBits::encode(scratch_.code());
  }
};


void RecordWriteStub::Generate(MacroAssembler* masm) {
  RecordWriteHelper(masm, object_, addr_, scratch_);
  masm->ret(0);
}


// Set the remembered set bit for [object+offset].
// object is the object being stored into, value is the object being stored.
// If offset is zero, then the scratch register contains the array index into
// the elements array represented as a Smi.
// All registers are clobbered by the operation.
void MacroAssembler::RecordWrite(Register object, int offset,
                                 Register value, Register scratch) {
  // First, check if a remembered set write is even needed. The tests below
  // catch stores of Smis and stores into young gen (which does not have space
  // for the remembered set bits).
  Label done;

  // This optimization cannot survive serialization and deserialization,
  // so we disable it as long as serialization can take place.
  int32_t new_space_start =
      reinterpret_cast<int32_t>(ExternalReference::new_space_start().address());
  if (Serializer::enabled() || new_space_start < 0) {
    // Cannot do smart bit-twiddling. Need to do two consecutive checks.
    // Check for Smi first.
    test(value, Immediate(kSmiTagMask));
    j(zero, &done);
    // Test that the object address is not in the new space. We cannot
    // set remembered set bits in the new space.
    mov(value, Operand(object));
    and_(value, Heap::NewSpaceMask());
    cmp(Operand(value), Immediate(ExternalReference::new_space_start()));
    j(equal, &done);
  } else {
    // Move the value's Smi tag into the sign bit.
    shl(value, 31);
    // Combine the object with the value's Smi tag.
    or_(value, Operand(object));
    // Remove the uninteresting bits inside the page.
    and_(value, Heap::NewSpaceMask() | (1 << 31));
    // The xor has two effects:
    // - if the value was a Smi, the result will be negative
    // - if the object is pointing into the new space area, the page bits
    //   will all be zero
    xor_(value, new_space_start | (1 << 31));
    // Check for both conditions in one branch.
    j(less_equal, &done);
  }

  if ((offset > 0) && (offset < Page::kMaxHeapObjectSize)) {
    // Compute the bit offset in the remembered set, leave it in 'value'.
    mov(value, Operand(object));
    and_(value, Page::kPageAlignmentMask);
    add(Operand(value), Immediate(offset));
    shr(value, kObjectAlignmentBits);

    // Compute the page address from the heap object pointer, leave it in
    // 'object'.
    and_(object, ~Page::kPageAlignmentMask);

    // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
    // to limit code size. We should probably evaluate this decision by
    // measuring the performance of an equivalent implementation using
    // "simpler" instructions.
    bts(Operand(object, 0), value);
  } else {
    Register dst = scratch;
    if (offset != 0) {
      lea(dst, Operand(object, offset));
    } else {
      // Array access: calculate the destination address in the same manner
      // as KeyedStoreIC::GenerateGeneric.
      lea(dst,
          Operand(object, dst, times_2, Array::kHeaderSize - kHeapObjectTag));
    }
    // If we are already generating a shared stub, not inlining the
    // record write code isn't going to save us any memory.
    if (generating_stub()) {
      RecordWriteHelper(this, object, dst, value);
    } else {
      RecordWriteStub stub(object, dst, value);
      CallStub(&stub);
    }
  }

  bind(&done);
}


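// The following helpers copy the JS caller-saved registers to and from the
// per-register memory locations the debugger uses (Debug_Address::Register).
// This allows register state to be saved and restored around break points.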
void MacroAssembler::SaveRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the contents of the registers to the memory locations.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(Operand::StaticVariable(reg_addr), reg);
    }
  }
}


void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the contents of the memory locations to the registers.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(reg, Operand::StaticVariable(reg_addr));
    }
  }
}


void MacroAssembler::PushRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Push the contents of the memory locations onto the stack.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      push(Operand::StaticVariable(reg_addr));
    }
  }
}


void MacroAssembler::PopRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Pop the contents from the stack to the memory locations.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      pop(Operand::StaticVariable(reg_addr));
    }
  }
}


void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
                                                    Register scratch,
                                                    RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the contents of the stack to the memory locations and adjust base.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      mov(scratch, Operand(base, 0));
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(Operand::StaticVariable(reg_addr), scratch);
      lea(base, Operand(base, kPointerSize));
    }
  }
}


void MacroAssembler::Set(Register dst, const Immediate& x) {
  if (x.is_zero()) {
    xor_(dst, Operand(dst));  // shorter than mov
  } else {
    mov(Operand(dst), x);
  }
}


void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}


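// Compare the two values on top of the FPU stack and pop both, then copy
// the FPU condition codes into the CPU flags (via the status word in ax)
// so ordinary conditional jumps can test the result. eax is preserved.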
void MacroAssembler::FCmp() {
  fcompp();
  push(eax);
  fnstsw_ax();
  sahf();
  pop(eax);
}


void MacroAssembler::EnterInternalFrame() {
  int type = StackFrame::INTERNAL;

  push(ebp);
  mov(ebp, Operand(esp));
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(0));  // Push an empty code cache slot.
}


void MacroAssembler::LeaveInternalFrame() {
  if (FLAG_debug_code) {
    StackFrame::Type type = StackFrame::INTERNAL;
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, "stack frame types must match");
  }
  leave();
}


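// Build an exit frame for calling out to C++ code. Expects the number of
// arguments in eax; leaves argc in edi and a pointer to the arguments on
// the caller's stack (argv) in esi, as expected by the C entry stub.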
void MacroAssembler::EnterExitFrame(StackFrame::Type type) {
  ASSERT(type == StackFrame::EXIT || type == StackFrame::EXIT_DEBUG);

  // Setup the frame structure on the stack.
  ASSERT(ExitFrameConstants::kPPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(ebp);
  mov(ebp, Operand(esp));

  // Reserve room for entry stack pointer and push the debug marker.
  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // saved entry sp, patched before call
  push(Immediate(type == StackFrame::EXIT_DEBUG ? 1 : 0));

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  ExternalReference context_address(Top::k_context_address);
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);

  // Setup argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, Operand(eax));
  lea(esi, Operand(ebp, eax, times_4, offset));

  // Save the state of all registers to the stack from the memory
  // location. This is needed to allow nested break points.
  if (type == StackFrame::EXIT_DEBUG) {
    // TODO(1243899): This should be symmetric to
    // CopyRegistersFromStackToMemory() but it isn't! esp is assumed
    // correct here, but computed for the other call. Very error
    // prone! FIX THIS. Actually there are deeper problems with
    // register saving than this asymmetry (see the bug report
    // associated with this issue).
    PushRegistersFromMemory(kJSCallerSaved);
  }

  // Reserve space for two arguments: argc and argv.
  sub(Operand(esp), Immediate(2 * kPointerSize));

  // Get the required frame alignment for the OS.
  static const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}


void MacroAssembler::LeaveExitFrame(StackFrame::Type type) {
  // Restore the memory copy of the registers by digging them out from
  // the stack. This is needed to allow nested break points.
  if (type == StackFrame::EXIT_DEBUG) {
    // It's okay to clobber register ebx below because we don't need
    // the function pointer after this.
    const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
    int kOffset = ExitFrameConstants::kDebugMarkOffset - kCallerSavedSize;
    lea(ebx, Operand(ebp, kOffset));
    CopyRegistersFromStackToMemory(ebx, ecx, kJSCallerSaved);
  }

  // Get the return address from the stack and restore the frame pointer.
  mov(ecx, Operand(ebp, 1 * kPointerSize));
  mov(ebp, Operand(ebp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  lea(esp, Operand(esi, 1 * kPointerSize));

  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Top::k_context_address);
  mov(esi, Operand::StaticVariable(context_address));
  if (kDebug) {
    mov(Operand::StaticVariable(context_address), Immediate(0));
  }

  // Push the return address to get ready to return.
  push(ecx);

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}


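// Push a new try handler on the stack and link it into the handler chain
// kept at Top::k_handler_address. The layout pushed here must match
// StackHandlerConstants.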
void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  ASSERT(StackHandlerConstants::kSize == 6 * kPointerSize);  // adjust this code
  // The pc (return address) is already on TOS.
  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      push(Immediate(StackHandler::TRY_CATCH));
    } else {
      push(Immediate(StackHandler::TRY_FINALLY));
    }
    push(Immediate(Smi::FromInt(StackHandler::kCodeNotPresent)));
    push(ebp);
    push(edi);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The parameter pointer is meaningless here and ebp does not
    // point to a JS frame. So we save NULL for both pp and ebp. We
    // expect the code throwing an exception to check ebp before
    // dereferencing it to restore the context.
    push(Immediate(StackHandler::ENTRY));
    push(Immediate(Smi::FromInt(StackHandler::kCodeNotPresent)));
    push(Immediate(0));  // NULL frame pointer
    push(Immediate(0));  // NULL parameter pointer
  }
  // Cached TOS.
  mov(eax, Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
  // Link this handler.
  mov(Operand::StaticVariable(ExternalReference(Top::k_handler_address)), esp);
}


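// Walk the prototype chain from 'object' to 'holder', comparing the map of
// each object against its expected map and checking access rights on global
// objects along the way. Jumps to 'miss' on any mismatch; on success returns
// the register that holds the holder object.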
Register MacroAssembler::CheckMaps(JSObject* object, Register object_reg,
                                   JSObject* holder, Register holder_reg,
                                   Register scratch,
                                   Label* miss) {
  // Make sure there's no overlap between scratch and the other
  // registers.
  ASSERT(!scratch.is(object_reg) && !scratch.is(holder_reg));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 1;

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  while (object != holder) {
    depth++;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(object->IsJSGlobalObject() || !object->IsAccessCheckNeeded());

    JSObject* prototype = JSObject::cast(object->GetPrototype());
    if (Heap::InNewSpace(prototype)) {
      // Get the map of the current object.
      mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
      cmp(Operand(scratch), Immediate(Handle<Map>(object->map())));
      // Branch on the result of the map check.
      j(not_equal, miss, not_taken);
      // Check access rights to the global object. This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (object->IsJSGlobalObject()) {
        CheckAccessGlobal(reg, scratch, miss);
        // Restore scratch register to be the map of the object. We
        // load the prototype from the map in the scratch register.
        mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
      }
      // The prototype is in new space; we cannot store a reference
      // to it in the code. Load it from the map.
      reg = holder_reg;  // From now on the object is in holder_reg.
      mov(reg, FieldOperand(scratch, Map::kPrototypeOffset));
    } else {
      // Check the map of the current object.
      cmp(FieldOperand(reg, HeapObject::kMapOffset),
          Immediate(Handle<Map>(object->map())));
      // Branch on the result of the map check.
      j(not_equal, miss, not_taken);
      // Check access rights to the global object. This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (object->IsJSGlobalObject()) {
        CheckAccessGlobal(reg, scratch, miss);
      }
      // The prototype is in old space; load it directly.
      reg = holder_reg;  // From now on the object is in holder_reg.
      mov(reg, Handle<JSObject>(prototype));
    }

    // Go to the next object in the prototype chain.
    object = prototype;
  }

  // Check the holder map.
  cmp(FieldOperand(reg, HeapObject::kMapOffset),
      Immediate(Handle<Map>(holder->map())));
  j(not_equal, miss, not_taken);

  // Log the check depth.
  LOG(IntEvent("check-maps-depth", depth));

  // Perform the security check for access to the global object and return
  // the holder register.
  ASSERT(object == holder);
  ASSERT(object->IsJSGlobalObject() || !object->IsAccessCheckNeeded());
  if (object->IsJSGlobalObject()) {
    CheckAccessGlobal(reg, scratch, miss);
  }
  return reg;
}


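// Check that the security token of the global object in holder_reg matches
// the one of the global object in the current security context; jump to
// 'miss' if not. Clobbers scratch.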
void MacroAssembler::CheckAccessGlobal(Register holder_reg,
                                       Register scratch,
                                       Label* miss) {
  ASSERT(!holder_reg.is(scratch));

  // Load the security context.
  ExternalReference security_context =
      ExternalReference(Top::k_security_context_address);
  mov(scratch, Operand::StaticVariable(security_context));
  // When generating debug code, make sure the security context is set.
  if (FLAG_debug_code) {
    cmp(Operand(scratch), Immediate(0));
    Check(not_equal, "we should not have an empty security context");
  }
  // Load the global object of the security context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, offset));
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(scratch, FieldOperand(scratch, JSGlobalObject::kSecurityTokenOffset));
  cmp(scratch, FieldOperand(holder_reg, JSGlobalObject::kSecurityTokenOffset));
  j(not_equal, miss, not_taken);
}


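// The two NegativeZeroTest variants below catch the case where an operation
// produced a zero result from a negative operand, i.e. where the exact
// result is negative zero, and jump to 'then_label' so the caller can
// handle it specially.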
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);
  test(op, Operand(op));
  j(sign, then_label, not_taken);
  bind(&ok);
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);
  mov(scratch, Operand(op1));
  or_(scratch, Operand(op2));
  j(sign, then_label, not_taken);
  bind(&ok);
}


void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // calls are not allowed in some stubs
  call(stub->GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    add(Operand(esp), Immediate(num_arguments * kPointerSize));
  }
  mov(Operand(eax), Immediate(Factory::undefined_value()));
}


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  Runtime::FunctionId function_id =
      static_cast<Runtime::FunctionId>(f->stub_id);
  RuntimeStub stub(function_id, num_arguments);
  CallStub(&stub);
}


void MacroAssembler::TailCallRuntime(const ExternalReference& ext,
                                     int num_arguments) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  mov(Operand(eax), Immediate(num_arguments));
  JumpToBuiltin(ext);
}


void MacroAssembler::JumpToBuiltin(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(Operand(ebx), Immediate(ext));
  CEntryStub ces;
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}


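// Shared prologue for the InvokeCode variants below. If the expected and
// actual argument counts can differ, the actual count is placed in eax and
// the expected count in ebx, and the arguments adaptor trampoline is called
// (or jumped to) instead of the code itself.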
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    const Operand& code_operand,
                                    Label* done,
                                    InvokeFlag flag) {
  bool definitely_matches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(eax, actual.immediate());
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip the adaptation code by making it
        // look like we have a match between the expected and actual
        // number of arguments.
        definitely_matches = true;
      } else {
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in a register, actual is an immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      ASSERT(expected.reg().is(ebx));
      mov(eax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), Operand(actual.reg()));
      j(equal, &invoke);
      ASSERT(actual.reg().is(eax));
      ASSERT(expected.reg().is(ebx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
    if (!code_constant.is_null()) {
      mov(Operand(edx), Immediate(code_constant));
      add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_operand.is_reg(edx)) {
      mov(edx, code_operand);
    }

    if (flag == CALL_FUNCTION) {
      call(adaptor, RelocInfo::CODE_TARGET);
      jmp(done);
    } else {
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}


void MacroAssembler::InvokeCode(const Operand& code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag) {
  Label done;
  InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code);
  }
  bind(&done);
}


void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag) {
  Label done;
  Operand dummy(eax);
  InvokePrologue(expected, actual, code, dummy, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code, rmode);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code, rmode);
  }
  bind(&done);
}


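// Invoke the JSFunction in edi: load its context, shared function info and
// code object, and invoke the code with the function's formal parameter
// count as the expected argument count.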
void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(fun.is(edi));
  mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset));
  lea(edx, FieldOperand(edx, Code::kHeaderSize));

  ParameterCount expected(ebx);
  InvokeCode(Operand(edx), expected, actual, flag);
}


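// Invoke a JavaScript builtin by id. If the builtin's code object has not
// been resolved yet, the call site is recorded in unresolved_ so that the
// bootstrapper can patch it later.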
void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
  bool resolved;
  Handle<Code> code = ResolveBuiltin(id, &resolved);

  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments matches the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  InvokeCode(Handle<Code>(code), expected, expected,
             RelocInfo::CODE_TARGET, flag);

  const char* name = Builtins::GetName(id);
  int argc = Builtins::GetArgumentsCount(id);

  if (!resolved) {
    uint32_t flags =
        Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
        Bootstrapper::FixupFlagsIsPCRelative::encode(true);
    Unresolved entry = { pc_offset() - sizeof(int32_t), flags, name };
    unresolved_.Add(entry);
  }
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  bool resolved;
  Handle<Code> code = ResolveBuiltin(id, &resolved);

  const char* name = Builtins::GetName(id);
  int argc = Builtins::GetArgumentsCount(id);

  mov(Operand(target), Immediate(code));
  if (!resolved) {
    uint32_t flags =
        Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
        Bootstrapper::FixupFlagsIsPCRelative::encode(false);
    Unresolved entry = { pc_offset() - sizeof(int32_t), flags, name };
    unresolved_.Add(entry);
  }
  add(Operand(target), Immediate(Code::kHeaderSize - kHeapObjectTag));
}


Handle<Code> MacroAssembler::ResolveBuiltin(Builtins::JavaScript id,
                                            bool* resolved) {
  // Move the builtin function into the temporary function slot by
  // reading it from the builtins object. NOTE: We should be able to
  // reduce this to two instructions by putting the function table in
  // the global object instead of the "builtins" object and by using a
  // real register for the function.
  mov(edx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  mov(edx, FieldOperand(edx, GlobalObject::kBuiltinsOffset));
  int builtins_offset =
      JSBuiltinsObject::kJSBuiltinsOffset + (id * kPointerSize);
  mov(edi, FieldOperand(edx, builtins_offset));

  return Builtins::GetCode(id, resolved);
}


void MacroAssembler::Ret() {
  ret(0);
}


void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      inc(operand);
    } else {
      add(operand, Immediate(value));
    }
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      dec(operand);
    } else {
      sub(operand, Immediate(value));
    }
  }
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code) Check(cc, msg);
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L, taken);
  Abort(msg);
  // will not return here
  bind(&L);
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  push(eax);
  push(Immediate(p0));
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
}


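// CodePatcher patches 'size' bytes of previously generated code in place.
// A minimal usage sketch (assuming the accompanying header exposes the
// masm_ assembler, e.g. through a masm() accessor; the instruction and
// size are illustrative only):
//
//   CodePatcher patcher(address, 1);
//   patcher.masm()->int3();  // overwrite one byte with a breakpoint
//
// The destructor flushes the instruction cache and asserts that exactly
// 'size' bytes were emitted.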
CodePatcher::CodePatcher(byte* address, int size)
    : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to
  // patch. The size is adjusted with kGap in order for the assembler to
  // generate size bytes of instructions without failing with buffer size
  // constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


} }  // namespace v8::internal