blob: 72808de4d1b86aff8e0185259214af364e3c96fa [file] [log] [blame]
ager@chromium.org9258b6b2008-09-11 09:11:10 +00001// Copyright 2006-2008 the V8 project authors. All rights reserved.
christian.plesner.hansen43d26ec2008-07-03 15:10:15 +00002// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
29
30#include "bootstrapper.h"
31#include "codegen-inl.h"
32#include "debug.h"
33#include "runtime.h"
34#include "serialize.h"
35
36namespace v8 { namespace internal {
37
// Construct a macro assembler emitting into the given buffer.  Starts
// with no unresolved symbol references, not generating a stub, and with
// stub calls allowed.
MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      unresolved_(0),
      generating_stub_(false),
      allow_stub_calls_(true) {
}
44
45
// Emit code that sets the remembered set bit for the slot whose address
// is in 'addr', where 'object' holds the heap object containing that
// slot.  Handles both the page's normal remembered set and the extra
// remembered set area of large fixed arrays.  All three registers are
// clobbered.
static void RecordWriteHelper(MacroAssembler* masm,
                              Register object,
                              Register addr,
                              Register scratch) {
  Label fast;

  // Compute the page address from the heap object pointer, leave it
  // in 'object'.
  masm->and_(object, ~Page::kPageAlignmentMask);

  // Compute the bit addr in the remembered set, leave it in "addr".
  masm->sub(addr, Operand(object));
  masm->shr(addr, kObjectAlignmentBits);

  // If the bit offset lies beyond the normal remembered set range, it is in
  // the extra remembered set area of a large object.
  masm->cmp(addr, Page::kPageSize / kPointerSize);
  masm->j(less, &fast);

  // Adjust 'addr' to be relative to the start of the extra remembered set
  // and the page address in 'object' to be the address of the extra
  // remembered set.
  masm->sub(Operand(addr), Immediate(Page::kPageSize / kPointerSize));
  // Load the array length into 'scratch' and multiply by four to get the
  // size in bytes of the elements.
  masm->mov(scratch, Operand(object, Page::kObjectStartOffset
                                     + FixedArray::kLengthOffset));
  masm->shl(scratch, kObjectAlignmentBits);
  // Add the page header, array header, and array body size to the page
  // address.
  masm->add(Operand(object), Immediate(Page::kObjectStartOffset
                                       + Array::kHeaderSize));
  masm->add(object, Operand(scratch));

  // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
  // to limit code size. We should probably evaluate this decision by
  // measuring the performance of an equivalent implementation using
  // "simpler" instructions.
  masm->bind(&fast);
  masm->bts(Operand(object, 0), addr);
}
88
89
90class RecordWriteStub : public CodeStub {
91 public:
92 RecordWriteStub(Register object, Register addr, Register scratch)
93 : object_(object), addr_(addr), scratch_(scratch) { }
94
95 void Generate(MacroAssembler* masm);
96
97 private:
98 Register object_;
99 Register addr_;
100 Register scratch_;
101
christian.plesner.hansen43d26ec2008-07-03 15:10:15 +0000102#ifdef DEBUG
103 void Print() {
104 PrintF("RecordWriteStub (object reg %d), (addr reg %d), (scratch reg %d)\n",
105 object_.code(), addr_.code(), scratch_.code());
106 }
107#endif
108
109 // Minor key encoding in 12 bits of three registers (object, address and
110 // scratch) OOOOAAAASSSS.
111 class ScratchBits: public BitField<uint32_t, 0, 4> {};
112 class AddressBits: public BitField<uint32_t, 4, 4> {};
113 class ObjectBits: public BitField<uint32_t, 8, 4> {
114};
115
116 Major MajorKey() { return RecordWrite; }
117
118 int MinorKey() {
119 // Encode the registers.
120 return ObjectBits::encode(object_.code()) |
121 AddressBits::encode(addr_.code()) |
122 ScratchBits::encode(scratch_.code());
123 }
124};
125
126
// Emit the stub body: set the remembered set bit for the slot in addr_
// within object_, then return to the caller.
void RecordWriteStub::Generate(MacroAssembler* masm) {
  RecordWriteHelper(masm, object_, addr_, scratch_);
  masm->ret(0);
}
131
132
// Set the remembered set bit for [object+offset].
// object is the object being stored into, value is the object being stored.
// If offset is zero, then the scratch register contains the array index into
// the elements array represented as a Smi.
// All registers are clobbered by the operation.
void MacroAssembler::RecordWrite(Register object, int offset,
                                 Register value, Register scratch) {
  // First, check if a remembered set write is even needed. The tests below
  // catch stores of Smis and stores into young gen (which does not have
  // space for the remembered set bits).
  Label done;

  // This optimization cannot survive serialization and deserialization,
  // so we disable as long as serialization can take place.
  int32_t new_space_start =
      reinterpret_cast<int32_t>(ExternalReference::new_space_start().address());
  if (Serializer::enabled() || new_space_start < 0) {
    // Cannot do smart bit-twiddling. Need to do two consecutive checks.
    // Check for Smi first.
    test(value, Immediate(kSmiTagMask));
    j(zero, &done);
    // Test that the object address is not in the new space.  We cannot
    // set remembered set bits in the new space.
    mov(value, Operand(object));
    and_(value, Heap::NewSpaceMask());
    cmp(Operand(value), Immediate(ExternalReference::new_space_start()));
    j(equal, &done);
  } else {
    // Move the value SmiTag into the sign bit.
    shl(value, 31);
    // Combine the object with the value SmiTag.
    or_(value, Operand(object));
    // Remove the uninteresting bits inside the page.
    and_(value, Heap::NewSpaceMask() | (1 << 31));
    // xor has two effects:
    // - if the value was a smi, then the result will be negative
    // - if the object is pointing into new space area the page bits will
    //   all be zero
    xor_(value, new_space_start | (1 << 31));
    // Check for both conditions in one branch.
    j(less_equal, &done);
  }

  if ((offset > 0) && (offset < Page::kMaxHeapObjectSize)) {
    // Compute the bit offset in the remembered set, leave it in 'value'.
    mov(value, Operand(object));
    and_(value, Page::kPageAlignmentMask);
    add(Operand(value), Immediate(offset));
    shr(value, kObjectAlignmentBits);

    // Compute the page address from the heap object pointer, leave it in
    // 'object'.
    and_(object, ~Page::kPageAlignmentMask);

    // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
    // to limit code size. We should probably evaluate this decision by
    // measuring the performance of an equivalent implementation using
    // "simpler" instructions.
    bts(Operand(object, 0), value);
  } else {
    Register dst = scratch;
    if (offset != 0) {
      lea(dst, Operand(object, offset));
    } else {
      // Array access: calculate the destination address in the same manner
      // as KeyedStoreIC::GenerateGeneric.
      lea(dst,
          Operand(object, dst, times_2, Array::kHeaderSize - kHeapObjectTag));
    }
    // If we are already generating a shared stub, not inlining the
    // record write code isn't going to save us any memory.
    if (generating_stub()) {
      RecordWriteHelper(this, object, dst, value);
    } else {
      RecordWriteStub stub(object, dst, value);
      CallStub(&stub);
    }
  }

  bind(&done);
}
214
215
216void MacroAssembler::SaveRegistersToMemory(RegList regs) {
217 ASSERT((regs & ~kJSCallerSaved) == 0);
218 // Copy the content of registers to memory location.
219 for (int i = 0; i < kNumJSCallerSaved; i++) {
220 int r = JSCallerSavedCode(i);
221 if ((regs & (1 << r)) != 0) {
222 Register reg = { r };
223 ExternalReference reg_addr =
224 ExternalReference(Debug_Address::Register(i));
225 mov(Operand::StaticVariable(reg_addr), reg);
226 }
227 }
228}
229
230
231void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
232 ASSERT((regs & ~kJSCallerSaved) == 0);
233 // Copy the content of memory location to registers.
234 for (int i = kNumJSCallerSaved; --i >= 0;) {
235 int r = JSCallerSavedCode(i);
236 if ((regs & (1 << r)) != 0) {
237 Register reg = { r };
238 ExternalReference reg_addr =
239 ExternalReference(Debug_Address::Register(i));
240 mov(reg, Operand::StaticVariable(reg_addr));
241 }
242 }
243}
244
245
246void MacroAssembler::PushRegistersFromMemory(RegList regs) {
247 ASSERT((regs & ~kJSCallerSaved) == 0);
248 // Push the content of the memory location to the stack.
249 for (int i = 0; i < kNumJSCallerSaved; i++) {
250 int r = JSCallerSavedCode(i);
251 if ((regs & (1 << r)) != 0) {
252 ExternalReference reg_addr =
253 ExternalReference(Debug_Address::Register(i));
254 push(Operand::StaticVariable(reg_addr));
255 }
256 }
257}
258
259
260void MacroAssembler::PopRegistersToMemory(RegList regs) {
261 ASSERT((regs & ~kJSCallerSaved) == 0);
262 // Pop the content from the stack to the memory location.
263 for (int i = kNumJSCallerSaved; --i >= 0;) {
264 int r = JSCallerSavedCode(i);
265 if ((regs & (1 << r)) != 0) {
266 ExternalReference reg_addr =
267 ExternalReference(Debug_Address::Register(i));
268 pop(Operand::StaticVariable(reg_addr));
269 }
270 }
271}
272
273
274void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
275 Register scratch,
276 RegList regs) {
277 ASSERT((regs & ~kJSCallerSaved) == 0);
278 // Copy the content of the stack to the memory location and adjust base.
279 for (int i = kNumJSCallerSaved; --i >= 0;) {
280 int r = JSCallerSavedCode(i);
281 if ((regs & (1 << r)) != 0) {
282 mov(scratch, Operand(base, 0));
283 ExternalReference reg_addr =
284 ExternalReference(Debug_Address::Register(i));
285 mov(Operand::StaticVariable(reg_addr), scratch);
286 lea(base, Operand(base, kPointerSize));
287 }
288 }
289}
290
291
292void MacroAssembler::Set(Register dst, const Immediate& x) {
293 if (x.is_zero()) {
294 xor_(dst, Operand(dst)); // shorter than mov
295 } else {
296 mov(Operand(dst), x);
297 }
298}
299
300
// Store the immediate 'x' into the memory or register operand 'dst'.
void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}
304
305
// Compare the two values on top of the FPU stack, popping both, and
// transfer the FPU condition codes into EFLAGS so ordinary integer
// branches can be used afterwards.  eax is preserved across the
// fnstsw_ax clobber by the surrounding push/pop.
void MacroAssembler::FCmp() {
  fcompp();
  push(eax);
  fnstsw_ax();
  sahf();
  pop(eax);
}
313
314
// Build a standard stack frame: save the caller's frame pointer, the
// context (esi) and a Smi-encoded frame type marker.
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, Operand(esp));
  push(esi);  // Save the context.
  push(Immediate(Smi::FromInt(type)));  // Frame type marker.
  push(Immediate(0));  // Push an empty code cache slot.
}
322
323
// Tear down a standard stack frame.  In debug builds, first verify that
// the frame being left carries the expected type marker.
void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (FLAG_debug_code) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, "stack frame types must match");
  }
  leave();
}
332
333
// Enter an exit frame for calling out to C code.  Expects the argument
// count in eax; on exit edi holds argc and esi points at the arguments
// (argv).  For EXIT_DEBUG frames the JS caller-saved registers are also
// pushed so that nested break points work.  Aligns esp to the OS frame
// alignment and records the frame pointer and context in Top.
void MacroAssembler::EnterExitFrame(StackFrame::Type type) {
  ASSERT(type == StackFrame::EXIT || type == StackFrame::EXIT_DEBUG);

  // Setup the frame structure on the stack.
  ASSERT(ExitFrameConstants::kPPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(ebp);
  mov(ebp, Operand(esp));

  // Reserve room for entry stack pointer and push the debug marker.
  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // saved entry sp, patched before call
  push(Immediate(type == StackFrame::EXIT_DEBUG ? 1 : 0));

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  ExternalReference context_address(Top::k_context_address);
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);

  // Setup argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, Operand(eax));
  lea(esi, Operand(ebp, eax, times_4, offset));

  // Save the state of all registers to the stack from the memory
  // location. This is needed to allow nested break points.
  if (type == StackFrame::EXIT_DEBUG) {
    // TODO(1243899): This should be symmetric to
    // CopyRegistersFromStackToMemory() but it isn't! esp is assumed
    // correct here, but computed for the other call. Very error
    // prone! FIX THIS. Actually there are deeper problems with
    // register saving than this asymmetry (see the bug report
    // associated with this issue).
    PushRegistersFromMemory(kJSCallerSaved);
  }

  // Reserve space for two arguments: argc and argv.
  sub(Operand(esp), Immediate(2 * kPointerSize));

  // Get the required frame alignment for the OS.
  static const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}
385
386
// Leave an exit frame entered with EnterExitFrame: restore debugger
// register copies for EXIT_DEBUG frames, pop the arguments and receiver
// from the caller's stack, restore the context, and clear the top C
// entry frame pointer.
void MacroAssembler::LeaveExitFrame(StackFrame::Type type) {
  // Restore the memory copy of the registers by digging them out from
  // the stack. This is needed to allow nested break points.
  if (type == StackFrame::EXIT_DEBUG) {
    // It's okay to clobber register ebx below because we don't need
    // the function pointer after this.
    const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
    int kOffset = ExitFrameConstants::kDebugMarkOffset - kCallerSavedSize;
    lea(ebx, Operand(ebp, kOffset));
    CopyRegistersFromStackToMemory(ebx, ecx, kJSCallerSaved);
  }

  // Get the return address from the stack and restore the frame pointer.
  mov(ecx, Operand(ebp, 1 * kPointerSize));
  mov(ebp, Operand(ebp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  lea(esp, Operand(esi, 1 * kPointerSize));

  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Top::k_context_address);
  mov(esi, Operand::StaticVariable(context_address));
  if (kDebug) {
    mov(Operand::StaticVariable(context_address), Immediate(0));
  }

  // Push the return address to get ready to return.
  push(ecx);

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}
420
421
// Push a stack handler of the given kind on the stack and link it into
// the handler chain kept in Top::k_handler_address.  The return address
// is expected to already be on top of the stack.
void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  ASSERT(StackHandlerConstants::kSize == 6 * kPointerSize);  // adjust this code
  // The pc (return address) is already on TOS.
  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      push(Immediate(StackHandler::TRY_CATCH));
    } else {
      push(Immediate(StackHandler::TRY_FINALLY));
    }
    push(Immediate(Smi::FromInt(StackHandler::kCodeNotPresent)));
    push(ebp);
    push(edi);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The parameter pointer is meaningless here and ebp does not
    // point to a JS frame. So we save NULL for both pp and ebp. We
    // expect the code throwing an exception to check ebp before
    // dereferencing it to restore the context.
    push(Immediate(StackHandler::ENTRY));
    push(Immediate(Smi::FromInt(StackHandler::kCodeNotPresent)));
    push(Immediate(0));  // NULL frame pointer
    push(Immediate(0));  // NULL parameter pointer
  }
  // Cached TOS.
  mov(eax, Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
  // Link this handler.
  mov(Operand::StaticVariable(ExternalReference(Top::k_handler_address)), esp);
}
451
452
// Generate map checks for the prototype chain from 'object' up to
// 'holder'.  On success, returns the register that holds the holder
// object; on any map mismatch or failed access check, jumps to 'miss'.
// 'scratch' must not alias 'object_reg' or 'holder_reg'.
Register MacroAssembler::CheckMaps(JSObject* object, Register object_reg,
                                   JSObject* holder, Register holder_reg,
                                   Register scratch,
                                   Label* miss) {
  // Make sure there's no overlap between scratch and the other
  // registers.
  ASSERT(!scratch.is(object_reg) && !scratch.is(holder_reg));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 1;

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  while (object != holder) {
    depth++;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

    JSObject* prototype = JSObject::cast(object->GetPrototype());
    if (Heap::InNewSpace(prototype)) {
      // Get the map of the current object.
      mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
      cmp(Operand(scratch), Immediate(Handle<Map>(object->map())));
      // Branch on the result of the map check.
      j(not_equal, miss, not_taken);
      // Check access rights to the global object.  This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (object->IsJSGlobalProxy()) {
        CheckAccessGlobalProxy(reg, scratch, miss);

        // Restore scratch register to be the map of the object.
        // We load the prototype from the map in the scratch register.
        mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
      }
      // The prototype is in new space; we cannot store a reference
      // to it in the code. Load it from the map.
      reg = holder_reg;  // from now the object is in holder_reg
      mov(reg, FieldOperand(scratch, Map::kPrototypeOffset));

    } else {
      // Check the map of the current object.
      cmp(FieldOperand(reg, HeapObject::kMapOffset),
          Immediate(Handle<Map>(object->map())));
      // Branch on the result of the map check.
      j(not_equal, miss, not_taken);
      // Check access rights to the global object.  This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (object->IsJSGlobalProxy()) {
        CheckAccessGlobalProxy(reg, scratch, miss);
      }
      // The prototype is in old space; load it directly.
      reg = holder_reg;  // from now the object is in holder_reg
      mov(reg, Handle<JSObject>(prototype));
    }

    // Go to the next object in the prototype chain.
    object = prototype;
  }

  // Check the holder map.
  cmp(FieldOperand(reg, HeapObject::kMapOffset),
      Immediate(Handle<Map>(holder->map())));
  j(not_equal, miss, not_taken);

  // Log the check depth.
  LOG(IntEvent("check-maps-depth", depth));

  // Perform security check for access to the global object and return
  // the holder register.
  ASSERT(object == holder);
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
  if (object->IsJSGlobalProxy()) {
    CheckAccessGlobalProxy(reg, scratch, miss);
  }
  return reg;
}
534
535
// Generate a security check for access through the global proxy in
// 'holder_reg': compare the security token of the current lexical
// context with that of the proxy's context and jump to 'miss' if they
// differ.  Falls through when access is allowed.  Clobbers 'scratch';
// 'holder_reg' is preserved via push/pop.
void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));

  // Load current lexical context from the stack frame.
  mov(scratch, Operand(ebp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (FLAG_debug_code) {
    cmp(Operand(scratch), Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
  }
  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, offset));
  mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    push(scratch);
    // Read the first word and compare to global_context_map.
    mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
    cmp(scratch, Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(scratch);
  }

  // Check if both contexts are the same.
  cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  j(equal, &same_contexts, taken);

  // Compare security tokens, save holder_reg on the stack so we can use it
  // as a temporary register.
  //
  // TODO(119): avoid push(holder_reg)/pop(holder_reg)
  push(holder_reg);
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    cmp(holder_reg, Factory::null_value());
    Check(not_equal, "JSGlobalProxy::context() should not be null.");

    push(holder_reg);
    // Read the first word and compare to global_context_map().
    mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    cmp(holder_reg, Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);
  }

  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, token_offset));
  cmp(scratch, FieldOperand(holder_reg, token_offset));
  pop(holder_reg);
  j(not_equal, miss, not_taken);

  bind(&same_contexts);
}
602
603
// Jump to 'then_label' if 'result' is zero while 'op' is negative,
// i.e. when an arithmetic result would be a negative zero.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);   // Non-zero result: cannot be negative zero.
  test(op, Operand(op));
  j(sign, then_label, not_taken);  // Operand negative: negative zero.
  bind(&ok);
}
614
615
// Jump to 'then_label' if 'result' is zero while either 'op1' or 'op2'
// is negative, i.e. when an arithmetic result would be a negative zero.
// Clobbers 'scratch'.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);   // Non-zero result: cannot be negative zero.
  mov(scratch, Operand(op1));
  or_(scratch, Operand(op2));  // Sign bit set iff either operand negative.
  j(sign, then_label, not_taken);
  bind(&ok);
}
629
630
// Try to load the prototype of the JS function in 'function' into
// 'result'.  Jumps to 'miss' if 'function' is not actually a function
// or its prototype is not yet allocated (the hole).  Clobbers
// 'scratch'.
void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss) {
  // Check that the receiver isn't a smi.
  test(function, Immediate(kSmiTagMask));
  j(zero, miss, not_taken);

  // Check that the function really is a function.
  mov(result, FieldOperand(function, HeapObject::kMapOffset));
  movzx_b(scratch, FieldOperand(result, Map::kInstanceTypeOffset));
  cmp(scratch, JS_FUNCTION_TYPE);
  j(not_equal, miss, not_taken);

  // Make sure that the function has an instance prototype.
  Label non_instance;
  movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
  test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance, not_taken);

  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(Operand(result), Immediate(Factory::the_hole_value()));
  j(equal, miss, not_taken);

  // If the function does not have an initial map, we're done.
  Label done;
  mov(scratch, FieldOperand(result, HeapObject::kMapOffset));
  movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  cmp(scratch, MAP_TYPE);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  mov(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}
680
681
// Call a code stub through its generated code object.
void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // calls are not allowed in some stubs
  call(stub->GetCode(), RelocInfo::CODE_TARGET);
}
686
687
// Return from a stub, popping (argc - 1) * kPointerSize bytes of
// arguments from the caller's stack.
void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}
692
693
// Handle a runtime call made with the wrong number of arguments: drop
// the arguments from the stack and put undefined in eax as the result.
void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    add(Operand(esp), Immediate(num_arguments * kPointerSize));
  }
  mov(Operand(eax), Immediate(Factory::undefined_value()));
}
700
701
// Convenience overload: call a runtime function identified by id.
void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}
705
706
// Call the given runtime function through its runtime stub.  Bails out
// via IllegalOperation when the argument count does not match the
// function's fixed arity.
void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  Runtime::FunctionId function_id =
      static_cast<Runtime::FunctionId>(f->stub_id);
  RuntimeStub stub(function_id, num_arguments);
  CallStub(&stub);
}
721
722
// Tail-call a runtime routine through the C entry builtin, passing the
// argument count in eax.
void MacroAssembler::TailCallRuntime(const ExternalReference& ext,
                                     int num_arguments) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  mov(Operand(eax), Immediate(num_arguments));
  JumpToBuiltin(ext);
}
732
733
// Jump to the C entry stub with the entry point for 'ext' in ebx.
void MacroAssembler::JumpToBuiltin(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(Operand(ebx), Immediate(ext));
  CEntryStub ces;
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}
740
741
// Emit the argument-count check that precedes a function invocation.
// When the expected and actual counts provably match, falls straight
// through.  Otherwise sets up eax (actual) and ebx (expected) and
// calls or jumps to the arguments adaptor trampoline; after an adaptor
// call, control continues at 'done'.
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    const Operand& code_operand,
                                    Label* done,
                                    InvokeFlag flag) {
  bool definitely_matches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(eax, actual.immediate());
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      ASSERT(expected.reg().is(ebx));
      mov(eax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), Operand(actual.reg()));
      j(equal, &invoke);
      ASSERT(actual.reg().is(eax));
      ASSERT(expected.reg().is(ebx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
    // Pass the code to invoke to the adaptor in edx.
    if (!code_constant.is_null()) {
      mov(Operand(edx), Immediate(code_constant));
      add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_operand.is_reg(edx)) {
      mov(edx, code_operand);
    }

    if (flag == CALL_FUNCTION) {
      call(adaptor, RelocInfo::CODE_TARGET);
      jmp(done);
    } else {
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
805
806
// Invoke code at the given operand, adapting arguments via
// InvokePrologue if needed, either as a call or a tail jump.
void MacroAssembler::InvokeCode(const Operand& code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag) {
  Label done;
  InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code);
  }
  bind(&done);
}
821
822
823void MacroAssembler::InvokeCode(Handle<Code> code,
824 const ParameterCount& expected,
825 const ParameterCount& actual,
ager@chromium.org236ad962008-09-25 09:45:57 +0000826 RelocInfo::Mode rmode,
christian.plesner.hansen43d26ec2008-07-03 15:10:15 +0000827 InvokeFlag flag) {
828 Label done;
829 Operand dummy(eax);
830 InvokePrologue(expected, actual, code, dummy, &done, flag);
831 if (flag == CALL_FUNCTION) {
832 call(code, rmode);
833 } else {
834 ASSERT(flag == JUMP_FUNCTION);
835 jmp(code, rmode);
836 }
837 bind(&done);
838}
839
840
void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  // Invoke the JSFunction held in edi; the invoke sequence below is
  // hard-wired to that register.
  ASSERT(fun.is(edi));
  // Load the shared function info into edx and switch to the function's
  // context in esi.
  mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  // ebx <- formal parameter count (the register InvokePrologue expects).
  mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  // edx <- entry point of the function's code (skip the Code header).
  mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset));
  lea(edx, FieldOperand(edx, Code::kHeaderSize));

  ParameterCount expected(ebx);
  InvokeCode(Operand(edx), expected, actual, flag);
}
854
855
856void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
857 bool resolved;
858 Handle<Code> code = ResolveBuiltin(id, &resolved);
859
kasper.lund7276f142008-07-30 08:49:36 +0000860 // Calls are not allowed in some stubs.
861 ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());
christian.plesner.hansen43d26ec2008-07-03 15:10:15 +0000862
863 // Rely on the assertion to check that the number of provided
864 // arguments match the expected number of arguments. Fake a
865 // parameter count to avoid emitting code to do the check.
866 ParameterCount expected(0);
ager@chromium.org236ad962008-09-25 09:45:57 +0000867 InvokeCode(Handle<Code>(code), expected, expected,
868 RelocInfo::CODE_TARGET, flag);
christian.plesner.hansen43d26ec2008-07-03 15:10:15 +0000869
870 const char* name = Builtins::GetName(id);
871 int argc = Builtins::GetArgumentsCount(id);
872
873 if (!resolved) {
874 uint32_t flags =
875 Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
876 Bootstrapper::FixupFlagsIsPCRelative::encode(true);
877 Unresolved entry = { pc_offset() - sizeof(int32_t), flags, name };
878 unresolved_.Add(entry);
879 }
880}
881
882
883void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
884 bool resolved;
885 Handle<Code> code = ResolveBuiltin(id, &resolved);
886
887 const char* name = Builtins::GetName(id);
888 int argc = Builtins::GetArgumentsCount(id);
889
890 mov(Operand(target), Immediate(code));
891 if (!resolved) {
892 uint32_t flags =
893 Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
894 Bootstrapper::FixupFlagsIsPCRelative::encode(false);
895 Unresolved entry = { pc_offset() - sizeof(int32_t), flags, name };
896 unresolved_.Add(entry);
897 }
898 add(Operand(target), Immediate(Code::kHeaderSize - kHeapObjectTag));
899}
900
901
Handle<Code> MacroAssembler::ResolveBuiltin(Builtins::JavaScript id,
                                            bool* resolved) {
  // Loads the builtin JSFunction for |id| into edi (clobbering edx) and
  // returns its code object; *resolved is set to whether the builtin has
  // already been compiled (if not, the caller must record a fixup).
  //
  // Move the builtin function into the temporary function slot by
  // reading it from the builtins object. NOTE: We should be able to
  // reduce this to two instructions by putting the function table in
  // the global object instead of the "builtins" object and by using a
  // real register for the function.
  mov(edx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  mov(edx, FieldOperand(edx, GlobalObject::kBuiltinsOffset));
  int builtins_offset =
      JSBuiltinsObject::kJSBuiltinsOffset + (id * kPointerSize);
  mov(edi, FieldOperand(edx, builtins_offset));


  return Builtins::GetCode(id, resolved);
}
918
919
void MacroAssembler::Ret() {
  // Return without popping any arguments off the caller's stack.
  ret(0);
}
923
924
925void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
926 if (FLAG_native_code_counters && counter->Enabled()) {
927 mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
928 }
929}
930
931
932void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
933 ASSERT(value > 0);
934 if (FLAG_native_code_counters && counter->Enabled()) {
935 Operand operand = Operand::StaticVariable(ExternalReference(counter));
936 if (value == 1) {
937 inc(operand);
938 } else {
939 add(operand, Immediate(value));
940 }
941 }
942}
943
944
945void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
946 ASSERT(value > 0);
947 if (FLAG_native_code_counters && counter->Enabled()) {
948 Operand operand = Operand::StaticVariable(ExternalReference(counter));
949 if (value == 1) {
950 dec(operand);
951 } else {
952 sub(operand, Immediate(value));
953 }
954 }
955}
956
957
958void MacroAssembler::Assert(Condition cc, const char* msg) {
959 if (FLAG_debug_code) Check(cc, msg);
960}
961
962
963void MacroAssembler::Check(Condition cc, const char* msg) {
964 Label L;
965 j(cc, &L, taken);
966 Abort(msg);
967 // will not return here
968 bind(&L);
969}
970
971
972void MacroAssembler::Abort(const char* msg) {
973 // We want to pass the msg string like a smi to avoid GC
974 // problems, however msg is not guaranteed to be aligned
975 // properly. Instead, we pass an aligned pointer that is
976 // a proper v8 smi, but also pass the aligment difference
977 // from the real pointer as a smi.
978 intptr_t p1 = reinterpret_cast<intptr_t>(msg);
979 intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
980 ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
981#ifdef DEBUG
982 if (msg != NULL) {
983 RecordComment("Abort message: ");
984 RecordComment(msg);
985 }
986#endif
987 push(eax);
988 push(Immediate(p0));
989 push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
990 CallRuntime(Runtime::kAbort, 2);
991 // will not return here
992}
993
994
CodePatcher::CodePatcher(byte* address, int size)
    : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
1002
1003
CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected: exactly size_ bytes of
  // instructions were emitted and no relocation information was written.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
1012
1013
1014} } // namespace v8::internal