// Copyright 2006-2008 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "debug.h"
#include "runtime.h"
#include "serialize.h"

namespace v8 { namespace internal {

MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      unresolved_(0),
      generating_stub_(false),
      allow_stub_calls_(true),
      code_object_(Heap::undefined_value()) {
}


static void RecordWriteHelper(MacroAssembler* masm,
                              Register object,
                              Register addr,
                              Register scratch) {
  Label fast;

  // Compute the page address from the heap object pointer, leave it
  // in 'object'.
  masm->and_(object, ~Page::kPageAlignmentMask);

  // Compute the bit address in the remembered set, leave it in 'addr'.
  masm->sub(addr, Operand(object));
  masm->shr(addr, kObjectAlignmentBits);
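  // Explanatory note: 'addr' now holds the offset of the written slot from
  // the start of its page, shifted down by kObjectAlignmentBits, i.e. the
  // bit index that is set in the page's remembered set by the bts below.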

  // If the bit offset lies beyond the normal remembered set range, it is in
  // the extra remembered set area of a large object.
  masm->cmp(addr, Page::kPageSize / kPointerSize);
  masm->j(less, &fast);

  // Adjust 'addr' to be relative to the start of the extra remembered set
  // and the page address in 'object' to be the address of the extra
  // remembered set.
  masm->sub(Operand(addr), Immediate(Page::kPageSize / kPointerSize));
  // Load the array length into 'scratch' and multiply by four to get the
  // size in bytes of the elements.
  masm->mov(scratch, Operand(object, Page::kObjectStartOffset
                             + FixedArray::kLengthOffset));
  masm->shl(scratch, kObjectAlignmentBits);
  // Add the page header, array header, and array body size to the page
  // address.
  masm->add(Operand(object), Immediate(Page::kObjectStartOffset
                                       + Array::kHeaderSize));
  masm->add(object, Operand(scratch));
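  // Explanatory note, following the comments above: 'object' now points just
  // past the array body, which is where the extra remembered set of a large
  // object is stored, and 'addr' is the bit index within that extra set.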


  // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
  // to limit code size. We should probably evaluate this decision by
  // measuring the performance of an equivalent implementation using
  // "simpler" instructions.
  masm->bind(&fast);
  masm->bts(Operand(object, 0), addr);
}


class RecordWriteStub : public CodeStub {
 public:
  RecordWriteStub(Register object, Register addr, Register scratch)
      : object_(object), addr_(addr), scratch_(scratch) { }

  void Generate(MacroAssembler* masm);

 private:
  Register object_;
  Register addr_;
  Register scratch_;

#ifdef DEBUG
  void Print() {
    PrintF("RecordWriteStub (object reg %d), (addr reg %d), (scratch reg %d)\n",
           object_.code(), addr_.code(), scratch_.code());
  }
#endif

  // Minor key encoding in 12 bits of three registers (object, address and
  // scratch) OOOOAAAASSSS.
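  // For example (illustrative register assignment only): object in eax
  // (code 0), addr in ecx (code 1) and scratch in edx (code 2) would
  // encode as minor key 0x012.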
  class ScratchBits: public BitField<uint32_t, 0, 4> {};
  class AddressBits: public BitField<uint32_t, 4, 4> {};
  class ObjectBits: public BitField<uint32_t, 8, 4> {};

  Major MajorKey() { return RecordWrite; }

  int MinorKey() {
    // Encode the registers.
    return ObjectBits::encode(object_.code()) |
           AddressBits::encode(addr_.code()) |
           ScratchBits::encode(scratch_.code());
  }
};


void RecordWriteStub::Generate(MacroAssembler* masm) {
  RecordWriteHelper(masm, object_, addr_, scratch_);
  masm->ret(0);
}


// Set the remembered set bit for [object+offset].
// object is the object being stored into, value is the object being stored.
// If offset is zero, then the scratch register contains the array index into
// the elements array represented as a Smi.
// All registers are clobbered by the operation.
void MacroAssembler::RecordWrite(Register object, int offset,
                                 Register value, Register scratch) {
  // First, check if a remembered set write is even needed. The tests below
  // catch stores of smis and stores into the young generation (which does
  // not have space for the remembered set bits).
  Label done;

  // This optimization cannot survive serialization and deserialization,
  // so we disable it as long as serialization can take place.
  int32_t new_space_start =
      reinterpret_cast<int32_t>(ExternalReference::new_space_start().address());
  if (Serializer::enabled() || new_space_start < 0) {
    // Cannot do smart bit-twiddling. Need to do two consecutive checks.
    // Check for Smi first.
    test(value, Immediate(kSmiTagMask));
    j(zero, &done);
    // Test that the object address is not in the new space. We cannot
    // set remembered set bits in the new space.
    mov(value, Operand(object));
    and_(value, Heap::NewSpaceMask());
    cmp(Operand(value), Immediate(ExternalReference::new_space_start()));
    j(equal, &done);
  } else {
    // Move the value's smi tag into the sign bit.
    shl(value, 31);
    // Combine the object address with the value's smi tag.
    or_(value, Operand(object));
    // Remove the uninteresting bits inside the page.
    and_(value, Heap::NewSpaceMask() | (1 << 31));
    // The xor has two effects:
    // - if the value was a smi, the result will be negative;
    // - if the object points into the new space area, the page bits will
    //   all be zero.
    xor_(value, new_space_start | (1 << 31));
    // Check for both conditions in one branch.
    j(less_equal, &done);
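    // In other words, the branch above is taken when the result is negative
    // (the value was a smi) or zero (the object is in new space); in either
    // case no remembered set bit needs to be recorded.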
  }

  if ((offset > 0) && (offset < Page::kMaxHeapObjectSize)) {
    // Compute the bit offset in the remembered set, leave it in 'value'.
    mov(value, Operand(object));
    and_(value, Page::kPageAlignmentMask);
    add(Operand(value), Immediate(offset));
    shr(value, kObjectAlignmentBits);
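    // 'value' now holds the remembered set bit index for [object + offset],
    // computed in the same way as in RecordWriteHelper above.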

    // Compute the page address from the heap object pointer, leave it in
    // 'object'.
    and_(object, ~Page::kPageAlignmentMask);

    // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
    // to limit code size. We should probably evaluate this decision by
    // measuring the performance of an equivalent implementation using
    // "simpler" instructions.
    bts(Operand(object, 0), value);
  } else {
    Register dst = scratch;
    if (offset != 0) {
      lea(dst, Operand(object, offset));
    } else {
      // Array access: calculate the destination address in the same manner
      // as KeyedStoreIC::GenerateGeneric.
      lea(dst,
          Operand(object, dst, times_2, Array::kHeaderSize - kHeapObjectTag));
    }
    // If we are already generating a shared stub, not inlining the
    // record write code isn't going to save us any memory.
    if (generating_stub()) {
      RecordWriteHelper(this, object, dst, value);
    } else {
      RecordWriteStub stub(object, dst, value);
      CallStub(&stub);
    }
  }

  bind(&done);
}


void MacroAssembler::SaveRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the contents of the registers to their memory locations.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(Operand::StaticVariable(reg_addr), reg);
    }
  }
}


void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the contents of the memory locations to the registers.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(reg, Operand::StaticVariable(reg_addr));
    }
  }
}


void MacroAssembler::PushRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Push the contents of the memory locations onto the stack.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      push(Operand::StaticVariable(reg_addr));
    }
  }
}


void MacroAssembler::PopRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Pop the contents from the stack into the memory locations.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      pop(Operand::StaticVariable(reg_addr));
    }
  }
}


void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
                                                    Register scratch,
                                                    RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the contents of the stack to the memory locations and adjust base.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      mov(scratch, Operand(base, 0));
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(Operand::StaticVariable(reg_addr), scratch);
      lea(base, Operand(base, kPointerSize));
    }
  }
}


void MacroAssembler::Set(Register dst, const Immediate& x) {
  if (x.is_zero()) {
    xor_(dst, Operand(dst));  // shorter than mov
  } else {
    mov(dst, x);
  }
}


void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}


void MacroAssembler::FCmp() {
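  // Explanatory note: fcompp() compares the two topmost x87 registers and
  // pops both, leaving the result in the FPU status word rather than in
  // EFLAGS. fnstsw_ax() copies the status word into ax and sahf() transfers
  // ah into the CPU flags, so the caller can use ordinary conditional jumps.
  // eax is saved and restored around the sequence.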
  fcompp();
  push(eax);
  fnstsw_ax();
  sahf();
  pop(eax);
}


void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, Operand(esp));
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(CodeObject()));
  if (FLAG_debug_code) {
    cmp(Operand(esp, 0), Immediate(Factory::undefined_value()));
    Check(not_equal, "code object not properly patched");
  }
}


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (FLAG_debug_code) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, "stack frame types must match");
  }
  leave();
}


void MacroAssembler::EnterExitFrame(StackFrame::Type type) {
  ASSERT(type == StackFrame::EXIT || type == StackFrame::EXIT_DEBUG);

  // Set up the frame structure on the stack.
  ASSERT(ExitFrameConstants::kPPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(ebp);
  mov(ebp, Operand(esp));

  // Reserve room for the entry stack pointer and push the debug marker.
  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // saved entry sp, patched before call
  push(Immediate(type == StackFrame::EXIT_DEBUG ? 1 : 0));

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  ExternalReference context_address(Top::k_context_address);
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);

  // Set up argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, Operand(eax));
  lea(esi, Operand(ebp, eax, times_4, offset));
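  // Explanatory note: edi now holds the argument count (copied from eax) and
  // esi points into the caller's argument area; both registers are
  // callee-saved, so they survive the C call made through this exit frame.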

  // Save the state of all registers to the stack from the memory
  // location. This is needed to allow nested break points.
  if (type == StackFrame::EXIT_DEBUG) {
    // TODO(1243899): This should be symmetric to
    // CopyRegistersFromStackToMemory() but it isn't! esp is assumed
    // correct here, but computed for the other call. Very error
    // prone! FIX THIS. Actually there are deeper problems with
    // register saving than this asymmetry (see the bug report
    // associated with this issue).
    PushRegistersFromMemory(kJSCallerSaved);
  }

  // Reserve space for two arguments: argc and argv.
  sub(Operand(esp), Immediate(2 * kPointerSize));

  // Get the required frame alignment for the OS.
  static const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    and_(esp, -kFrameAlignment);
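    // -kFrameAlignment acts as a mask with the low bits cleared (e.g. an
    // alignment of 16 gives 0xfffffff0), so the and_ above rounds esp down
    // to the required alignment.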
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}


void MacroAssembler::LeaveExitFrame(StackFrame::Type type) {
  // Restore the memory copy of the registers by digging them out from
  // the stack. This is needed to allow nested break points.
  if (type == StackFrame::EXIT_DEBUG) {
    // It's okay to clobber register ebx below because we don't need
    // the function pointer after this.
    const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
    int kOffset = ExitFrameConstants::kDebugMarkOffset - kCallerSavedSize;
    lea(ebx, Operand(ebp, kOffset));
    CopyRegistersFromStackToMemory(ebx, ecx, kJSCallerSaved);
  }

  // Get the return address from the stack and restore the frame pointer.
  mov(ecx, Operand(ebp, 1 * kPointerSize));
  mov(ebp, Operand(ebp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  lea(esp, Operand(esi, 1 * kPointerSize));

  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Top::k_context_address);
  mov(esi, Operand::StaticVariable(context_address));
  if (kDebug) {
    mov(Operand::StaticVariable(context_address), Immediate(0));
  }

  // Push the return address to get ready to return.
  push(ecx);

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}


void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  ASSERT(StackHandlerConstants::kSize == 6 * kPointerSize);  // adjust this code
  // The pc (return address) is already on TOS.
  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      push(Immediate(StackHandler::TRY_CATCH));
    } else {
      push(Immediate(StackHandler::TRY_FINALLY));
    }
    push(Immediate(Smi::FromInt(StackHandler::kCodeNotPresent)));
    push(ebp);
    push(edi);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The parameter pointer is meaningless here and ebp does not
    // point to a JS frame. So we save NULL for both pp and ebp. We
    // expect the code throwing an exception to check ebp before
    // dereferencing it to restore the context.
    push(Immediate(StackHandler::ENTRY));
    push(Immediate(Smi::FromInt(StackHandler::kCodeNotPresent)));
    push(Immediate(0));  // NULL frame pointer
    push(Immediate(0));  // NULL parameter pointer
  }
  // Cached TOS.
  mov(eax, Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
  // Link this handler.
  mov(Operand::StaticVariable(ExternalReference(Top::k_handler_address)), esp);
}


Register MacroAssembler::CheckMaps(JSObject* object, Register object_reg,
                                   JSObject* holder, Register holder_reg,
                                   Register scratch,
                                   Label* miss) {
  // Make sure there's no overlap between scratch and the other
  // registers.
  ASSERT(!scratch.is(object_reg) && !scratch.is(holder_reg));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 1;

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  while (object != holder) {
    depth++;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

    JSObject* prototype = JSObject::cast(object->GetPrototype());
    if (Heap::InNewSpace(prototype)) {
      // Get the map of the current object.
      mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
      cmp(Operand(scratch), Immediate(Handle<Map>(object->map())));
      // Branch on the result of the map check.
      j(not_equal, miss, not_taken);
      // Check access rights to the global object. This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (object->IsJSGlobalProxy()) {
        CheckAccessGlobalProxy(reg, scratch, miss);

        // Restore the scratch register to be the map of the object.
        // We load the prototype from the map in the scratch register.
        mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
      }
      // The prototype is in new space; we cannot store a reference
      // to it in the code. Load it from the map.
      reg = holder_reg;  // From now on the object is in holder_reg.
      mov(reg, FieldOperand(scratch, Map::kPrototypeOffset));

    } else {
      // Check the map of the current object.
      cmp(FieldOperand(reg, HeapObject::kMapOffset),
          Immediate(Handle<Map>(object->map())));
      // Branch on the result of the map check.
      j(not_equal, miss, not_taken);
      // Check access rights to the global object. This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (object->IsJSGlobalProxy()) {
        CheckAccessGlobalProxy(reg, scratch, miss);
      }
      // The prototype is in old space; load it directly.
      reg = holder_reg;  // From now on the object is in holder_reg.
      mov(reg, Handle<JSObject>(prototype));
    }

    // Go to the next object in the prototype chain.
    object = prototype;
  }

  // Check the holder map.
  cmp(FieldOperand(reg, HeapObject::kMapOffset),
      Immediate(Handle<Map>(holder->map())));
  j(not_equal, miss, not_taken);

  // Log the check depth.
  LOG(IntEvent("check-maps-depth", depth));

  // Perform a security check for access to the global object and return
  // the holder register.
  ASSERT(object == holder);
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
  if (object->IsJSGlobalProxy()) {
    CheckAccessGlobalProxy(reg, scratch, miss);
  }
  return reg;
}


void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));

  // Load the current lexical context from the stack frame.
  mov(scratch, Operand(ebp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (FLAG_debug_code) {
    cmp(Operand(scratch), Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
  }
  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, offset));
  mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check that the context is a global context.
  if (FLAG_debug_code) {
    push(scratch);
    // Read the first word and compare to global_context_map.
    mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
    cmp(scratch, Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(scratch);
  }

  // Check if both contexts are the same.
  cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  j(equal, &same_contexts, taken);

  // Compare security tokens; save holder_reg on the stack so we can use it
  // as a temporary register.
  //
  // TODO(119): avoid push(holder_reg)/pop(holder_reg)
  push(holder_reg);
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));

  // Check that the context is a global context.
  if (FLAG_debug_code) {
    cmp(holder_reg, Factory::null_value());
    Check(not_equal, "JSGlobalProxy::context() should not be null.");

    push(holder_reg);
    // Read the first word and compare to global_context_map.
    mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    cmp(holder_reg, Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);
  }

  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, token_offset));
  cmp(scratch, FieldOperand(holder_reg, token_offset));
  pop(holder_reg);
  j(not_equal, miss, not_taken);

  bind(&same_contexts);
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
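  // Explanatory note: a zero result combined with a negative operand means
  // the exact result would have been -0, which cannot be represented as an
  // integer; in that case control branches to then_label.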
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);
  test(op, Operand(op));
  j(sign, then_label, not_taken);
  bind(&ok);
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);
  mov(scratch, Operand(op1));
  or_(scratch, Operand(op2));
  j(sign, then_label, not_taken);
  bind(&ok);
}


void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss) {
  // Check that the receiver isn't a smi.
  test(function, Immediate(kSmiTagMask));
  j(zero, miss, not_taken);

  // Check that the function really is a function.
  mov(result, FieldOperand(function, HeapObject::kMapOffset));
  movzx_b(scratch, FieldOperand(result, Map::kInstanceTypeOffset));
  cmp(scratch, JS_FUNCTION_TYPE);
  j(not_equal, miss, not_taken);

  // Make sure that the function has an instance prototype.
  Label non_instance;
  movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
  test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance, not_taken);

  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(Operand(result), Immediate(Factory::the_hole_value()));
  j(equal, miss, not_taken);

  // If the function does not have an initial map, we're done.
  Label done;
  mov(scratch, FieldOperand(result, HeapObject::kMapOffset));
  movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  cmp(scratch, MAP_TYPE);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  mov(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}


void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // calls are not allowed in some stubs
  call(stub->GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    add(Operand(esp), Immediate(num_arguments * kPointerSize));
  }
  mov(eax, Immediate(Factory::undefined_value()));
}


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  Runtime::FunctionId function_id =
      static_cast<Runtime::FunctionId>(f->stub_id);
  RuntimeStub stub(function_id, num_arguments);
  CallStub(&stub);
}


void MacroAssembler::TailCallRuntime(const ExternalReference& ext,
                                     int num_arguments) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  JumpToBuiltin(ext);
}


void MacroAssembler::JumpToBuiltin(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces;
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    const Operand& code_operand,
                                    Label* done,
                                    InvokeFlag flag) {
  bool definitely_matches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(eax, actual.immediate());
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip the adaptation code by making it
        // look like we have a match between the expected and actual
        // number of arguments.
        definitely_matches = true;
      } else {
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in a register, actual is an immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      ASSERT(expected.reg().is(ebx));
      mov(eax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), Operand(actual.reg()));
      j(equal, &invoke);
      ASSERT(actual.reg().is(eax));
      ASSERT(expected.reg().is(ebx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
    if (!code_constant.is_null()) {
      mov(edx, Immediate(code_constant));
      add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_operand.is_reg(edx)) {
      mov(edx, code_operand);
    }

    if (flag == CALL_FUNCTION) {
      call(adaptor, RelocInfo::CODE_TARGET);
      jmp(done);
    } else {
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}


void MacroAssembler::InvokeCode(const Operand& code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag) {
  Label done;
  InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code);
  }
  bind(&done);
}


void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag) {
  Label done;
  Operand dummy(eax);
  InvokePrologue(expected, actual, code, dummy, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code, rmode);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code, rmode);
  }
  bind(&done);
}


void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(fun.is(edi));
  mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset));
  lea(edx, FieldOperand(edx, Code::kHeaderSize));

  ParameterCount expected(ebx);
  InvokeCode(Operand(edx), expected, actual, flag);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
  bool resolved;
  Handle<Code> code = ResolveBuiltin(id, &resolved);

  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments matches the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  InvokeCode(Handle<Code>(code), expected, expected,
             RelocInfo::CODE_TARGET, flag);

  const char* name = Builtins::GetName(id);
  int argc = Builtins::GetArgumentsCount(id);

  if (!resolved) {
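    // Explanatory note: the builtin has not been compiled yet, so record a
    // fixup entry; pc_offset() - sizeof(int32_t) is the offset of the 32-bit
    // code target just emitted, which the bootstrapper patches once the
    // builtin becomes available.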
    uint32_t flags =
        Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
        Bootstrapper::FixupFlagsIsPCRelative::encode(true) |
        Bootstrapper::FixupFlagsUseCodeObject::encode(false);
    Unresolved entry = { pc_offset() - sizeof(int32_t), flags, name };
    unresolved_.Add(entry);
  }
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  bool resolved;
  Handle<Code> code = ResolveBuiltin(id, &resolved);

  const char* name = Builtins::GetName(id);
  int argc = Builtins::GetArgumentsCount(id);

  mov(Operand(target), Immediate(code));
  if (!resolved) {
    uint32_t flags =
        Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
        Bootstrapper::FixupFlagsIsPCRelative::encode(false) |
        Bootstrapper::FixupFlagsUseCodeObject::encode(true);
    Unresolved entry = { pc_offset() - sizeof(int32_t), flags, name };
    unresolved_.Add(entry);
  }
  add(Operand(target), Immediate(Code::kHeaderSize - kHeapObjectTag));
}


Handle<Code> MacroAssembler::ResolveBuiltin(Builtins::JavaScript id,
                                            bool* resolved) {
  // Move the builtin function into the temporary function slot by
  // reading it from the builtins object. NOTE: We should be able to
  // reduce this to two instructions by putting the function table in
  // the global object instead of the "builtins" object and by using a
  // real register for the function.
  mov(edx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  mov(edx, FieldOperand(edx, GlobalObject::kBuiltinsOffset));
  int builtins_offset =
      JSBuiltinsObject::kJSBuiltinsOffset + (id * kPointerSize);
  mov(edi, FieldOperand(edx, builtins_offset));

  return Builtins::GetCode(id, resolved);
}


void MacroAssembler::Ret() {
  ret(0);
}


void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      inc(operand);
    } else {
      add(operand, Immediate(value));
    }
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      dec(operand);
    } else {
      sub(operand, Immediate(value));
    }
  }
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code) Check(cc, msg);
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L, taken);
  Abort(msg);
  // will not return here
  bind(&L);
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string as if it were a smi to avoid GC
  // problems; however, msg is not guaranteed to be aligned properly.
  // Instead, we pass an aligned pointer that is a proper v8 smi, but
  // also pass the alignment difference from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
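  // For example (illustrative address only): if msg is at 0x08049f01, then
  // p0 is 0x08049f00, whose smi tag bit is clear, and the second argument
  // pushed below is Smi::FromInt(1); the runtime can reconstruct the
  // original char* as p0 + (p1 - p0).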
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  push(eax);
  push(Immediate(p0));
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
}


CodePatcher::CodePatcher(byte* address, int size)
    : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


} }  // namespace v8::internal