// Copyright 2006-2008 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "debug.h"
#include "runtime.h"
#include "serialize.h"

namespace v8 { namespace internal {

// -------------------------------------------------------------------------
// MacroAssembler implementation.

MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      unresolved_(0),
      generating_stub_(false),
      allow_stub_calls_(true),
      code_object_(Heap::undefined_value()) {
}


static void RecordWriteHelper(MacroAssembler* masm,
                              Register object,
                              Register addr,
                              Register scratch) {
  Label fast;

  // Compute the page address from the heap object pointer, leave it
  // in 'object'.
  masm->and_(object, ~Page::kPageAlignmentMask);

  // Compute the bit addr in the remembered set, leave it in 'addr'.
  masm->sub(addr, Operand(object));
  masm->shr(addr, kObjectAlignmentBits);
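  // For instance, assuming kObjectAlignmentBits is kPointerSizeLog2 (2 on
  // ia32), a slot 0x40 bytes past the page start ends up as bit index 0x10
  // in the remembered set.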

  // If the bit offset lies beyond the normal remembered set range, it is in
  // the extra remembered set area of a large object.
  masm->cmp(addr, Page::kPageSize / kPointerSize);
  masm->j(less, &fast);

  // Adjust 'addr' to be relative to the start of the extra remembered set
  // and the page address in 'object' to be the address of the extra
  // remembered set.
  masm->sub(Operand(addr), Immediate(Page::kPageSize / kPointerSize));
  // Load the array length into 'scratch' and multiply by four to get the
  // size in bytes of the elements.
  masm->mov(scratch, Operand(object, Page::kObjectStartOffset
                                     + FixedArray::kLengthOffset));
  masm->shl(scratch, kObjectAlignmentBits);
  // Add the page header, array header, and array body size to the page
  // address.
  masm->add(Operand(object), Immediate(Page::kObjectStartOffset
                                       + Array::kHeaderSize));
  masm->add(object, Operand(scratch));


  // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
  // to limit code size. We should probably evaluate this decision by
  // measuring the performance of an equivalent implementation using
  // "simpler" instructions.
  masm->bind(&fast);
  masm->bts(Operand(object, 0), addr);
}


class RecordWriteStub : public CodeStub {
 public:
  RecordWriteStub(Register object, Register addr, Register scratch)
      : object_(object), addr_(addr), scratch_(scratch) { }

  void Generate(MacroAssembler* masm);

 private:
  Register object_;
  Register addr_;
  Register scratch_;

#ifdef DEBUG
  void Print() {
    PrintF("RecordWriteStub (object reg %d), (addr reg %d), (scratch reg %d)\n",
           object_.code(), addr_.code(), scratch_.code());
  }
#endif

  // Minor key encoding in 12 bits of three registers (object, address and
  // scratch) OOOOAAAASSSS.
  class ScratchBits: public BitField<uint32_t, 0, 4> {};
  class AddressBits: public BitField<uint32_t, 4, 4> {};
  class ObjectBits: public BitField<uint32_t, 8, 4> {};
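  // As a hypothetical example: object in register code 0, address in
  // register code 1 and scratch in register code 2 would encode as 0x012
  // (0000 0001 0010 in the OOOOAAAASSSS layout above).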

  Major MajorKey() { return RecordWrite; }

  int MinorKey() {
    // Encode the registers.
    return ObjectBits::encode(object_.code()) |
           AddressBits::encode(addr_.code()) |
           ScratchBits::encode(scratch_.code());
  }
};


void RecordWriteStub::Generate(MacroAssembler* masm) {
  RecordWriteHelper(masm, object_, addr_, scratch_);
  masm->ret(0);
}


// Set the remembered set bit for [object+offset].
// object is the object being stored into, value is the object being stored.
// If offset is zero, then the scratch register contains the array index into
// the elements array represented as a Smi.
// All registers are clobbered by the operation.
void MacroAssembler::RecordWrite(Register object, int offset,
                                 Register value, Register scratch) {
  // First, check if a remembered set write is even needed. The tests below
  // catch stores of Smis and stores into the young generation (which does
  // not have space for the remembered set bits).
  Label done;

  // This optimization cannot survive serialization and deserialization,
  // so we disable it as long as serialization can take place.
  int32_t new_space_start =
      reinterpret_cast<int32_t>(ExternalReference::new_space_start().address());
  if (Serializer::enabled() || new_space_start < 0) {
    // Cannot do smart bit-twiddling. Need to do two consecutive checks.
    // Check for Smi first.
    test(value, Immediate(kSmiTagMask));
    j(zero, &done);
    // Test that the object address is not in the new space. We cannot
    // set remembered set bits in the new space.
    mov(value, Operand(object));
    and_(value, Heap::NewSpaceMask());
    cmp(Operand(value), Immediate(ExternalReference::new_space_start()));
    j(equal, &done);
  } else {
    // Move the value's smi tag into the sign bit.
    shl(value, 31);
    // Combine the object with the value's smi tag.
    or_(value, Operand(object));
    // Remove the uninteresting bits inside the page.
    and_(value, Heap::NewSpaceMask() | (1 << 31));
    // The xor has two effects:
    // - if the value was a smi, the result will be negative
    // - if the object points into the new space area, the page bits will
    //   all be zero
    xor_(value, new_space_start | (1 << 31));
    // Check for both conditions in one branch.
    j(less_equal, &done);
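    // less_equal is taken when the xor result is negative (the value was a
    // smi) or zero (the object lies in new space); in both cases no
    // remembered set bit needs to be written.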
  }

  if ((offset > 0) && (offset < Page::kMaxHeapObjectSize)) {
    // Compute the bit offset in the remembered set, leave it in 'value'.
    mov(value, Operand(object));
    and_(value, Page::kPageAlignmentMask);
    add(Operand(value), Immediate(offset));
    shr(value, kObjectAlignmentBits);

    // Compute the page address from the heap object pointer, leave it in
    // 'object'.
    and_(object, ~Page::kPageAlignmentMask);

    // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
    // to limit code size. We should probably evaluate this decision by
    // measuring the performance of an equivalent implementation using
    // "simpler" instructions.
    bts(Operand(object, 0), value);
  } else {
    Register dst = scratch;
    if (offset != 0) {
      lea(dst, Operand(object, offset));
    } else {
      // array access: calculate the destination address in the same manner as
      // KeyedStoreIC::GenerateGeneric
      lea(dst,
          Operand(object, dst, times_2, Array::kHeaderSize - kHeapObjectTag));
    }
    // If we are already generating a shared stub, not inlining the
    // record write code isn't going to save us any memory.
    if (generating_stub()) {
      RecordWriteHelper(this, object, dst, value);
    } else {
      RecordWriteStub stub(object, dst, value);
      CallStub(&stub);
    }
  }

  bind(&done);
}


void MacroAssembler::SaveRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of registers to memory location.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(Operand::StaticVariable(reg_addr), reg);
    }
  }
}


void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of memory location to registers.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(reg, Operand::StaticVariable(reg_addr));
    }
  }
}


void MacroAssembler::PushRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Push the content of the memory location to the stack.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      push(Operand::StaticVariable(reg_addr));
    }
  }
}


void MacroAssembler::PopRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Pop the content from the stack to the memory location.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      pop(Operand::StaticVariable(reg_addr));
    }
  }
}


void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
                                                    Register scratch,
                                                    RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of the stack to the memory location and adjust base.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      mov(scratch, Operand(base, 0));
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(Operand::StaticVariable(reg_addr), scratch);
      lea(base, Operand(base, kPointerSize));
    }
  }
}


void MacroAssembler::Set(Register dst, const Immediate& x) {
  if (x.is_zero()) {
    xor_(dst, Operand(dst));  // shorter than mov
  } else {
    mov(dst, x);
  }
}


void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}


void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}


void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       static_cast<int8_t>(type));
}


void MacroAssembler::FCmp() {
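  // fcompp compares st(0) with st(1) and pops both, fnstsw_ax copies the FPU
  // status word into ax, and sahf transfers it to the CPU flags so ordinary
  // conditional jumps can test the comparison. eax is preserved around the
  // sequence because ax is clobbered.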
  fcompp();
  push(eax);
  fnstsw_ax();
  sahf();
  pop(eax);
}


void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, Operand(esp));
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(CodeObject()));
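  // The frame now holds, from ebp downwards: the caller's ebp, the context
  // (esi), the frame type marker and the code object just pushed.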
  if (FLAG_debug_code) {
    cmp(Operand(esp, 0), Immediate(Factory::undefined_value()));
    Check(not_equal, "code object not properly patched");
  }
}


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (FLAG_debug_code) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, "stack frame types must match");
  }
  leave();
}


void MacroAssembler::EnterExitFrame(StackFrame::Type type) {
  ASSERT(type == StackFrame::EXIT || type == StackFrame::EXIT_DEBUG);

  // Setup the frame structure on the stack.
  ASSERT(ExitFrameConstants::kPPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(ebp);
  mov(ebp, Operand(esp));

  // Reserve room for entry stack pointer and push the debug marker.
  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // saved entry sp, patched before call
  push(Immediate(type == StackFrame::EXIT_DEBUG ? 1 : 0));

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  ExternalReference context_address(Top::k_context_address);
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);

  // Setup argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, Operand(eax));
  lea(esi, Operand(ebp, eax, times_4, offset));

  // Save the state of all registers to the stack from the memory
  // location. This is needed to allow nested break points.
  if (type == StackFrame::EXIT_DEBUG) {
    // TODO(1243899): This should be symmetric to
    // CopyRegistersFromStackToMemory() but it isn't! esp is assumed
    // correct here, but computed for the other call. Very error
    // prone! FIX THIS. Actually there are deeper problems with
    // register saving than this asymmetry (see the bug report
    // associated with this issue).
    PushRegistersFromMemory(kJSCallerSaved);
  }

  // Reserve space for two arguments: argc and argv.
  sub(Operand(esp), Immediate(2 * kPointerSize));

  // Get the required frame alignment for the OS.
  static const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}


void MacroAssembler::LeaveExitFrame(StackFrame::Type type) {
  // Restore the memory copy of the registers by digging them out from
  // the stack. This is needed to allow nested break points.
  if (type == StackFrame::EXIT_DEBUG) {
    // It's okay to clobber register ebx below because we don't need
    // the function pointer after this.
    const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
    int kOffset = ExitFrameConstants::kDebugMarkOffset - kCallerSavedSize;
    lea(ebx, Operand(ebp, kOffset));
    CopyRegistersFromStackToMemory(ebx, ecx, kJSCallerSaved);
  }

  // Get the return address from the stack and restore the frame pointer.
  mov(ecx, Operand(ebp, 1 * kPointerSize));
  mov(ebp, Operand(ebp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  lea(esp, Operand(esi, 1 * kPointerSize));

  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Top::k_context_address);
  mov(esi, Operand::StaticVariable(context_address));
  if (kDebug) {
    mov(Operand::StaticVariable(context_address), Immediate(0));
  }

  // Push the return address to get ready to return.
  push(ecx);

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}


void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  ASSERT(StackHandlerConstants::kSize == 6 * kPointerSize);  // adjust this code
  // The pc (return address) is already on TOS.
  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      push(Immediate(StackHandler::TRY_CATCH));
    } else {
      push(Immediate(StackHandler::TRY_FINALLY));
    }
    push(Immediate(Smi::FromInt(StackHandler::kCodeNotPresent)));
    push(ebp);
    push(edi);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The parameter pointer is meaningless here and ebp does not
    // point to a JS frame. So we save NULL for both pp and ebp. We
    // expect the code throwing an exception to check ebp before
    // dereferencing it to restore the context.
    push(Immediate(StackHandler::ENTRY));
    push(Immediate(Smi::FromInt(StackHandler::kCodeNotPresent)));
    push(Immediate(0));  // NULL frame pointer
    push(Immediate(0));  // NULL parameter pointer
  }
  // Cached TOS.
  mov(eax, Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
  // Link this handler.
  mov(Operand::StaticVariable(ExternalReference(Top::k_handler_address)), esp);
}


Register MacroAssembler::CheckMaps(JSObject* object, Register object_reg,
                                   JSObject* holder, Register holder_reg,
                                   Register scratch,
                                   Label* miss) {
  // Make sure there's no overlap between scratch and the other
  // registers.
  ASSERT(!scratch.is(object_reg) && !scratch.is(holder_reg));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 1;

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  while (object != holder) {
    depth++;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

    JSObject* prototype = JSObject::cast(object->GetPrototype());
    if (Heap::InNewSpace(prototype)) {
      // Get the map of the current object.
      mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
      cmp(Operand(scratch), Immediate(Handle<Map>(object->map())));
      // Branch on the result of the map check.
      j(not_equal, miss, not_taken);
      // Check access rights to the global object. This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (object->IsJSGlobalProxy()) {
        CheckAccessGlobalProxy(reg, scratch, miss);

        // Restore scratch register to be the map of the object.
        // We load the prototype from the map in the scratch register.
        mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
      }
      // The prototype is in new space; we cannot store a reference
      // to it in the code. Load it from the map.
      reg = holder_reg;  // from now the object is in holder_reg
      mov(reg, FieldOperand(scratch, Map::kPrototypeOffset));

    } else {
      // Check the map of the current object.
      cmp(FieldOperand(reg, HeapObject::kMapOffset),
          Immediate(Handle<Map>(object->map())));
      // Branch on the result of the map check.
      j(not_equal, miss, not_taken);
      // Check access rights to the global object. This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (object->IsJSGlobalProxy()) {
        CheckAccessGlobalProxy(reg, scratch, miss);
      }
      // The prototype is in old space; load it directly.
      reg = holder_reg;  // from now the object is in holder_reg
      mov(reg, Handle<JSObject>(prototype));
    }

    // Go to the next object in the prototype chain.
    object = prototype;
  }

  // Check the holder map.
  cmp(FieldOperand(reg, HeapObject::kMapOffset),
      Immediate(Handle<Map>(holder->map())));
  j(not_equal, miss, not_taken);

  // Log the check depth.
  LOG(IntEvent("check-maps-depth", depth));

  // Perform security check for access to the global object and return
  // the holder register.
  ASSERT(object == holder);
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
  if (object->IsJSGlobalProxy()) {
    CheckAccessGlobalProxy(reg, scratch, miss);
  }
  return reg;
}


void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));

  // Load current lexical context from the stack frame.
  mov(scratch, Operand(ebp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (FLAG_debug_code) {
    cmp(Operand(scratch), Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
  }
  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, offset));
  mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    push(scratch);
    // Read the first word and compare to global_context_map.
    mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
    cmp(scratch, Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(scratch);
  }

  // Check if both contexts are the same.
  cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  j(equal, &same_contexts, taken);

  // Compare security tokens, save holder_reg on the stack so we can use it
  // as a temporary register.
  //
  // TODO(119): avoid push(holder_reg)/pop(holder_reg)
  push(holder_reg);
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    cmp(holder_reg, Factory::null_value());
    Check(not_equal, "JSGlobalProxy::context() should not be null.");

    push(holder_reg);
    // Read the first word and compare to global_context_map().
    mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    cmp(holder_reg, Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);
  }

  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, token_offset));
  cmp(scratch, FieldOperand(holder_reg, token_offset));
  pop(holder_reg);
  j(not_equal, miss, not_taken);

  bind(&same_contexts);
}


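// The NegativeZeroTest variants below branch to their target when 'result'
// is zero while the operand (or one of the operands) is negative, i.e. when
// an integer operation should really have produced a negative zero.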
void MacroAssembler::NegativeZeroTest(CodeGenerator* cgen,
                                      Register result,
                                      Register op,
                                      JumpTarget* then_target) {
  JumpTarget ok(cgen);
  test(result, Operand(result));
  ok.Branch(not_zero, taken);
  test(op, Operand(op));
  then_target->Branch(sign, not_taken);
  ok.Bind();
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);
  test(op, Operand(op));
  j(sign, then_label, not_taken);
  bind(&ok);
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);
  mov(scratch, Operand(op1));
  or_(scratch, Operand(op2));
  j(sign, then_label, not_taken);
  bind(&ok);
}


void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss) {
  // Check that the receiver isn't a smi.
  test(function, Immediate(kSmiTagMask));
  j(zero, miss, not_taken);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss, not_taken);

  // Make sure that the function has an instance prototype.
  Label non_instance;
  movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
  test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance, not_taken);

  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(Operand(result), Immediate(Factory::the_hole_value()));
  j(equal, miss, not_taken);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  mov(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}


void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // calls are not allowed in some stubs
  call(stub->GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    add(Operand(esp), Immediate(num_arguments * kPointerSize));
  }
  mov(eax, Immediate(Factory::undefined_value()));
}


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  Runtime::FunctionId function_id =
      static_cast<Runtime::FunctionId>(f->stub_id);
  RuntimeStub stub(function_id, num_arguments);
  CallStub(&stub);
}


void MacroAssembler::TailCallRuntime(const ExternalReference& ext,
                                     int num_arguments) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  JumpToBuiltin(ext);
}


void MacroAssembler::JumpToBuiltin(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces;
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}


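// Helper for the InvokeCode variants below. When the expected and actual
// argument counts might not match, the actual count is placed in eax, the
// expected count in ebx and the code to call in edx, and control goes
// through the ArgumentsAdaptorTrampoline; when the counts are known or found
// to match, the adaptor is bypassed and the callee is entered directly.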
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    const Operand& code_operand,
                                    Label* done,
                                    InvokeFlag flag) {
  bool definitely_matches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(eax, actual.immediate());
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaptation code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      ASSERT(expected.reg().is(ebx));
      mov(eax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), Operand(actual.reg()));
      j(equal, &invoke);
      ASSERT(actual.reg().is(eax));
      ASSERT(expected.reg().is(ebx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
    if (!code_constant.is_null()) {
      mov(edx, Immediate(code_constant));
      add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_operand.is_reg(edx)) {
      mov(edx, code_operand);
    }

    if (flag == CALL_FUNCTION) {
      call(adaptor, RelocInfo::CODE_TARGET);
      jmp(done);
    } else {
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}


void MacroAssembler::InvokeCode(const Operand& code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag) {
  Label done;
  InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code);
  }
  bind(&done);
}


void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag) {
  Label done;
  Operand dummy(eax);
  InvokePrologue(expected, actual, code, dummy, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code, rmode);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code, rmode);
  }
  bind(&done);
}


void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(fun.is(edi));
  mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset));
  lea(edx, FieldOperand(edx, Code::kHeaderSize));

  ParameterCount expected(ebx);
  InvokeCode(Operand(edx), expected, actual, flag);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
  bool resolved;
  Handle<Code> code = ResolveBuiltin(id, &resolved);

  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments matches the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  InvokeCode(Handle<Code>(code), expected, expected,
             RelocInfo::CODE_TARGET, flag);

  const char* name = Builtins::GetName(id);
  int argc = Builtins::GetArgumentsCount(id);

  if (!resolved) {
    uint32_t flags =
        Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
        Bootstrapper::FixupFlagsIsPCRelative::encode(true) |
        Bootstrapper::FixupFlagsUseCodeObject::encode(false);
    Unresolved entry = { pc_offset() - sizeof(int32_t), flags, name };
    unresolved_.Add(entry);
  }
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  bool resolved;
  Handle<Code> code = ResolveBuiltin(id, &resolved);

  const char* name = Builtins::GetName(id);
  int argc = Builtins::GetArgumentsCount(id);

  mov(Operand(target), Immediate(code));
  if (!resolved) {
    uint32_t flags =
        Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
        Bootstrapper::FixupFlagsIsPCRelative::encode(false) |
        Bootstrapper::FixupFlagsUseCodeObject::encode(true);
    Unresolved entry = { pc_offset() - sizeof(int32_t), flags, name };
    unresolved_.Add(entry);
  }
  add(Operand(target), Immediate(Code::kHeaderSize - kHeapObjectTag));
}


Handle<Code> MacroAssembler::ResolveBuiltin(Builtins::JavaScript id,
                                            bool* resolved) {
  // Move the builtin function into the temporary function slot by
  // reading it from the builtins object. NOTE: We should be able to
  // reduce this to two instructions by putting the function table in
  // the global object instead of the "builtins" object and by using a
  // real register for the function.
  mov(edx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  mov(edx, FieldOperand(edx, GlobalObject::kBuiltinsOffset));
  int builtins_offset =
      JSBuiltinsObject::kJSBuiltinsOffset + (id * kPointerSize);
  mov(edi, FieldOperand(edx, builtins_offset));


  return Builtins::GetCode(id, resolved);
}


void MacroAssembler::Ret() {
  ret(0);
}


void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      inc(operand);
    } else {
      add(operand, Immediate(value));
    }
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      dec(operand);
    } else {
      sub(operand, Immediate(value));
    }
  }
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code) Check(cc, msg);
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L, taken);
  Abort(msg);
  // will not return here
  bind(&L);
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
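  // p0 is p1 with its low bits forced to the smi tag, so it always reads as
  // a valid smi; the runtime can reconstruct the original pointer by adding
  // back the difference p1 - p0 that is pushed below.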
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  push(eax);
  push(Immediate(p0));
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
}


CodePatcher::CodePatcher(byte* address, int size)
    : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


} }  // namespace v8::internal