blob: 7636c4ed80f5992745744cb8f324cbc53642ae5d [file] [log] [blame]
ager@chromium.org9258b6b2008-09-11 09:11:10 +00001// Copyright 2006-2008 the V8 project authors. All rights reserved.
christian.plesner.hansen43d26ec2008-07-03 15:10:15 +00002// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
29
30#include "bootstrapper.h"
31#include "codegen-inl.h"
32#include "debug.h"
33#include "runtime.h"
34#include "serialize.h"
35
kasperl@chromium.org71affb52009-05-26 05:44:31 +000036namespace v8 {
37namespace internal {
christian.plesner.hansen43d26ec2008-07-03 15:10:15 +000038
kasperl@chromium.org7be3c992009-03-12 07:19:55 +000039// -------------------------------------------------------------------------
40// MacroAssembler implementation.
41
// Construct a macro assembler that emits into the given buffer.  Stub
// generation and stub calls are enabled/tracked via the flags below;
// the code object is initially undefined and patched later (see
// EnterFrame's debug check).
MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      unresolved_(0),            // number of unresolved external references
      generating_stub_(false),   // true while generating a code stub
      allow_stub_calls_(true),   // stubs that must not nest clear this
      code_object_(Heap::undefined_value()) {
}
49
50
// Emit code that sets the remembered-set bit for the slot whose address
// is in 'addr' inside the object pointed to by 'object'.  Handles both
// normal pages and the extra remembered set of large objects.  Clobbers
// 'object', 'addr' and 'scratch'.
static void RecordWriteHelper(MacroAssembler* masm,
                              Register object,
                              Register addr,
                              Register scratch) {
  Label fast;

  // Compute the page address from the heap object pointer, leave it
  // in 'object'.
  masm->and_(object, ~Page::kPageAlignmentMask);

  // Compute the bit addr in the remembered set, leave it in "addr".
  masm->sub(addr, Operand(object));
  masm->shr(addr, kObjectAlignmentBits);

  // If the bit offset lies beyond the normal remembered set range, it is in
  // the extra remembered set area of a large object.
  masm->cmp(addr, Page::kPageSize / kPointerSize);
  masm->j(less, &fast);

  // Adjust 'addr' to be relative to the start of the extra remembered set
  // and the page address in 'object' to be the address of the extra
  // remembered set.
  masm->sub(Operand(addr), Immediate(Page::kPageSize / kPointerSize));
  // Load the array length into 'scratch' and multiply by four to get the
  // size in bytes of the elements.
  masm->mov(scratch, Operand(object, Page::kObjectStartOffset
                                     + FixedArray::kLengthOffset));
  masm->shl(scratch, kObjectAlignmentBits);
  // Add the page header, array header, and array body size to the page
  // address.
  masm->add(Operand(object), Immediate(Page::kObjectStartOffset
                                       + Array::kHeaderSize));
  masm->add(object, Operand(scratch));

  // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
  // to limit code size. We should probably evaluate this decision by
  // measuring the performance of an equivalent implementation using
  // "simpler" instructions.
  masm->bind(&fast);
  masm->bts(Operand(object, 0), addr);
}
93
94
// Code stub that performs the out-of-line part of RecordWrite (see
// RecordWriteHelper).  Sharing the code through a stub keeps inline
// record-write sequences short.
class RecordWriteStub : public CodeStub {
 public:
  RecordWriteStub(Register object, Register addr, Register scratch)
      : object_(object), addr_(addr), scratch_(scratch) { }

  void Generate(MacroAssembler* masm);

 private:
  Register object_;   // register holding the object being stored into
  Register addr_;     // register holding the slot address
  Register scratch_;  // scratch register, clobbered

#ifdef DEBUG
  void Print() {
    PrintF("RecordWriteStub (object reg %d), (addr reg %d), (scratch reg %d)\n",
           object_.code(), addr_.code(), scratch_.code());
  }
#endif

  // Minor key encoding in 12 bits of three registers (object, address and
  // scratch) OOOOAAAASSSS.
  class ScratchBits: public BitField<uint32_t, 0, 4> {};
  class AddressBits: public BitField<uint32_t, 4, 4> {};
  class ObjectBits: public BitField<uint32_t, 8, 4> {};

  Major MajorKey() { return RecordWrite; }

  int MinorKey() {
    // Encode the registers.
    return ObjectBits::encode(object_.code()) |
           AddressBits::encode(addr_.code()) |
           ScratchBits::encode(scratch_.code());
  }
};
129
130
// Generate the stub body: set the remembered-set bit, then return.
void RecordWriteStub::Generate(MacroAssembler* masm) {
  RecordWriteHelper(masm, object_, addr_, scratch_);
  masm->ret(0);
}
135
136
137// Set the remembered set bit for [object+offset].
138// object is the object being stored into, value is the object being stored.
139// If offset is zero, then the scratch register contains the array index into
140// the elements array represented as a Smi.
141// All registers are clobbered by the operation.
void MacroAssembler::RecordWrite(Register object, int offset,
                                 Register value, Register scratch) {
  // First, check if a remembered set write is even needed. The tests below
  // catch stores of Smis and stores into young gen (which does not have
  // space for the remembered set bits).
  Label done;

  // This optimization cannot survive serialization and deserialization,
  // so we disable as long as serialization can take place.
  int32_t new_space_start =
      reinterpret_cast<int32_t>(ExternalReference::new_space_start().address());
  if (Serializer::enabled() || new_space_start < 0) {
    // Cannot do smart bit-twiddling. Need to do two consecutive checks.
    // Check for Smi first.
    test(value, Immediate(kSmiTagMask));
    j(zero, &done);
    // Test that the object address is not in the new space. We cannot
    // set remembered set bits in the new space.
    mov(value, Operand(object));
    and_(value, Heap::NewSpaceMask());
    cmp(Operand(value), Immediate(ExternalReference::new_space_start()));
    j(equal, &done);
  } else {
    // Move the value Smi tag into the sign bit.
    shl(value, 31);
    // Combine the object with the value Smi tag.
    or_(value, Operand(object));
    // Remove the uninteresting bits inside the page.
    and_(value, Heap::NewSpaceMask() | (1 << 31));
    // xor has two effects:
    // - if the value was a smi, then the result will be negative
    // - if the object is pointing into new space area the page bits will
    //   all be zero
    xor_(value, new_space_start | (1 << 31));
    // Check for both conditions in one branch.
    j(less_equal, &done);
  }

  if ((offset > 0) && (offset < Page::kMaxHeapObjectSize)) {
    // Known small offset: inline the bit computation.
    // Compute the bit offset in the remembered set, leave it in 'value'.
    mov(value, Operand(object));
    and_(value, Page::kPageAlignmentMask);
    add(Operand(value), Immediate(offset));
    shr(value, kObjectAlignmentBits);

    // Compute the page address from the heap object pointer, leave it in
    // 'object'.
    and_(object, ~Page::kPageAlignmentMask);

    // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
    // to limit code size. We should probably evaluate this decision by
    // measuring the performance of an equivalent implementation using
    // "simpler" instructions.
    bts(Operand(object, 0), value);
  } else {
    // Large or unknown offset: compute the slot address and go through
    // the shared helper/stub.
    Register dst = scratch;
    if (offset != 0) {
      lea(dst, Operand(object, offset));
    } else {
      // Array access: calculate the destination address in the same manner
      // as KeyedStoreIC::GenerateGeneric (scratch holds the Smi index).
      lea(dst,
          Operand(object, dst, times_2, Array::kHeaderSize - kHeapObjectTag));
    }
    // If we are already generating a shared stub, not inlining the
    // record write code isn't going to save us any memory.
    if (generating_stub()) {
      RecordWriteHelper(this, object, dst, value);
    } else {
      RecordWriteStub stub(object, dst, value);
      CallStub(&stub);
    }
  }

  bind(&done);
}
218
219
ager@chromium.org65dad4b2009-04-23 08:48:43 +0000220#ifdef ENABLE_DEBUGGER_SUPPORT
// Store each JS caller-saved register named in 'regs' to its dedicated
// debugger memory location (Debug_Address::Register(i)).  Used by the
// debugger to preserve register state across break points.
void MacroAssembler::SaveRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of registers to memory location.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(Operand::StaticVariable(reg_addr), reg);
    }
  }
}
234
235
// Inverse of SaveRegistersToMemory: reload each register in 'regs' from
// its debugger memory location.  Iterates in reverse so the ordering
// mirrors the save (not semantically required for plain moves, but kept
// symmetric with the push/pop variants below).
void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of memory location to registers.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(reg, Operand::StaticVariable(reg_addr));
    }
  }
}
249
250
// Push the saved (in-memory) copies of the registers in 'regs' onto the
// stack, in ascending register-code order.  PopRegistersToMemory walks
// the opposite direction so the two are stack-symmetric.
void MacroAssembler::PushRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Push the content of the memory location to the stack.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      push(Operand::StaticVariable(reg_addr));
    }
  }
}
263
264
// Pop values pushed by PushRegistersFromMemory back into the debugger
// memory locations.  Iterates in descending order to match the push
// order above.
void MacroAssembler::PopRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Pop the content from the stack to the memory location.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      pop(Operand::StaticVariable(reg_addr));
    }
  }
}
277
278
// Copy register values that were spilled to the stack (at 'base',
// growing upward) into the debugger memory locations, advancing 'base'
// by one pointer per copied register.  'scratch' is clobbered.
void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
                                                    Register scratch,
                                                    RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of the stack to the memory location and adjust base.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      mov(scratch, Operand(base, 0));
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(Operand::StaticVariable(reg_addr), scratch);
      lea(base, Operand(base, kPointerSize));
    }
  }
}
ager@chromium.org65dad4b2009-04-23 08:48:43 +0000295#endif
christian.plesner.hansen43d26ec2008-07-03 15:10:15 +0000296
297void MacroAssembler::Set(Register dst, const Immediate& x) {
298 if (x.is_zero()) {
299 xor_(dst, Operand(dst)); // shorter than mov
300 } else {
ager@chromium.org3bf7b912008-11-17 09:09:45 +0000301 mov(dst, x);
christian.plesner.hansen43d26ec2008-07-03 15:10:15 +0000302 }
303}
304
305
// Store the immediate 'x' to the memory operand 'dst'.  No xor trick is
// possible for a memory destination, so this is always a plain mov.
void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}
309
310
// Compare the instance type of 'heap_object' against 'type', leaving
// the object's map in 'map' as a side effect.  Sets the flags for a
// subsequent conditional jump (equal on match).
void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}
317
318
// Compare the instance-type byte of the given map against 'type'.
// Only sets flags; does not clobber any register.
void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       static_cast<int8_t>(type));
}
323
324
// Compare the two values on top of the FPU stack (popping both) and
// transfer the FPU condition codes into the CPU flags so an ordinary
// conditional jump can be used.  eax is saved/restored around the
// fnstsw/sahf sequence, which clobbers ax.
void MacroAssembler::FCmp() {
  fcompp();      // compare ST(0) with ST(1), pop both
  push(eax);     // preserve caller's eax across fnstsw_ax
  fnstsw_ax();   // FPU status word -> ax
  sahf();        // ah -> EFLAGS
  pop(eax);
}
332
333
// Build a standard internal frame: saved ebp, context (esi), a Smi
// frame-type marker and the code object.  The code object pushed here
// is initially undefined and is patched once the real code object
// exists; the debug check verifies the slot was not left unpatched.
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, Operand(esp));
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(CodeObject()));
  if (FLAG_debug_code) {
    cmp(Operand(esp, 0), Immediate(Factory::undefined_value()));
    Check(not_equal, "code object not properly patched");
  }
}
345
346
// Tear down a frame built by EnterFrame.  In debug mode, first verify
// that the frame on top actually carries the expected type marker.
void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (FLAG_debug_code) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, "stack frame types must match");
  }
  leave();  // mov esp, ebp; pop ebp
}
355
356
// Build an exit frame for calling out from JS to C++.  On entry eax
// holds the argument count; on exit edi holds argc, esi points at the
// first argument (argv), and esp is aligned for the C call with room
// reserved for the two C arguments (argc, argv).
void MacroAssembler::EnterExitFrame(StackFrame::Type type) {
  ASSERT(type == StackFrame::EXIT || type == StackFrame::EXIT_DEBUG);

  // Setup the frame structure on the stack.
  ASSERT(ExitFrameConstants::kPPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset ==  0 * kPointerSize);
  push(ebp);
  mov(ebp, Operand(esp));

  // Reserve room for entry stack pointer and push the debug marker.
  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // saved entry sp, patched before call
  push(Immediate(type == StackFrame::EXIT_DEBUG ? 1 : 0));

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  ExternalReference context_address(Top::k_context_address);
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);

  // Setup argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, Operand(eax));
  lea(esi, Operand(ebp, eax, times_4, offset));

#ifdef ENABLE_DEBUGGER_SUPPORT
  // Save the state of all registers to the stack from the memory
  // location. This is needed to allow nested break points.
  if (type == StackFrame::EXIT_DEBUG) {
    // TODO(1243899): This should be symmetric to
    // CopyRegistersFromStackToMemory() but it isn't! esp is assumed
    // correct here, but computed for the other call. Very error
    // prone! FIX THIS. Actually there are deeper problems with
    // register saving than this asymmetry (see the bug report
    // associated with this issue).
    PushRegistersFromMemory(kJSCallerSaved);
  }
#endif

  // Reserve space for two arguments: argc and argv.
  sub(Operand(esp), Immediate(2 * kPointerSize));

  // Get the required frame alignment for the OS.
  static const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}
410
411
// Tear down an exit frame built by EnterExitFrame: restore debugger
// register copies (debug frames only), pop arguments and receiver,
// restore the context, and clear the saved C entry frame pointer.
// Assumes esi still points at the first argument (set by
// EnterExitFrame) and clobbers ecx/ebx.
void MacroAssembler::LeaveExitFrame(StackFrame::Type type) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Restore the memory copy of the registers by digging them out from
  // the stack. This is needed to allow nested break points.
  if (type == StackFrame::EXIT_DEBUG) {
    // It's okay to clobber register ebx below because we don't need
    // the function pointer after this.
    const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
    int kOffset = ExitFrameConstants::kDebugMarkOffset - kCallerSavedSize;
    lea(ebx, Operand(ebp, kOffset));
    CopyRegistersFromStackToMemory(ebx, ecx, kJSCallerSaved);
  }
#endif

  // Get the return address from the stack and restore the frame pointer.
  mov(ecx, Operand(ebp, 1 * kPointerSize));
  mov(ebp, Operand(ebp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  lea(esp, Operand(esi, 1 * kPointerSize));

  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Top::k_context_address);
  mov(esi, Operand::StaticVariable(context_address));
#ifdef DEBUG
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Push the return address to get ready to return.
  push(ecx);

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}
447
448
// Push a new try handler frame on the stack and link it into the
// handler chain held in Top::k_handler_address.  The layout pushed here
// must match StackHandlerConstants.
void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  ASSERT(StackHandlerConstants::kSize == 6 * kPointerSize);  // adjust this code
  // The pc (return address) is already on TOS.
  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      push(Immediate(StackHandler::TRY_CATCH));
    } else {
      push(Immediate(StackHandler::TRY_FINALLY));
    }
    push(Immediate(Smi::FromInt(StackHandler::kCodeNotPresent)));
    push(ebp);
    push(edi);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The parameter pointer is meaningless here and ebp does not
    // point to a JS frame. So we save NULL for both pp and ebp. We
    // expect the code throwing an exception to check ebp before
    // dereferencing it to restore the context.
    push(Immediate(StackHandler::ENTRY));
    push(Immediate(Smi::FromInt(StackHandler::kCodeNotPresent)));
    push(Immediate(0));  // NULL frame pointer
    push(Immediate(0));  // NULL parameter pointer
  }
  // Cached TOS: load the previous chain head into eax.
  mov(eax, Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
  // Link this handler as the new chain head.
  mov(Operand::StaticVariable(ExternalReference(Top::k_handler_address)), esp);
}
478
479
// Walk the prototype chain from 'object' to 'holder' at code-generation
// time, emitting a map check (and, for global proxies, a security
// check) for each object on the chain.  Jumps to 'miss' on any check
// failure.  Returns the register that holds the holder at the end of
// the emitted code (object_reg if the chain has length one, holder_reg
// otherwise).
Register MacroAssembler::CheckMaps(JSObject* object, Register object_reg,
                                   JSObject* holder, Register holder_reg,
                                   Register scratch,
                                   Label* miss) {
  // Make sure there's no overlap between scratch and the other
  // registers.
  ASSERT(!scratch.is(object_reg) && !scratch.is(holder_reg));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 1;

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  while (object != holder) {
    depth++;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

    JSObject* prototype = JSObject::cast(object->GetPrototype());
    if (Heap::InNewSpace(prototype)) {
      // Get the map of the current object.
      mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
      cmp(Operand(scratch), Immediate(Handle<Map>(object->map())));
      // Branch on the result of the map check.
      j(not_equal, miss, not_taken);
      // Check access rights to the global object. This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (object->IsJSGlobalProxy()) {
        CheckAccessGlobalProxy(reg, scratch, miss);

        // Restore scratch register to be the map of the object.
        // We load the prototype from the map in the scratch register.
        mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
      }
      // The prototype is in new space; we cannot store a reference
      // to it in the code. Load it from the map.
      reg = holder_reg;  // from now the object is in holder_reg
      mov(reg, FieldOperand(scratch, Map::kPrototypeOffset));

    } else {
      // Check the map of the current object.
      cmp(FieldOperand(reg, HeapObject::kMapOffset),
          Immediate(Handle<Map>(object->map())));
      // Branch on the result of the map check.
      j(not_equal, miss, not_taken);
      // Check access rights to the global object. This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (object->IsJSGlobalProxy()) {
        CheckAccessGlobalProxy(reg, scratch, miss);
      }
      // The prototype is in old space; load it directly.
      reg = holder_reg;  // from now the object is in holder_reg
      mov(reg, Handle<JSObject>(prototype));
    }

    // Go to the next object in the prototype chain.
    object = prototype;
  }

  // Check the holder map.
  cmp(FieldOperand(reg, HeapObject::kMapOffset),
      Immediate(Handle<Map>(holder->map())));
  j(not_equal, miss, not_taken);

  // Log the check depth.
  LOG(IntEvent("check-maps-depth", depth));

  // Perform security check for access to the global object and return
  // the holder register.
  ASSERT(object == holder);
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
  if (object->IsJSGlobalProxy()) {
    CheckAccessGlobalProxy(reg, scratch, miss);
  }
  return reg;
}
561
562
// Emit a security check for accessing the global object through the
// proxy in 'holder_reg'.  Falls through if the current lexical context
// and the proxy's context are the same, or if their security tokens
// match; otherwise jumps to 'miss'.  Clobbers 'scratch'; 'holder_reg'
// is preserved (saved/restored around the token comparison).
void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));

  // Load current lexical context from the stack frame.
  mov(scratch, Operand(ebp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (FLAG_debug_code) {
    cmp(Operand(scratch), Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
  }
  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, offset));
  mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    push(scratch);
    // Read the first word and compare to global_context_map.
    mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
    cmp(scratch, Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(scratch);
  }

  // Check if both contexts are the same.
  cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  j(equal, &same_contexts, taken);

  // Compare security tokens, save holder_reg on the stack so we can use it
  // as a temporary register.
  //
  // TODO(119): avoid push(holder_reg)/pop(holder_reg)
  push(holder_reg);
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    cmp(holder_reg, Factory::null_value());
    Check(not_equal, "JSGlobalProxy::context() should not be null.");

    push(holder_reg);
    // Read the first word and compare to global_context_map(),
    mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    cmp(holder_reg, Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);
  }

  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, token_offset));
  cmp(scratch, FieldOperand(holder_reg, token_offset));
  pop(holder_reg);
  j(not_equal, miss, not_taken);

  bind(&same_contexts);
}
629
630
// JumpTarget variant of NegativeZeroTest for the virtual-frame code
// generator: if 'result' is zero and 'op' is negative (i.e. the result
// should have been -0), branch to 'then_target'.
void MacroAssembler::NegativeZeroTest(CodeGenerator* cgen,
                                      Register result,
                                      Register op,
                                      JumpTarget* then_target) {
  JumpTarget ok;
  test(result, Operand(result));
  ok.Branch(not_zero, taken);       // nonzero result: cannot be -0
  test(op, Operand(op));
  then_target->Branch(sign, not_taken);  // zero result, negative operand: -0
  ok.Bind();
}
642
643
// If 'result' is zero and 'op' is negative, jump to 'then_label': the
// mathematically correct result would have been -0, which cannot be
// represented as a Smi.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);      // nonzero result: cannot be -0
  test(op, Operand(op));
  j(sign, then_label, not_taken);
  bind(&ok);
}
654
655
// Two-operand variant: if 'result' is zero and either op1 or op2 is
// negative (detected by or-ing them into 'scratch' and testing the
// sign bit), jump to 'then_label'.  Clobbers 'scratch'.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);      // nonzero result: cannot be -0
  mov(scratch, Operand(op1));
  or_(scratch, Operand(op2));   // sign set iff at least one op is negative
  j(sign, then_label, not_taken);
  bind(&ok);
}
669
670
// Load the prototype of the JSFunction in 'function' into 'result'.
// Jumps to 'miss' if 'function' is a smi, not a function, or has a
// hole where its prototype/initial map should be.  Clobbers 'scratch'.
void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss) {
  // Check that the receiver isn't a smi.
  test(function, Immediate(kSmiTagMask));
  j(zero, miss, not_taken);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss, not_taken);

  // Make sure that the function has an instance prototype.
  Label non_instance;
  movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
  test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance, not_taken);

  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(Operand(result), Immediate(Factory::the_hole_value()));
  j(equal, miss, not_taken);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  mov(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}
716
717
// Emit a call to the given code stub.
void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // calls are not allowed in some stubs
  call(stub->GetCode(), RelocInfo::CODE_TARGET);
}
722
723
// Return from a stub, popping 'argc - 1' arguments (the receiver/first
// argument is accounted for by the return itself).
void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}
728
729
// Recover from an illegal runtime-call arity mismatch: drop the
// arguments from the stack and produce undefined as the result in eax.
void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    add(Operand(esp), Immediate(num_arguments * kPointerSize));
  }
  mov(eax, Immediate(Factory::undefined_value()));
}
736
737
// Convenience overload: resolve the function id and delegate to the
// Runtime::Function* overload below.
void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}
741
742
743void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
mads.s.ager31e71382008-08-13 09:32:07 +0000744 // If the expected number of arguments of the runtime function is
745 // constant, we check that the actual number of arguments match the
746 // expectation.
747 if (f->nargs >= 0 && f->nargs != num_arguments) {
kasperl@chromium.org41044eb2008-10-06 08:24:46 +0000748 IllegalOperation(num_arguments);
christian.plesner.hansen43d26ec2008-07-03 15:10:15 +0000749 return;
750 }
751
mads.s.ager31e71382008-08-13 09:32:07 +0000752 Runtime::FunctionId function_id =
753 static_cast<Runtime::FunctionId>(f->stub_id);
754 RuntimeStub stub(function_id, num_arguments);
christian.plesner.hansen43d26ec2008-07-03 15:10:15 +0000755 CallStub(&stub);
756}
757
758
// Tail-call a runtime routine through the C entry builtin, passing the
// argument count in eax.
void MacroAssembler::TailCallRuntime(const ExternalReference& ext,
                                     int num_arguments) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  JumpToBuiltin(ext);
}
768
769
// Jump (not call) to the builtin at 'ext' via the C entry stub, with
// the entry point passed in ebx.
void MacroAssembler::JumpToBuiltin(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces;
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}
776
777
// Shared prologue for the InvokeCode variants.  Emits the arguments
// adaption check: when expected and actual counts provably match no
// code is emitted at all; otherwise the counts are placed in eax
// (actual) and ebx (expected) — see the ASSERTs below — and a mismatch
// path through the ArgumentsAdaptorTrampoline is emitted.  On the
// CALL_FUNCTION path the adaptor returns and we jump to |done|; on the
// jump path control never comes back here.
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    const Operand& code_operand,
                                    Label* done,
                                    InvokeFlag flag) {
  bool definitely_matches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      // Compile-time match: skip the adaptor entirely.
      definitely_matches = true;
    } else {
      mov(eax, actual.immediate());
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      ASSERT(expected.reg().is(ebx));
      mov(eax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), Operand(actual.reg()));
      j(equal, &invoke);
      ASSERT(actual.reg().is(eax));
      ASSERT(expected.reg().is(ebx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
    if (!code_constant.is_null()) {
      // The adaptor receives the target code object in edx; convert the
      // code handle to an entry address by skipping the Code header.
      mov(edx, Immediate(code_constant));
      add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_operand.is_reg(edx)) {
      mov(edx, code_operand);
    }

    if (flag == CALL_FUNCTION) {
      call(adaptor, RelocInfo::CODE_TARGET);
      // The adaptor returned: skip the direct invoke emitted after us.
      jmp(done);
    } else {
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    // Counts matched at runtime: fall through to the direct invoke.
    bind(&invoke);
  }
}
841
842
// Invoke code referenced by an Operand (register or memory), preceded
// by the arguments-adaption prologue.  CALL_FUNCTION emits a call and
// execution continues at |done|; JUMP_FUNCTION emits a tail jump.
void MacroAssembler::InvokeCode(const Operand& code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag) {
  Label done;
  // No constant code handle available here, hence Handle<Code>::null().
  InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code);
  }
  // Target of the prologue's post-adaptor jump.
  bind(&done);
}
857
858
// Invoke a constant code object with the given relocation mode,
// preceded by the arguments-adaption prologue.  The dummy operand is
// never emitted — the prologue uses the constant handle instead.
void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag) {
  Label done;
  Operand dummy(eax);
  InvokePrologue(expected, actual, code, dummy, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code, rmode);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code, rmode);
  }
  // Target of the prologue's post-adaptor jump.
  bind(&done);
}
875
876
// Invoke the JSFunction held in |fun| (which must be edi): loads the
// function's context into esi, its formal parameter count into ebx, and
// computes the code entry address in edx before dispatching.
void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(fun.is(edi));
  mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset));
  // Skip the Code object header to get the instruction start.
  lea(edx, FieldOperand(edx, Code::kHeaderSize));

  // Expected argument count comes from the shared function info (ebx).
  ParameterCount expected(ebx);
  InvokeCode(Operand(edx), expected, actual, flag);
}
890
891
892void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
893 bool resolved;
894 Handle<Code> code = ResolveBuiltin(id, &resolved);
895
kasperl@chromium.org7be3c992009-03-12 07:19:55 +0000896 // Calls are not allowed in some stubs.
kasper.lund7276f142008-07-30 08:49:36 +0000897 ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());
christian.plesner.hansen43d26ec2008-07-03 15:10:15 +0000898
899 // Rely on the assertion to check that the number of provided
900 // arguments match the expected number of arguments. Fake a
901 // parameter count to avoid emitting code to do the check.
902 ParameterCount expected(0);
ager@chromium.org236ad962008-09-25 09:45:57 +0000903 InvokeCode(Handle<Code>(code), expected, expected,
904 RelocInfo::CODE_TARGET, flag);
christian.plesner.hansen43d26ec2008-07-03 15:10:15 +0000905
906 const char* name = Builtins::GetName(id);
907 int argc = Builtins::GetArgumentsCount(id);
908
909 if (!resolved) {
910 uint32_t flags =
911 Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
ager@chromium.org3bf7b912008-11-17 09:09:45 +0000912 Bootstrapper::FixupFlagsIsPCRelative::encode(true) |
913 Bootstrapper::FixupFlagsUseCodeObject::encode(false);
christian.plesner.hansen43d26ec2008-07-03 15:10:15 +0000914 Unresolved entry = { pc_offset() - sizeof(int32_t), flags, name };
915 unresolved_.Add(entry);
916 }
917}
918
919
// Load the entry address of a JS builtin into |target|: materializes
// the builtin's code object and skips its header.  As in InvokeBuiltin,
// unresolved builtins get a fixup record for the bootstrapper — here
// with UseCodeObject set, since the emitted word is a code object
// reference rather than a pc-relative target.
void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  bool resolved;
  Handle<Code> code = ResolveBuiltin(id, &resolved);

  const char* name = Builtins::GetName(id);
  int argc = Builtins::GetArgumentsCount(id);

  mov(Operand(target), Immediate(code));
  if (!resolved) {
    // The 32-bit immediate just emitted is what the bootstrapper patches.
    uint32_t flags =
        Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
        Bootstrapper::FixupFlagsIsPCRelative::encode(false) |
        Bootstrapper::FixupFlagsUseCodeObject::encode(true);
    Unresolved entry = { pc_offset() - sizeof(int32_t), flags, name };
    unresolved_.Add(entry);
  }
  // Convert the tagged code object pointer into an entry address.
  add(Operand(target), Immediate(Code::kHeaderSize - kHeapObjectTag));
}
938
939
940Handle<Code> MacroAssembler::ResolveBuiltin(Builtins::JavaScript id,
941 bool* resolved) {
942 // Move the builtin function into the temporary function slot by
943 // reading it from the builtins object. NOTE: We should be able to
944 // reduce this to two instructions by putting the function table in
945 // the global object instead of the "builtins" object and by using a
946 // real register for the function.
947 mov(edx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
948 mov(edx, FieldOperand(edx, GlobalObject::kBuiltinsOffset));
949 int builtins_offset =
950 JSBuiltinsObject::kJSBuiltinsOffset + (id * kPointerSize);
951 mov(edi, FieldOperand(edx, builtins_offset));
952
christian.plesner.hansen43d26ec2008-07-03 15:10:15 +0000953
kasperl@chromium.orgb9123622008-09-17 14:05:56 +0000954 return Builtins::GetCode(id, resolved);
christian.plesner.hansen43d26ec2008-07-03 15:10:15 +0000955}
956
957
// Emit a plain return that pops no extra bytes off the stack.
void MacroAssembler::Ret() {
  ret(0);
}
961
962
963void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
964 if (FLAG_native_code_counters && counter->Enabled()) {
965 mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
966 }
967}
968
969
970void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
971 ASSERT(value > 0);
972 if (FLAG_native_code_counters && counter->Enabled()) {
973 Operand operand = Operand::StaticVariable(ExternalReference(counter));
974 if (value == 1) {
975 inc(operand);
976 } else {
977 add(operand, Immediate(value));
978 }
979 }
980}
981
982
983void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
984 ASSERT(value > 0);
985 if (FLAG_native_code_counters && counter->Enabled()) {
986 Operand operand = Operand::StaticVariable(ExternalReference(counter));
987 if (value == 1) {
988 dec(operand);
989 } else {
990 sub(operand, Immediate(value));
991 }
992 }
993}
994
995
// Emit a Check (condition or abort) only when FLAG_debug_code is set;
// otherwise emits nothing at all.
void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code) Check(cc, msg);
}
999
1000
// Emit code that aborts with |msg| unless condition |cc| holds: a
// (predicted-taken) jump over an Abort sequence.
void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L, taken);
  Abort(msg);
  // will not return here
  bind(&L);
}
1008
1009
// Emit code that calls Runtime::kAbort with the message pointer encoded
// as two smis (aligned base p0 plus the alignment delta p1 - p0), so a
// GC scanning the arguments never sees a misaligned raw pointer.
void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    // Make the abort message findable in disassembly dumps.
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // eax is pushed to keep the stack layout the runtime entry expects;
  // the two smi-encoded words are the runtime call's two arguments.
  push(eax);
  push(Immediate(p0));
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
}
1031
1032
// RAII helper for patching |size| bytes of already-generated code in
// place; the destructor flushes the instruction cache and verifies the
// patch filled the region exactly.
CodePatcher::CodePatcher(byte* address, int size)
    : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
1040
1041
CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected: exactly size_ bytes
  // were emitted and no relocation information was written.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
1050
1051
1052} } // namespace v8::internal