// Copyright 2006-2008 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "debug.h"
#include "runtime.h"
#include "serialize.h"

namespace v8 {
namespace internal {

// -------------------------------------------------------------------------
// MacroAssembler implementation.

MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      unresolved_(0),
      generating_stub_(false),
      allow_stub_calls_(true),
      code_object_(Heap::undefined_value()) {
}


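// Helper that sets the remembered set bit for the slot at address 'addr'
// inside 'object'.  Illustrative sketch of the fast path (hypothetical
// numbers, assuming kObjectAlignmentBits == kPointerSizeLog2 == 2): for a
// slot at page_start + 0x20c, the bit index passed to bts below is
// 0x20c >> 2 == 0x83, counted from the start of the page.  Bit indices of
// at least Page::kPageSize / kPointerSize fall into the extra remembered
// set that a large-object page keeps behind the array body.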
static void RecordWriteHelper(MacroAssembler* masm,
                              Register object,
                              Register addr,
                              Register scratch) {
  Label fast;

  // Compute the page address from the heap object pointer, leave it
  // in 'object'.
  masm->and_(object, ~Page::kPageAlignmentMask);

  // Compute the bit addr in the remembered set, leave it in 'addr'.
  masm->sub(addr, Operand(object));
  masm->shr(addr, kObjectAlignmentBits);

  // If the bit offset lies beyond the normal remembered set range, it is in
  // the extra remembered set area of a large object.
  masm->cmp(addr, Page::kPageSize / kPointerSize);
  masm->j(less, &fast);

  // Adjust 'addr' to be relative to the start of the extra remembered set
  // and the page address in 'object' to be the address of the extra
  // remembered set.
  masm->sub(Operand(addr), Immediate(Page::kPageSize / kPointerSize));
  // Load the array length into 'scratch' and multiply by four to get the
  // size in bytes of the elements.
  masm->mov(scratch, Operand(object, Page::kObjectStartOffset
                                     + FixedArray::kLengthOffset));
  masm->shl(scratch, kObjectAlignmentBits);
  // Add the page header, array header, and array body size to the page
  // address.
  masm->add(Operand(object), Immediate(Page::kObjectStartOffset
                                       + Array::kHeaderSize));
  masm->add(object, Operand(scratch));

  // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
  // to limit code size. We should probably evaluate this decision by
  // measuring the performance of an equivalent implementation using
  // "simpler" instructions.
  masm->bind(&fast);
  masm->bts(Operand(object, 0), addr);
}


class RecordWriteStub : public CodeStub {
 public:
  RecordWriteStub(Register object, Register addr, Register scratch)
      : object_(object), addr_(addr), scratch_(scratch) { }

  void Generate(MacroAssembler* masm);

 private:
  Register object_;
  Register addr_;
  Register scratch_;

#ifdef DEBUG
  void Print() {
    PrintF("RecordWriteStub (object reg %d), (addr reg %d), (scratch reg %d)\n",
           object_.code(), addr_.code(), scratch_.code());
  }
#endif

  // Minor key encoding in 12 bits of three registers (object, address and
  // scratch) OOOOAAAASSSS.
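  // Illustration (a hypothetical combination, assuming the usual ia32
  // register codes eax=0, ecx=1, edx=2, ebx=3): object=ebx, addr=ecx,
  // scratch=edx encodes as (3 << 8) | (1 << 4) | 2 == 0x312.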
  class ScratchBits: public BitField<uint32_t, 0, 4> {};
  class AddressBits: public BitField<uint32_t, 4, 4> {};
  class ObjectBits: public BitField<uint32_t, 8, 4> {};

  Major MajorKey() { return RecordWrite; }

  int MinorKey() {
    // Encode the registers.
    return ObjectBits::encode(object_.code()) |
           AddressBits::encode(addr_.code()) |
           ScratchBits::encode(scratch_.code());
  }
};


void RecordWriteStub::Generate(MacroAssembler* masm) {
  RecordWriteHelper(masm, object_, addr_, scratch_);
  masm->ret(0);
}


// Set the remembered set bit for [object+offset].
// object is the object being stored into, value is the object being stored.
// If offset is zero, then the scratch register contains the array index into
// the elements array represented as a Smi.
// All registers are clobbered by the operation.
void MacroAssembler::RecordWrite(Register object, int offset,
                                 Register value, Register scratch) {
  // First, check if a remembered set write is even needed. The tests below
  // catch stores of Smis and stores into young gen (which does not have space
  // for the remembered set bits).
  Label done;

  // This optimization cannot survive serialization and deserialization,
  // so we disable it as long as serialization can take place.
  int32_t new_space_start =
      reinterpret_cast<int32_t>(ExternalReference::new_space_start().address());
  if (Serializer::enabled() || new_space_start < 0) {
    // Cannot do smart bit-twiddling. Need to do two consecutive checks.
    // Check for Smi first.
    test(value, Immediate(kSmiTagMask));
    j(zero, &done);
    // Test that the object address is not in the new space. We cannot
    // set remembered set bits in the new space.
    mov(value, Operand(object));
    and_(value, Heap::NewSpaceMask());
    cmp(Operand(value), Immediate(ExternalReference::new_space_start()));
    j(equal, &done);
  } else {
    // Move the value's Smi tag into the sign bit.
    shl(value, 31);
    // Combine the object address with the value's Smi tag.
    or_(value, Operand(object));
    // Remove the uninteresting bits inside the page.
    and_(value, Heap::NewSpaceMask() | (1 << 31));
    // The xor has two effects:
    // - if the value was a smi, the result will be negative
    // - if the object points into the new space area, the page bits will
    //   all be zero
    xor_(value, new_space_start | (1 << 31));
    // Check for both conditions in one branch.
    j(less_equal, &done);
  }

  if ((offset > 0) && (offset < Page::kMaxHeapObjectSize)) {
    // Compute the bit offset in the remembered set, leave it in 'value'.
    mov(value, Operand(object));
    and_(value, Page::kPageAlignmentMask);
    add(Operand(value), Immediate(offset));
    shr(value, kObjectAlignmentBits);

    // Compute the page address from the heap object pointer, leave it in
    // 'object'.
    and_(object, ~Page::kPageAlignmentMask);

    // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
    // to limit code size. We should probably evaluate this decision by
    // measuring the performance of an equivalent implementation using
    // "simpler" instructions.
    bts(Operand(object, 0), value);
  } else {
    Register dst = scratch;
    if (offset != 0) {
      lea(dst, Operand(object, offset));
    } else {
      // Array access: calculate the destination address in the same manner
      // as KeyedStoreIC::GenerateGeneric.
      lea(dst,
          Operand(object, dst, times_2, Array::kHeaderSize - kHeapObjectTag));
    }
    // If we are already generating a shared stub, not inlining the
    // record write code isn't going to save us any memory.
    if (generating_stub()) {
      RecordWriteHelper(this, object, dst, value);
    } else {
      RecordWriteStub stub(object, dst, value);
      CallStub(&stub);
    }
  }

  bind(&done);
}


#ifdef ENABLE_DEBUGGER_SUPPORT
void MacroAssembler::SaveRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of registers to memory locations.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(Operand::StaticVariable(reg_addr), reg);
    }
  }
}


void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of memory locations to registers.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(reg, Operand::StaticVariable(reg_addr));
    }
  }
}


void MacroAssembler::PushRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Push the content of the memory locations to the stack.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      push(Operand::StaticVariable(reg_addr));
    }
  }
}


void MacroAssembler::PopRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Pop the content from the stack to the memory locations.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      pop(Operand::StaticVariable(reg_addr));
    }
  }
}


void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
                                                    Register scratch,
                                                    RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of the stack to the memory locations and adjust base.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      mov(scratch, Operand(base, 0));
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(Operand::StaticVariable(reg_addr), scratch);
      lea(base, Operand(base, kPointerSize));
    }
  }
}
#endif

void MacroAssembler::Set(Register dst, const Immediate& x) {
  if (x.is_zero()) {
    xor_(dst, Operand(dst));  // shorter than mov
  } else {
    mov(dst, x);
  }
}


void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}


void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}


void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       static_cast<int8_t>(type));
}


void MacroAssembler::FCmp() {
  fcompp();
  push(eax);
  fnstsw_ax();
  sahf();
  pop(eax);
}


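// Sketch of the frame EnterFrame builds, derived from the pushes below
// (offsets are relative to ebp after the 'mov ebp, esp'):
//   ebp + 4 : return address
//   ebp + 0 : saved caller ebp
//   ebp - 4 : context (esi)
//   ebp - 8 : frame type marker (as a Smi)
//   ebp - 12: code object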
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, Operand(esp));
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(CodeObject()));
  if (FLAG_debug_code) {
    cmp(Operand(esp, 0), Immediate(Factory::undefined_value()));
    Check(not_equal, "code object not properly patched");
  }
}


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (FLAG_debug_code) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, "stack frame types must match");
  }
  leave();
}


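// Sketch of the exit frame EnterExitFrame builds, matching the asserts and
// pushes below (offsets are relative to ebp after the 'mov ebp, esp'):
//   ebp + 8 : caller SP (kCallerSPDisplacement)
//   ebp + 4 : return address (kCallerPCOffset)
//   ebp + 0 : saved caller ebp (kCallerFPOffset)
//   ebp - 4 : saved entry sp, patched at the end (kSPOffset)
//   ebp - 8 : debug marker (1 for EXIT_DEBUG, 0 otherwise)
// Below that come the optional register save area (debug builds only) and
// two words reserved for argc/argv, with esp aligned for the OS.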
void MacroAssembler::EnterExitFrame(StackFrame::Type type) {
  ASSERT(type == StackFrame::EXIT || type == StackFrame::EXIT_DEBUG);

  // Set up the frame structure on the stack.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(ebp);
  mov(ebp, Operand(esp));

  // Reserve room for the entry stack pointer and push the debug marker.
  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  push(Immediate(type == StackFrame::EXIT_DEBUG ? 1 : 0));

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  ExternalReference context_address(Top::k_context_address);
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);

  // Set up argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, Operand(eax));
  lea(esi, Operand(ebp, eax, times_4, offset));

#ifdef ENABLE_DEBUGGER_SUPPORT
  // Save the state of all registers to the stack from the memory
  // location. This is needed to allow nested break points.
  if (type == StackFrame::EXIT_DEBUG) {
    // TODO(1243899): This should be symmetric to
    // CopyRegistersFromStackToMemory() but it isn't! esp is assumed
    // correct here, but computed for the other call. Very error
    // prone! FIX THIS. Actually there are deeper problems with
    // register saving than this asymmetry (see the bug report
    // associated with this issue).
    PushRegistersFromMemory(kJSCallerSaved);
  }
#endif

  // Reserve space for two arguments: argc and argv.
  sub(Operand(esp), Immediate(2 * kPointerSize));

  // Get the required frame alignment for the OS.
  static const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}


void MacroAssembler::LeaveExitFrame(StackFrame::Type type) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Restore the memory copy of the registers by digging them out from
  // the stack. This is needed to allow nested break points.
  if (type == StackFrame::EXIT_DEBUG) {
    // It's okay to clobber register ebx below because we don't need
    // the function pointer after this.
    const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
    int kOffset = ExitFrameConstants::kDebugMarkOffset - kCallerSavedSize;
    lea(ebx, Operand(ebp, kOffset));
    CopyRegistersFromStackToMemory(ebx, ecx, kJSCallerSaved);
  }
#endif

  // Get the return address from the stack and restore the frame pointer.
  mov(ecx, Operand(ebp, 1 * kPointerSize));
  mov(ebp, Operand(ebp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  lea(esp, Operand(esi, 1 * kPointerSize));

  // Restore the current context from top and clear it in debug mode.
  ExternalReference context_address(Top::k_context_address);
  mov(esi, Operand::StaticVariable(context_address));
#ifdef DEBUG
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Push the return address to get ready to return.
  push(ecx);

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}


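// Sketch of the four-word stack handler PushTryHandler links in, derived
// from the pushes below (the return address is already on the stack):
//   esp + 12: return address (pc)
//   esp + 8 : handler state (TRY_CATCH, TRY_FINALLY or ENTRY)
//   esp + 4 : frame pointer (ebp), or 0 for a JS entry handler
//   esp + 0 : next handler (previous value of Top::k_handler_address)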
void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  // The code below assumes a four-word stack handler; adjust it if the
  // handler size changes.
  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
  // The pc (return address) is already on TOS.
  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      push(Immediate(StackHandler::TRY_CATCH));
    } else {
      push(Immediate(StackHandler::TRY_FINALLY));
    }
    push(ebp);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The frame pointer does not point to a JS frame so we save NULL
    // for ebp. We expect the code throwing an exception to check ebp
    // before dereferencing it to restore the context.
    push(Immediate(StackHandler::ENTRY));
    push(Immediate(0));  // NULL frame pointer.
  }
  // Save the current handler as the next handler.
  push(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
  // Link this handler as the new current one.
  mov(Operand::StaticVariable(ExternalReference(Top::k_handler_address)), esp);
}


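// Walks the prototype chain from 'object' up to 'holder', comparing the map
// of each object on the way against its expected map and jumping to 'miss'
// on any mismatch or failed access check.  Returns the register that holds
// the current object when the walk ends: object_reg if object == holder,
// holder_reg otherwise.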
Register MacroAssembler::CheckMaps(JSObject* object, Register object_reg,
                                   JSObject* holder, Register holder_reg,
                                   Register scratch,
                                   Label* miss) {
  // Make sure there's no overlap between scratch and the other
  // registers.
  ASSERT(!scratch.is(object_reg) && !scratch.is(holder_reg));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 1;

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  while (object != holder) {
    depth++;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

    JSObject* prototype = JSObject::cast(object->GetPrototype());
    if (Heap::InNewSpace(prototype)) {
      // Get the map of the current object.
      mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
      cmp(Operand(scratch), Immediate(Handle<Map>(object->map())));
      // Branch on the result of the map check.
      j(not_equal, miss, not_taken);
      // Check access rights to the global object. This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (object->IsJSGlobalProxy()) {
        CheckAccessGlobalProxy(reg, scratch, miss);

        // Restore scratch register to be the map of the object.
        // We load the prototype from the map in the scratch register.
        mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
      }
      // The prototype is in new space; we cannot store a reference
      // to it in the code. Load it from the map.
      reg = holder_reg;  // From now on the object is in holder_reg.
      mov(reg, FieldOperand(scratch, Map::kPrototypeOffset));
    } else {
      // Check the map of the current object.
      cmp(FieldOperand(reg, HeapObject::kMapOffset),
          Immediate(Handle<Map>(object->map())));
      // Branch on the result of the map check.
      j(not_equal, miss, not_taken);
      // Check access rights to the global object. This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (object->IsJSGlobalProxy()) {
        CheckAccessGlobalProxy(reg, scratch, miss);
      }
      // The prototype is in old space; load it directly.
      reg = holder_reg;  // From now on the object is in holder_reg.
      mov(reg, Handle<JSObject>(prototype));
    }

    // Go to the next object in the prototype chain.
    object = prototype;
  }

  // Check the holder map.
  cmp(FieldOperand(reg, HeapObject::kMapOffset),
      Immediate(Handle<Map>(holder->map())));
  j(not_equal, miss, not_taken);

  // Log the check depth.
  LOG(IntEvent("check-maps-depth", depth));

  // Perform a security check for access to the global object and return
  // the holder register.
  ASSERT(object == holder);
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
  if (object->IsJSGlobalProxy()) {
    CheckAccessGlobalProxy(reg, scratch, miss);
  }
  return reg;
}


void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));

  // Load the current lexical context from the stack frame.
  mov(scratch, Operand(ebp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (FLAG_debug_code) {
    cmp(Operand(scratch), Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
  }
  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, offset));
  mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check that the context is a global context.
  if (FLAG_debug_code) {
    push(scratch);
    // Read the first word and compare to global_context_map.
    mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
    cmp(scratch, Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(scratch);
  }

  // Check if both contexts are the same.
  cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  j(equal, &same_contexts, taken);

  // Compare security tokens, save holder_reg on the stack so we can use it
  // as a temporary register.
  //
  // TODO(119): avoid push(holder_reg)/pop(holder_reg)
  push(holder_reg);
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));

  // Check that the context is a global context.
  if (FLAG_debug_code) {
    cmp(holder_reg, Factory::null_value());
    Check(not_equal, "JSGlobalProxy::context() should not be null.");

    push(holder_reg);
    // Read the first word and compare to global_context_map().
    mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    cmp(holder_reg, Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);
  }

  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, token_offset));
  cmp(scratch, FieldOperand(holder_reg, token_offset));
  pop(holder_reg);
  j(not_equal, miss, not_taken);

  bind(&same_contexts);
}


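// The NegativeZeroTest helpers below branch to the given target when a zero
// result may actually need to be -0: the result is zero but (at least) one
// operand is negative, so integer code would lose the sign and the caller
// has to fall back to a slower path.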
void MacroAssembler::NegativeZeroTest(CodeGenerator* cgen,
                                      Register result,
                                      Register op,
                                      JumpTarget* then_target) {
  JumpTarget ok;
  test(result, Operand(result));
  ok.Branch(not_zero, taken);
  test(op, Operand(op));
  then_target->Branch(sign, not_taken);
  ok.Bind();
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);
  test(op, Operand(op));
  j(sign, then_label, not_taken);
  bind(&ok);
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);
  mov(scratch, Operand(op1));
  or_(scratch, Operand(op2));
  j(sign, then_label, not_taken);
  bind(&ok);
}


void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss) {
  // Check that the receiver isn't a smi.
  test(function, Immediate(kSmiTagMask));
  j(zero, miss, not_taken);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss, not_taken);

  // Make sure that the function has an instance prototype.
  Label non_instance;
  movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
  test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance, not_taken);

  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(Operand(result), Immediate(Factory::the_hole_value()));
  j(equal, miss, not_taken);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  mov(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}


void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  call(stub->GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    add(Operand(esp), Immediate(num_arguments * kPointerSize));
  }
  mov(eax, Immediate(Factory::undefined_value()));
}


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  Runtime::FunctionId function_id =
      static_cast<Runtime::FunctionId>(f->stub_id);
  RuntimeStub stub(function_id, num_arguments);
  CallStub(&stub);
}


void MacroAssembler::TailCallRuntime(const ExternalReference& ext,
                                     int num_arguments) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  JumpToBuiltin(ext);
}


void MacroAssembler::JumpToBuiltin(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces;
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}


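// Shared prologue for the InvokeCode/InvokeFunction family.  Roughly: if the
// expected and actual argument counts are known to match (or adaptation is
// explicitly not wanted), fall through to the invoke; otherwise put the
// actual count in eax and the expected count in ebx and call or jump to the
// ArgumentsAdaptorTrampoline builtin, skipping it when a run-time comparison
// shows the counts to be equal after all.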
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    const Operand& code_operand,
                                    Label* done,
                                    InvokeFlag flag) {
  bool definitely_matches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(eax, actual.immediate());
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaptation code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in a register, actual is an immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      ASSERT(expected.reg().is(ebx));
      mov(eax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), Operand(actual.reg()));
      j(equal, &invoke);
      ASSERT(actual.reg().is(eax));
      ASSERT(expected.reg().is(ebx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
    if (!code_constant.is_null()) {
      mov(edx, Immediate(code_constant));
      add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_operand.is_reg(edx)) {
      mov(edx, code_operand);
    }

    if (flag == CALL_FUNCTION) {
      call(adaptor, RelocInfo::CODE_TARGET);
      jmp(done);
    } else {
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}


void MacroAssembler::InvokeCode(const Operand& code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag) {
  Label done;
  InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code);
  }
  bind(&done);
}


void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag) {
  Label done;
  Operand dummy(eax);
  InvokePrologue(expected, actual, code, dummy, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code, rmode);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code, rmode);
  }
  bind(&done);
}


void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(fun.is(edi));
  mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset));
  lea(edx, FieldOperand(edx, Code::kHeaderSize));

  ParameterCount expected(ebx);
  InvokeCode(Operand(edx), expected, actual, flag);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
  bool resolved;
  Handle<Code> code = ResolveBuiltin(id, &resolved);

  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments matches the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  InvokeCode(Handle<Code>(code), expected, expected,
             RelocInfo::CODE_TARGET, flag);

  const char* name = Builtins::GetName(id);
  int argc = Builtins::GetArgumentsCount(id);

  if (!resolved) {
    uint32_t flags =
        Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
        Bootstrapper::FixupFlagsIsPCRelative::encode(true) |
        Bootstrapper::FixupFlagsUseCodeObject::encode(false);
    Unresolved entry = { pc_offset() - sizeof(int32_t), flags, name };
    unresolved_.Add(entry);
  }
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  bool resolved;
  Handle<Code> code = ResolveBuiltin(id, &resolved);

  const char* name = Builtins::GetName(id);
  int argc = Builtins::GetArgumentsCount(id);

  mov(Operand(target), Immediate(code));
  if (!resolved) {
    uint32_t flags =
        Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
        Bootstrapper::FixupFlagsIsPCRelative::encode(false) |
        Bootstrapper::FixupFlagsUseCodeObject::encode(true);
    Unresolved entry = { pc_offset() - sizeof(int32_t), flags, name };
    unresolved_.Add(entry);
  }
  add(Operand(target), Immediate(Code::kHeaderSize - kHeapObjectTag));
}


Handle<Code> MacroAssembler::ResolveBuiltin(Builtins::JavaScript id,
                                            bool* resolved) {
  // Move the builtin function into the temporary function slot by
  // reading it from the builtins object. NOTE: We should be able to
  // reduce this to two instructions by putting the function table in
  // the global object instead of the "builtins" object and by using a
  // real register for the function.
  mov(edx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  mov(edx, FieldOperand(edx, GlobalObject::kBuiltinsOffset));
  int builtins_offset =
      JSBuiltinsObject::kJSBuiltinsOffset + (id * kPointerSize);
  mov(edi, FieldOperand(edx, builtins_offset));

  return Builtins::GetCode(id, resolved);
}


void MacroAssembler::Ret() {
  ret(0);
}


void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      inc(operand);
    } else {
      add(operand, Immediate(value));
    }
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      dec(operand);
    } else {
      sub(operand, Immediate(value));
    }
  }
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code) Check(cc, msg);
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L, taken);
  Abort(msg);
  // Control will not return here.
  bind(&L);
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems; however, msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
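  // Illustrative example (hypothetical address, assuming kSmiTag == 0 and
  // kSmiTagMask == 1): for msg == 0x0804a001, p0 == 0x0804a000 looks like a
  // smi and p1 - p0 == 1, so the runtime can rebuild msg as p0 + (p1 - p0).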
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  push(eax);
  push(Immediate(p0));
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
  CallRuntime(Runtime::kAbort, 2);
  // Control will not return here.
}


CodePatcher::CodePatcher(byte* address, int size)
    : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to
  // patch. The size is adjusted with kGap in order for the assembler to
  // generate size bytes of instructions without failing with buffer size
  // constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


} }  // namespace v8::internal