blob: d7c7d3a23c6a7b1780f8f2044a806e689d9e4c44 [file] [log] [blame]
Steve Blocka7e24c12009-10-30 11:49:00 +00001// Copyright 2006-2009 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
29
30#include "bootstrapper.h"
31#include "codegen-inl.h"
32#include "debug.h"
33#include "runtime.h"
34#include "serialize.h"
35
36namespace v8 {
37namespace internal {
38
39// -------------------------------------------------------------------------
40// MacroAssembler implementation.
41
// Construct a macro assembler emitting into 'buffer' of the given size.
// Stub calls are allowed by default; code_object_ starts out as the
// undefined value and is replaced later with the real code object.
MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      unresolved_(0),
      generating_stub_(false),
      allow_stub_calls_(true),
      code_object_(Heap::undefined_value()) {
}
49
50
// Emit code that sets the remembered-set bit covering 'addr' inside the
// page containing 'object'. 'object' and 'addr' are clobbered (reused as
// page_start and pointer_offset); 'scratch' is clobbered on the
// large-object path only.
static void RecordWriteHelper(MacroAssembler* masm,
                              Register object,
                              Register addr,
                              Register scratch) {
  Label fast;

  // Compute the page start address from the heap object pointer, and reuse
  // the 'object' register for it.
  masm->and_(object, ~Page::kPageAlignmentMask);
  Register page_start = object;

  // Compute the bit addr in the remembered set/index of the pointer in the
  // page. Reuse 'addr' as pointer_offset.
  masm->sub(addr, Operand(page_start));
  masm->shr(addr, kObjectAlignmentBits);
  Register pointer_offset = addr;

  // If the bit offset lies beyond the normal remembered set range, it is in
  // the extra remembered set area of a large object.
  masm->cmp(pointer_offset, Page::kPageSize / kPointerSize);
  masm->j(less, &fast);

  // Adjust 'page_start' so that addressing using 'pointer_offset' hits the
  // extra remembered set after the large object.

  // Find the length of the large object (FixedArray).
  masm->mov(scratch, Operand(page_start, Page::kObjectStartOffset
                                         + FixedArray::kLengthOffset));
  Register array_length = scratch;

  // Extra remembered set starts right after the large object (a FixedArray),
  // at page_start + kObjectStartOffset + objectSize
  // where objectSize is FixedArray::kHeaderSize + kPointerSize * array_length.
  // Add the delta between the end of the normal RSet and the start of the
  // extra RSet to 'page_start', so that addressing the bit using
  // 'pointer_offset' hits the extra RSet words.
  masm->lea(page_start,
            Operand(page_start, array_length, times_pointer_size,
                    Page::kObjectStartOffset + FixedArray::kHeaderSize
                        - Page::kRSetEndOffset));

  // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
  // to limit code size. We should probably evaluate this decision by
  // measuring the performance of an equivalent implementation using
  // "simpler" instructions
  masm->bind(&fast);
  masm->bts(Operand(page_start, Page::kRSetOffset), pointer_offset);
}
99
100
// Code stub that performs the remembered-set update out of line; used by
// MacroAssembler::RecordWrite when inlining the barrier would cost too
// much code space. The stub identity (for the code cache) is keyed on the
// three registers it operates on.
class RecordWriteStub : public CodeStub {
 public:
  RecordWriteStub(Register object, Register addr, Register scratch)
      : object_(object), addr_(addr), scratch_(scratch) { }

  void Generate(MacroAssembler* masm);

 private:
  Register object_;   // Object being stored into; clobbered by the stub.
  Register addr_;     // Address being written to; clobbered by the stub.
  Register scratch_;  // Extra register for the large-object RSet path.

#ifdef DEBUG
  void Print() {
    PrintF("RecordWriteStub (object reg %d), (addr reg %d), (scratch reg %d)\n",
           object_.code(), addr_.code(), scratch_.code());
  }
#endif

  // Minor key encoding in 12 bits of three registers (object, address and
  // scratch) OOOOAAAASSSS.
  class ScratchBits: public BitField<uint32_t, 0, 4> {};
  class AddressBits: public BitField<uint32_t, 4, 4> {};
  class ObjectBits: public BitField<uint32_t, 8, 4> {};

  Major MajorKey() { return RecordWrite; }

  int MinorKey() {
    // Encode the registers.
    return ObjectBits::encode(object_.code()) |
           AddressBits::encode(addr_.code()) |
           ScratchBits::encode(scratch_.code());
  }
};
135
136
// Emit the stub body: delegate to the shared helper, then return.
void RecordWriteStub::Generate(MacroAssembler* masm) {
  RecordWriteHelper(masm, object_, addr_, scratch_);
  masm->ret(0);
}
141
142
// Set the remembered set bit for [object+offset].
// object is the object being stored into, value is the object being stored.
// If offset is zero, then the scratch register contains the array index into
// the elements array represented as a Smi.
// All registers are clobbered by the operation.
void MacroAssembler::RecordWrite(Register object, int offset,
                                 Register value, Register scratch) {
  // First, check if a remembered set write is even needed. The tests below
  // catch stores of Smis and stores into young gen (which does not have
  // space for the remembered set bits).
  Label done;

  // Skip barrier if writing a smi.
  ASSERT_EQ(0, kSmiTag);
  test(value, Immediate(kSmiTagMask));
  j(zero, &done);

  if (Serializer::enabled()) {
    // Can't do arithmetic on external references if it might get serialized.
    mov(value, Operand(object));
    and_(value, Heap::NewSpaceMask());
    cmp(Operand(value), Immediate(ExternalReference::new_space_start()));
    j(equal, &done);
  } else {
    int32_t new_space_start = reinterpret_cast<int32_t>(
        ExternalReference::new_space_start().address());
    // lea does not set flags; the and_ below does. A zero result means
    // the object lies in new space, so the barrier can be skipped.
    lea(value, Operand(object, -new_space_start));
    and_(value, Heap::NewSpaceMask());
    j(equal, &done);
  }

  if ((offset > 0) && (offset < Page::kMaxHeapObjectSize)) {
    // Small object with a statically-known offset: inline the RSet update.
    // Compute the bit offset in the remembered set, leave it in 'value'.
    lea(value, Operand(object, offset));
    and_(value, Page::kPageAlignmentMask);
    shr(value, kPointerSizeLog2);

    // Compute the page address from the heap object pointer, leave it in
    // 'object'.
    and_(object, ~Page::kPageAlignmentMask);

    // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
    // to limit code size. We should probably evaluate this decision by
    // measuring the performance of an equivalent implementation using
    // "simpler" instructions
    bts(Operand(object, Page::kRSetOffset), value);
  } else {
    Register dst = scratch;
    if (offset != 0) {
      lea(dst, Operand(object, offset));
    } else {
      // array access: calculate the destination address in the same manner
      // as KeyedStoreIC::GenerateGeneric. Multiply a smi by 2 to get an
      // offset into an array of words.
      ASSERT_EQ(1, kSmiTagSize);
      ASSERT_EQ(0, kSmiTag);
      lea(dst, Operand(object, dst, times_half_pointer_size,
                       FixedArray::kHeaderSize - kHeapObjectTag));
    }
    // If we are already generating a shared stub, not inlining the
    // record write code isn't going to save us any memory.
    if (generating_stub()) {
      RecordWriteHelper(this, object, dst, value);
    } else {
      RecordWriteStub stub(object, dst, value);
      CallStub(&stub);
    }
  }

  bind(&done);
}
214
215
Steve Blockd0582a62009-12-15 09:54:21 +0000216void MacroAssembler::StackLimitCheck(Label* on_stack_overflow) {
217 cmp(esp,
218 Operand::StaticVariable(ExternalReference::address_of_stack_limit()));
219 j(below, on_stack_overflow);
220}
221
222
Steve Blocka7e24c12009-10-30 11:49:00 +0000223#ifdef ENABLE_DEBUGGER_SUPPORT
224void MacroAssembler::SaveRegistersToMemory(RegList regs) {
225 ASSERT((regs & ~kJSCallerSaved) == 0);
226 // Copy the content of registers to memory location.
227 for (int i = 0; i < kNumJSCallerSaved; i++) {
228 int r = JSCallerSavedCode(i);
229 if ((regs & (1 << r)) != 0) {
230 Register reg = { r };
231 ExternalReference reg_addr =
232 ExternalReference(Debug_Address::Register(i));
233 mov(Operand::StaticVariable(reg_addr), reg);
234 }
235 }
236}
237
238
239void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
240 ASSERT((regs & ~kJSCallerSaved) == 0);
241 // Copy the content of memory location to registers.
242 for (int i = kNumJSCallerSaved; --i >= 0;) {
243 int r = JSCallerSavedCode(i);
244 if ((regs & (1 << r)) != 0) {
245 Register reg = { r };
246 ExternalReference reg_addr =
247 ExternalReference(Debug_Address::Register(i));
248 mov(reg, Operand::StaticVariable(reg_addr));
249 }
250 }
251}
252
253
254void MacroAssembler::PushRegistersFromMemory(RegList regs) {
255 ASSERT((regs & ~kJSCallerSaved) == 0);
256 // Push the content of the memory location to the stack.
257 for (int i = 0; i < kNumJSCallerSaved; i++) {
258 int r = JSCallerSavedCode(i);
259 if ((regs & (1 << r)) != 0) {
260 ExternalReference reg_addr =
261 ExternalReference(Debug_Address::Register(i));
262 push(Operand::StaticVariable(reg_addr));
263 }
264 }
265}
266
267
268void MacroAssembler::PopRegistersToMemory(RegList regs) {
269 ASSERT((regs & ~kJSCallerSaved) == 0);
270 // Pop the content from the stack to the memory location.
271 for (int i = kNumJSCallerSaved; --i >= 0;) {
272 int r = JSCallerSavedCode(i);
273 if ((regs & (1 << r)) != 0) {
274 ExternalReference reg_addr =
275 ExternalReference(Debug_Address::Register(i));
276 pop(Operand::StaticVariable(reg_addr));
277 }
278 }
279}
280
281
282void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
283 Register scratch,
284 RegList regs) {
285 ASSERT((regs & ~kJSCallerSaved) == 0);
286 // Copy the content of the stack to the memory location and adjust base.
287 for (int i = kNumJSCallerSaved; --i >= 0;) {
288 int r = JSCallerSavedCode(i);
289 if ((regs & (1 << r)) != 0) {
290 mov(scratch, Operand(base, 0));
291 ExternalReference reg_addr =
292 ExternalReference(Debug_Address::Register(i));
293 mov(Operand::StaticVariable(reg_addr), scratch);
294 lea(base, Operand(base, kPointerSize));
295 }
296 }
297}
298#endif
299
300void MacroAssembler::Set(Register dst, const Immediate& x) {
301 if (x.is_zero()) {
302 xor_(dst, Operand(dst)); // shorter than mov
303 } else {
304 mov(dst, x);
305 }
306}
307
308
// Store immediate x to a memory operand. Unlike the register variant there
// is no shorter zero encoding to exploit, so this is always a plain mov.
void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}
312
313
// Load heap_object's map into 'map' and compare its instance type against
// 'type'. Flags are left set for the caller to branch on; 'map' keeps the
// loaded map for further use.
void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}
320
321
// Byte-compare the instance type field of the map in 'map' against 'type';
// flags are left set for the caller to branch on.
void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       static_cast<int8_t>(type));
}
326
327
// Load heap_object's map into 'map' and its instance type into
// 'instance_type', then test the not-a-string mask. Returns the condition
// (zero) that holds when the object is a string, so callers can branch on
// the flags this leaves behind.
Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  ASSERT(kNotStringTag != 0);
  test(instance_type, Immediate(kIsNotStringMask));
  return zero;
}
337
338
// Compare the two values on top of the FPU stack and transfer the result
// to EFLAGS, popping both operands so the FPU stack ends balanced.
void MacroAssembler::FCmp() {
  if (CpuFeatures::IsSupported(CMOV)) {
    // fucomip compares st(0) with st(1), sets EFLAGS directly and pops
    // once; ffree + fincstp discard the remaining operand.
    fucomip();
    ffree(0);
    fincstp();
  } else {
    // Pre-P6 path: compare-and-pop twice, then move the FPU status word
    // into EFLAGS via ax/sahf. eax is preserved around the sequence.
    fucompp();
    push(eax);
    fnstsw_ax();
    sahf();
    pop(eax);
  }
}
352
353
// Build a standard internal frame: saved ebp, context (esi), a Smi frame
// type marker and the code object. In debug builds, verify the pushed code
// object is not the undefined-value placeholder (i.e. it was patched).
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, Operand(esp));
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(CodeObject()));
  if (FLAG_debug_code) {
    cmp(Operand(esp, 0), Immediate(Factory::undefined_value()));
    Check(not_equal, "code object not properly patched");
  }
}
365
366
// Tear down a frame built by EnterFrame. In debug builds, check that the
// frame's type marker matches the expected 'type' before leaving.
void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (FLAG_debug_code) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, "stack frame types must match");
  }
  leave();
}
375
// Build the fixed part of an exit frame: caller's ebp, a slot for the entry
// stack pointer (patched later), and the code slot (0 in debug mode, the
// code object otherwise). Records ebp and esi in the per-thread
// c_entry_fp/context Top slots.
void MacroAssembler::EnterExitFramePrologue(ExitFrame::Mode mode) {
  // Setup the frame structure on the stack.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(ebp);
  mov(ebp, Operand(esp));

  // Reserve room for entry stack pointer and push the debug marker.
  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // saved entry sp, patched before call
  if (mode == ExitFrame::MODE_DEBUG) {
    push(Immediate(0));
  } else {
    push(Immediate(CodeObject()));
  }

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  ExternalReference context_address(Top::k_context_address);
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);
}
Steve Blocka7e24c12009-10-30 11:49:00 +0000399
// Finish exit-frame setup: (in debug mode) spill the JS caller-saved
// registers for nested breakpoints, reserve 'argc' argument slots, align
// esp to the OS activation-frame alignment, and patch the saved entry sp
// slot reserved by EnterExitFramePrologue.
void MacroAssembler::EnterExitFrameEpilogue(ExitFrame::Mode mode, int argc) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Save the state of all registers to the stack from the memory
  // location. This is needed to allow nested break points.
  if (mode == ExitFrame::MODE_DEBUG) {
    // TODO(1243899): This should be symmetric to
    // CopyRegistersFromStackToMemory() but it isn't! esp is assumed
    // correct here, but computed for the other call. Very error
    // prone! FIX THIS. Actually there are deeper problems with
    // register saving than this asymmetry (see the bug report
    // associated with this issue).
    PushRegistersFromMemory(kJSCallerSaved);
  }
#endif

  // Reserve space for arguments.
  sub(Operand(esp), Immediate(argc * kPointerSize));

  // Get the required frame alignment for the OS.
  static const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    // Aligning by anding with -alignment is valid because alignment is a
    // power of two.
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}
428
429
// Enter an exit frame for calling into C with argc in eax. Leaves the
// argument count in edi and a pointer to the first argument in esi, then
// reserves two C argument slots (argc, argv).
void MacroAssembler::EnterExitFrame(ExitFrame::Mode mode) {
  EnterExitFramePrologue(mode);

  // Setup argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, Operand(eax));
  lea(esi, Operand(ebp, eax, times_4, offset));

  EnterExitFrameEpilogue(mode, 2);
}
440
441
// Enter an exit frame for an API call with a statically-known argument
// count: esi is pointed at the first of 'stack_space' caller arguments and
// 'argc' C argument slots are reserved.
void MacroAssembler::EnterApiExitFrame(ExitFrame::Mode mode,
                                       int stack_space,
                                       int argc) {
  EnterExitFramePrologue(mode);

  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  lea(esi, Operand(ebp, (stack_space * kPointerSize) + offset));

  EnterExitFrameEpilogue(mode, argc);
}
452
453
// Tear down an exit frame: (in debug mode) restore the spilled JS
// caller-saved registers, drop the arguments and receiver, restore the
// context from Top, and clear the c_entry_fp Top slot.
void MacroAssembler::LeaveExitFrame(ExitFrame::Mode mode) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Restore the memory copy of the registers by digging them out from
  // the stack. This is needed to allow nested break points.
  if (mode == ExitFrame::MODE_DEBUG) {
    // It's okay to clobber register ebx below because we don't need
    // the function pointer after this.
    const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
    int kOffset = ExitFrameConstants::kCodeOffset - kCallerSavedSize;
    lea(ebx, Operand(ebp, kOffset));
    CopyRegistersFromStackToMemory(ebx, ecx, kJSCallerSaved);
  }
#endif

  // Get the return address from the stack and restore the frame pointer.
  mov(ecx, Operand(ebp, 1 * kPointerSize));
  mov(ebp, Operand(ebp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  lea(esp, Operand(esi, 1 * kPointerSize));

  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Top::k_context_address);
  mov(esi, Operand::StaticVariable(context_address));
#ifdef DEBUG
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Push the return address to get ready to return.
  push(ecx);

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}
489
490
// Push a new try-handler frame onto the stack and link it as the current
// handler in Top. The handler layout is: next handler, fp (or NULL),
// state, pc — with the pc (return address) already on TOS when called.
void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  // Adjust this code if not the case.
  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
  // The pc (return address) is already on TOS.
  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      push(Immediate(StackHandler::TRY_CATCH));
    } else {
      push(Immediate(StackHandler::TRY_FINALLY));
    }
    push(ebp);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The frame pointer does not point to a JS frame so we save NULL
    // for ebp. We expect the code throwing an exception to check ebp
    // before dereferencing it to restore the context.
    push(Immediate(StackHandler::ENTRY));
    push(Immediate(0));  // NULL frame pointer.
  }
  // Save the current handler as the next handler.
  push(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
  // Link this handler as the new current one.
  mov(Operand::StaticVariable(ExternalReference(Top::k_handler_address)), esp);
}
516
517
// Unlink the topmost try-handler: restore the previous handler from the
// 'next' slot on TOS into Top, then drop the remaining handler words.
void MacroAssembler::PopTryHandler() {
  ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
  pop(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
  add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize));
}
523
524
// Walk the prototype chain from 'object' to 'holder', emitting a map check
// (and, for global proxies, a security check) at each step; jumps to 'miss'
// on any mismatch. Returns the register that ends up holding the holder
// object (object_reg if object == holder, otherwise holder_reg).
Register MacroAssembler::CheckMaps(JSObject* object, Register object_reg,
                                   JSObject* holder, Register holder_reg,
                                   Register scratch,
                                   Label* miss) {
  // Make sure there's no overlap between scratch and the other
  // registers.
  ASSERT(!scratch.is(object_reg) && !scratch.is(holder_reg));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 1;

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  while (object != holder) {
    depth++;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

    JSObject* prototype = JSObject::cast(object->GetPrototype());
    if (Heap::InNewSpace(prototype)) {
      // Get the map of the current object.
      mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
      cmp(Operand(scratch), Immediate(Handle<Map>(object->map())));
      // Branch on the result of the map check.
      j(not_equal, miss, not_taken);
      // Check access rights to the global object. This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (object->IsJSGlobalProxy()) {
        CheckAccessGlobalProxy(reg, scratch, miss);

        // Restore scratch register to be the map of the object.
        // We load the prototype from the map in the scratch register.
        mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
      }
      // The prototype is in new space; we cannot store a reference
      // to it in the code. Load it from the map.
      reg = holder_reg;  // from now the object is in holder_reg
      mov(reg, FieldOperand(scratch, Map::kPrototypeOffset));

    } else {
      // Check the map of the current object.
      cmp(FieldOperand(reg, HeapObject::kMapOffset),
          Immediate(Handle<Map>(object->map())));
      // Branch on the result of the map check.
      j(not_equal, miss, not_taken);
      // Check access rights to the global object. This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (object->IsJSGlobalProxy()) {
        CheckAccessGlobalProxy(reg, scratch, miss);
      }
      // The prototype is in old space; load it directly.
      reg = holder_reg;  // from now the object is in holder_reg
      mov(reg, Handle<JSObject>(prototype));
    }

    // Go to the next object in the prototype chain.
    object = prototype;
  }

  // Check the holder map.
  cmp(FieldOperand(reg, HeapObject::kMapOffset),
      Immediate(Handle<Map>(holder->map())));
  j(not_equal, miss, not_taken);

  // Log the check depth.
  LOG(IntEvent("check-maps-depth", depth));

  // Perform security check for access to the global object and return
  // the holder register.
  ASSERT(object == holder);
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
  if (object->IsJSGlobalProxy()) {
    CheckAccessGlobalProxy(reg, scratch, miss);
  }
  return reg;
}
606
607
// Emit a security check for access through a global proxy in holder_reg:
// the current lexical context's security token must match the token of the
// proxy's context, otherwise jump to 'miss'. 'scratch' is clobbered;
// holder_reg is preserved (saved/restored across the token compare).
void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));

  // Load current lexical context from the stack frame.
  mov(scratch, Operand(ebp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (FLAG_debug_code) {
    cmp(Operand(scratch), Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
  }
  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, offset));
  mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    push(scratch);
    // Read the first word and compare to global_context_map.
    mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
    cmp(scratch, Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(scratch);
  }

  // Check if both contexts are the same.
  cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  j(equal, &same_contexts, taken);

  // Compare security tokens, save holder_reg on the stack so we can use it
  // as a temporary register.
  //
  // TODO(119): avoid push(holder_reg)/pop(holder_reg)
  push(holder_reg);
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    cmp(holder_reg, Factory::null_value());
    Check(not_equal, "JSGlobalProxy::context() should not be null.");

    push(holder_reg);
    // Read the first word and compare to global_context_map(),
    mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    cmp(holder_reg, Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);
  }

  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, token_offset));
  cmp(scratch, FieldOperand(holder_reg, token_offset));
  pop(holder_reg);
  j(not_equal, miss, not_taken);

  bind(&same_contexts);
}
674
675
// Load the new-space allocation top into 'result'. If RESULT_CONTAINS_TOP
// is set, 'result' already holds it (debug-checked) and scratch must be
// no_reg; otherwise the top is loaded from the external reference, using
// 'scratch' to hold its address when a scratch register is provided.
void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register result_end,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    ASSERT(scratch.is(no_reg));
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    cmp(result, Operand::StaticVariable(new_space_allocation_top));
    Check(equal, "Unexpected allocation top");
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available.
  if (scratch.is(no_reg)) {
    mov(result, Operand::StaticVariable(new_space_allocation_top));
  } else {
    ASSERT(!scratch.is(result_end));
    // Keep the top's address in scratch so UpdateAllocationTopHelper can
    // store through it without reloading the external reference.
    mov(Operand(scratch), Immediate(new_space_allocation_top));
    mov(result, Operand(scratch, 0));
  }
}
704
705
// Store 'result_end' as the new new-space allocation top, either through
// the address cached in 'scratch' (by LoadAllocationTopHelper) or directly
// via the external reference. Debug builds verify object alignment first.
void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch) {
  if (FLAG_debug_code) {
    test(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, "Unaligned allocation in new space");
  }

  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Update new top. Use scratch if available.
  if (scratch.is(no_reg)) {
    mov(Operand::StaticVariable(new_space_allocation_top), result_end);
  } else {
    mov(Operand(scratch, 0), result_end);
  }
}
723
724
// Allocate a fixed-size object in new space. On success 'result' holds the
// (optionally tagged) object and 'result_end' the new top; jumps to
// gc_required if new space is exhausted.
void MacroAssembler::AllocateInNewSpace(int object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  lea(result_end, Operand(result, object_size));
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required, not_taken);

  // Tag result if requested. Tagging before the top update is safe because
  // only 'result' is modified, not 'result_end'.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}
751
752
753void MacroAssembler::AllocateInNewSpace(int header_size,
754 ScaleFactor element_size,
755 Register element_count,
756 Register result,
757 Register result_end,
758 Register scratch,
759 Label* gc_required,
760 AllocationFlags flags) {
761 ASSERT(!result.is(result_end));
762
763 // Load address of new object into result.
764 LoadAllocationTopHelper(result, result_end, scratch, flags);
765
766 // Calculate new top and bail out if new space is exhausted.
767 ExternalReference new_space_allocation_limit =
768 ExternalReference::new_space_allocation_limit_address();
769 lea(result_end, Operand(result, element_count, element_size, header_size));
770 cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
771 j(above, gc_required);
772
Steve Blocka7e24c12009-10-30 11:49:00 +0000773 // Tag result if requested.
774 if ((flags & TAG_OBJECT) != 0) {
Leon Clarkee46be812010-01-19 14:06:41 +0000775 lea(result, Operand(result, kHeapObjectTag));
Steve Blocka7e24c12009-10-30 11:49:00 +0000776 }
Leon Clarkee46be812010-01-19 14:06:41 +0000777
778 // Update allocation top.
779 UpdateAllocationTopHelper(result_end, scratch);
Steve Blocka7e24c12009-10-30 11:49:00 +0000780}
781
782
// Allocate an object whose size (in bytes) is held in 'object_size'. On
// success 'result' holds the (optionally tagged) object and 'result_end'
// the new top; jumps to gc_required if new space is exhausted.
void MacroAssembler::AllocateInNewSpace(Register object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  if (!object_size.is(result_end)) {
    mov(result_end, object_size);
  }
  add(result_end, Operand(result));
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required, not_taken);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}
812
813
// Roll back the most recent new-space allocation by resetting the
// allocation top to 'object' (after stripping its heap-object tag).
// Debug builds verify 'object' is below the current top.
void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Make sure the object has no tag before resetting top.
  and_(Operand(object), Immediate(~kHeapObjectTagMask));
#ifdef DEBUG
  cmp(object, Operand::StaticVariable(new_space_allocation_top));
  Check(below, "Undo allocation of non allocated memory");
#endif
  mov(Operand::StaticVariable(new_space_allocation_top), object);
}
826
827
// Allocate a HeapNumber in new space, leaving the tagged result in
// 'result' with its map initialized; jumps to gc_required on failure.
// The value field is left uninitialized for the caller to fill in.
void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate heap number in new space.
  AllocateInNewSpace(HeapNumber::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::heap_number_map()));
}
844
845
// Allocate a sequential two-byte string of 'length' characters in new
// space, initializing its map, length and (empty) hash field. Jumps to
// gc_required on failure; all scratch registers are clobbered.
void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  ASSERT(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
  and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));

  // Allocate two byte string in new space.
  AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::string_map()));
  mov(FieldOperand(result, String::kLengthOffset), length);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
877
878
879void MacroAssembler::AllocateAsciiString(Register result,
880 Register length,
881 Register scratch1,
882 Register scratch2,
883 Register scratch3,
884 Label* gc_required) {
885 // Calculate the number of bytes needed for the characters in the string while
886 // observing object alignment.
887 ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
888 mov(scratch1, length);
889 ASSERT(kCharSize == 1);
890 add(Operand(scratch1), Immediate(kObjectAlignmentMask));
891 and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));
892
893 // Allocate ascii string in new space.
894 AllocateInNewSpace(SeqAsciiString::kHeaderSize,
895 times_1,
896 scratch1,
897 result,
898 scratch2,
899 scratch3,
900 gc_required,
901 TAG_OBJECT);
902
903 // Set the map, length and hash field.
904 mov(FieldOperand(result, HeapObject::kMapOffset),
905 Immediate(Factory::ascii_string_map()));
906 mov(FieldOperand(result, String::kLengthOffset), length);
907 mov(FieldOperand(result, String::kHashFieldOffset),
908 Immediate(String::kEmptyHashField));
909}
910
911
// Allocate a (two-byte) cons string in new space, leaving a tagged
// pointer in |result|.  Jumps to |gc_required| if new space is
// exhausted.  Only the map is initialized; the caller must fill in the
// first/second/length/hash fields.  |scratch1| and |scratch2| are
// clobbered.
void MacroAssembler::AllocateConsString(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::cons_string_map()));
}
928
929
// Allocate an ASCII cons string in new space, leaving a tagged pointer
// in |result|.  Jumps to |gc_required| if new space is exhausted.  Only
// the map is initialized; the caller must fill in the remaining fields.
// |scratch1| and |scratch2| are clobbered.
void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  // Allocate ascii cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::cons_ascii_string_map()));
}
946
947
Steve Blocka7e24c12009-10-30 11:49:00 +0000948void MacroAssembler::NegativeZeroTest(CodeGenerator* cgen,
949 Register result,
950 Register op,
951 JumpTarget* then_target) {
952 JumpTarget ok;
953 test(result, Operand(result));
954 ok.Branch(not_zero, taken);
955 test(op, Operand(op));
956 then_target->Branch(sign, not_taken);
957 ok.Bind();
958}
959
960
961void MacroAssembler::NegativeZeroTest(Register result,
962 Register op,
963 Label* then_label) {
964 Label ok;
965 test(result, Operand(result));
966 j(not_zero, &ok, taken);
967 test(op, Operand(op));
968 j(sign, then_label, not_taken);
969 bind(&ok);
970}
971
972
973void MacroAssembler::NegativeZeroTest(Register result,
974 Register op1,
975 Register op2,
976 Register scratch,
977 Label* then_label) {
978 Label ok;
979 test(result, Operand(result));
980 j(not_zero, &ok, taken);
981 mov(scratch, Operand(op1));
982 or_(scratch, Operand(op2));
983 j(sign, then_label, not_taken);
984 bind(&ok);
985}
986
987
// Load into |result| the prototype that instances of the JSFunction in
// |function| would get, or jump to |miss| when this cannot be done
// without allocation (receiver is a smi, not a function, or the
// prototype slot holds the hole).  |result| and |scratch| are
// clobbered even on the miss path.
void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss) {
  // Check that the receiver isn't a smi.
  test(function, Immediate(kSmiTagMask));
  j(zero, miss, not_taken);

  // Check that the function really is a function.  Leaves the map of
  // |function| in |result|.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss, not_taken);

  // Make sure that the function has an instance prototype.
  Label non_instance;
  movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
  test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance, not_taken);

  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(Operand(result), Immediate(Factory::the_hole_value()));
  j(equal, miss, not_taken);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  mov(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}
1033
1034
// Emit a call to the given code stub, generating the stub's code on
// demand via GetCode().
void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  call(stub->GetCode(), RelocInfo::CODE_TARGET);
}
1039
1040
Leon Clarkee46be812010-01-19 14:06:41 +00001041Object* MacroAssembler::TryCallStub(CodeStub* stub) {
1042 ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
1043 Object* result = stub->TryGetCode();
1044 if (!result->IsFailure()) {
1045 call(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
1046 }
1047 return result;
1048}
1049
1050
// Emit a tail call (jump) to the given code stub, generating the stub's
// code on demand via GetCode().
void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
}
1055
1056
Leon Clarkee46be812010-01-19 14:06:41 +00001057Object* MacroAssembler::TryTailCallStub(CodeStub* stub) {
1058 ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
1059 Object* result = stub->TryGetCode();
1060 if (!result->IsFailure()) {
1061 jmp(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
1062 }
1063 return result;
1064}
1065
1066
// Return from a stub, popping (argc - 1) * kPointerSize bytes of
// arguments off the caller's stack.  Only valid while generating a
// stub.
void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}
1071
1072
// Emit code used when a runtime call is made with the wrong number of
// arguments: discard the arguments from the stack and put undefined in
// eax as the (bogus) result.
void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    add(Operand(esp), Immediate(num_arguments * kPointerSize));
  }
  mov(eax, Immediate(Factory::undefined_value()));
}
1079
1080
// Convenience overload: call a runtime function identified by id.
void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}
1084
1085
// Convenience overload: non-aborting runtime call by function id; see
// TryCallRuntime(Runtime::Function*, int) for the return protocol.
Object* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
                                       int num_arguments) {
  return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
}
1090
1091
// Call the given runtime function through its RuntimeStub.  If the
// function's declared arity does not match |num_arguments|, emits the
// IllegalOperation sequence instead of the call.
void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  Runtime::FunctionId function_id =
      static_cast<Runtime::FunctionId>(f->stub_id);
  RuntimeStub stub(function_id, num_arguments);
  CallStub(&stub);
}
1106
1107
// Non-aborting variant of CallRuntime: returns the stub's Failure
// object when the stub code could not be allocated, otherwise a
// non-failure object.
Object* MacroAssembler::TryCallRuntime(Runtime::Function* f,
                                       int num_arguments) {
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    // Since we did not call the stub, there was no allocation failure.
    // Return some non-failure object.
    return Heap::undefined_value();
  }

  Runtime::FunctionId function_id =
      static_cast<Runtime::FunctionId>(f->stub_id);
  RuntimeStub stub(function_id, num_arguments);
  return TryCallStub(&stub);
}
1122
1123
// Tail call a runtime routine reached through the given external
// reference.  Loads the argument count into eax (expected by the
// C entry stub) and jumps; |result_size| is ignored on ia32.
void MacroAssembler::TailCallRuntime(const ExternalReference& ext,
                                     int num_arguments,
                                     int result_size) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  JumpToRuntime(ext);
}
1134
1135
// Save the current handle scope state (extensions count, next and
// limit pointers) on the stack and reset the extensions count to zero,
// opening a fresh handle scope.  |scratch| is clobbered.  The matching
// restore is PopHandleScope/TryPopHandleScope.
void MacroAssembler::PushHandleScope(Register scratch) {
  // Push the number of extensions, smi-tagged so the gc will ignore it.
  ExternalReference extensions_address =
      ExternalReference::handle_scope_extensions_address();
  mov(scratch, Operand::StaticVariable(extensions_address));
  ASSERT_EQ(0, kSmiTag);
  shl(scratch, kSmiTagSize);
  push(scratch);
  mov(Operand::StaticVariable(extensions_address), Immediate(0));
  // Push next and limit pointers which will be wordsize aligned and
  // hence automatically smi tagged.
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  push(Operand::StaticVariable(next_address));
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address();
  push(Operand::StaticVariable(limit_address));
}
1154
1155
// Restore the handle scope state saved by PushHandleScope.  If any
// handle scope extensions were created in between, they are deleted by
// a runtime call first.  When |gc_allowed| is false the runtime call
// uses TryCallRuntime and a Failure object is returned on allocation
// failure; otherwise NULL is returned.  |saved| (if valid) is preserved
// across the runtime call; |scratch| is clobbered.
Object* MacroAssembler::PopHandleScopeHelper(Register saved,
                                             Register scratch,
                                             bool gc_allowed) {
  Object* result = NULL;
  ExternalReference extensions_address =
      ExternalReference::handle_scope_extensions_address();
  Label write_back;
  mov(scratch, Operand::StaticVariable(extensions_address));
  cmp(Operand(scratch), Immediate(0));
  // No extensions were created: skip the runtime call.
  j(equal, &write_back);
  // Calling a runtime function messes with registers so we save and
  // restore any one we're asked not to change
  if (saved.is_valid()) push(saved);
  if (gc_allowed) {
    CallRuntime(Runtime::kDeleteHandleScopeExtensions, 0);
  } else {
    result = TryCallRuntime(Runtime::kDeleteHandleScopeExtensions, 0);
    if (result->IsFailure()) return result;
  }
  if (saved.is_valid()) pop(saved);

  bind(&write_back);
  // Pop in the reverse order of PushHandleScope: limit, next, then the
  // smi-tagged extensions count (untagged before writing back).
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address();
  pop(Operand::StaticVariable(limit_address));
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  pop(Operand::StaticVariable(next_address));
  pop(scratch);
  shr(scratch, kSmiTagSize);
  mov(Operand::StaticVariable(extensions_address), scratch);

  return result;
}
1190
1191
// Restore the handle scope saved by PushHandleScope; GC is allowed, so
// the helper's return value (always NULL here) is discarded.
void MacroAssembler::PopHandleScope(Register saved, Register scratch) {
  PopHandleScopeHelper(saved, scratch, true);
}
1195
1196
// Non-aborting variant of PopHandleScope: returns a Failure object if
// the internal runtime call could not allocate its stub code.
Object* MacroAssembler::TryPopHandleScope(Register saved, Register scratch) {
  return PopHandleScopeHelper(saved, scratch, false);
}
1200
1201
// Jump to the C entry runtime stub with ebx holding the entry point of
// the runtime routine to call (the register the CEntryStub reads).
void MacroAssembler::JumpToRuntime(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces(1);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}
1208
1209
// Shared prologue for InvokeCode/InvokeFunction.  Compares the expected
// and actual argument counts and, on a (possible) mismatch, invokes the
// ArgumentsAdaptorTrampoline builtin with eax = actual count,
// ebx = expected count and edx = the code to invoke.  When the counts
// provably match, falls through so the caller can emit a direct call or
// jump; after a CALL_FUNCTION invocation through the adaptor, control
// continues at |done|.
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    const Operand& code_operand,
                                    Label* done,
                                    InvokeFlag flag) {
  bool definitely_matches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(eax, actual.immediate());
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      ASSERT(expected.reg().is(ebx));
      mov(eax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), Operand(actual.reg()));
      j(equal, &invoke);
      ASSERT(actual.reg().is(eax));
      ASSERT(expected.reg().is(ebx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
    if (!code_constant.is_null()) {
      // The adaptor expects the entry point in edx.
      mov(edx, Immediate(code_constant));
      add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_operand.is_reg(edx)) {
      mov(edx, code_operand);
    }

    if (flag == CALL_FUNCTION) {
      call(adaptor, RelocInfo::CODE_TARGET);
      jmp(done);
    } else {
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
1273
1274
1275void MacroAssembler::InvokeCode(const Operand& code,
1276 const ParameterCount& expected,
1277 const ParameterCount& actual,
1278 InvokeFlag flag) {
1279 Label done;
1280 InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
1281 if (flag == CALL_FUNCTION) {
1282 call(code);
1283 } else {
1284 ASSERT(flag == JUMP_FUNCTION);
1285 jmp(code);
1286 }
1287 bind(&done);
1288}
1289
1290
1291void MacroAssembler::InvokeCode(Handle<Code> code,
1292 const ParameterCount& expected,
1293 const ParameterCount& actual,
1294 RelocInfo::Mode rmode,
1295 InvokeFlag flag) {
1296 Label done;
1297 Operand dummy(eax);
1298 InvokePrologue(expected, actual, code, dummy, &done, flag);
1299 if (flag == CALL_FUNCTION) {
1300 call(code, rmode);
1301 } else {
1302 ASSERT(flag == JUMP_FUNCTION);
1303 jmp(code, rmode);
1304 }
1305 bind(&done);
1306}
1307
1308
// Invoke the JSFunction in edi (|fun| must be edi, which the calling
// convention requires to hold the function).  Loads the context into
// esi, the formal parameter count into ebx and the code entry into
// edx, then dispatches through InvokeCode.
void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(fun.is(edi));
  mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset));
  // Skip past the code object header to the first instruction.
  lea(edx, FieldOperand(edx, Code::kHeaderSize));

  ParameterCount expected(ebx);
  InvokeCode(Operand(edx), expected, actual, flag);
}
1322
1323
// Invoke (call or tail-call) a JavaScript builtin.  If the builtin is
// not yet resolved (during bootstrapping) the call site is recorded in
// unresolved_ so it can be fixed up later.
void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
  bool resolved;
  Handle<Code> code = ResolveBuiltin(id, &resolved);

  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments match the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  InvokeCode(Handle<Code>(code), expected, expected,
             RelocInfo::CODE_TARGET, flag);

  const char* name = Builtins::GetName(id);
  int argc = Builtins::GetArgumentsCount(id);

  if (!resolved) {
    // Record the position of the code-target word just emitted so the
    // bootstrapper can patch it once the builtin exists.
    uint32_t flags =
        Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
        Bootstrapper::FixupFlagsUseCodeObject::encode(false);
    Unresolved entry = { pc_offset() - sizeof(int32_t), flags, name };
    unresolved_.Add(entry);
  }
}
1349
1350
// Load the entry point (first instruction) of a JavaScript builtin's
// code into |target|.  If the builtin is not yet resolved (during
// bootstrapping) the load site is recorded in unresolved_ for later
// fixup.
void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  bool resolved;
  Handle<Code> code = ResolveBuiltin(id, &resolved);

  const char* name = Builtins::GetName(id);
  int argc = Builtins::GetArgumentsCount(id);

  mov(Operand(target), Immediate(code));
  if (!resolved) {
    // Record the position of the code-object word just emitted so the
    // bootstrapper can patch it once the builtin exists.
    uint32_t flags =
        Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
        Bootstrapper::FixupFlagsUseCodeObject::encode(true);
    Unresolved entry = { pc_offset() - sizeof(int32_t), flags, name };
    unresolved_.Add(entry);
  }
  // Advance past the code object header to the first instruction.
  add(Operand(target), Immediate(Code::kHeaderSize - kHeapObjectTag));
}
1368
1369
// Look up the code object of a JavaScript builtin and load the builtin
// function into edi (clobbering edx as well).  *resolved is set to
// whether the builtin's code already exists (false during
// bootstrapping).
Handle<Code> MacroAssembler::ResolveBuiltin(Builtins::JavaScript id,
                                            bool* resolved) {
  // Move the builtin function into the temporary function slot by
  // reading it from the builtins object. NOTE: We should be able to
  // reduce this to two instructions by putting the function table in
  // the global object instead of the "builtins" object and by using a
  // real register for the function.
  mov(edx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  mov(edx, FieldOperand(edx, GlobalObject::kBuiltinsOffset));
  int builtins_offset =
      JSBuiltinsObject::kJSBuiltinsOffset + (id * kPointerSize);
  mov(edi, FieldOperand(edx, builtins_offset));

  return Builtins::GetCode(id, resolved);
}
1385
1386
// Load into |dst| the function context that is |context_chain_length|
// levels up the context chain from the current context (esi).  The
// result is always a function context, never an intermediate (with)
// context.
void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    mov(dst, Operand(esi, Context::SlotOffset(Context::CLOSURE_INDEX)));
    // Load the function context (which is the incoming, outer context).
    mov(dst, FieldOperand(dst, JSFunction::kContextOffset));
    for (int i = 1; i < context_chain_length; i++) {
      mov(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
      mov(dst, FieldOperand(dst, JSFunction::kContextOffset));
    }
    // The context may be an intermediate context, not a function context.
    mov(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  } else {  // Slot is in the current function context.
    // The context may be an intermediate context, not a function context.
    mov(dst, Operand(esi, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  }
}
1404
1405
1406
// Return from the current function without popping any stack arguments.
void MacroAssembler::Ret() {
  ret(0);
}
1410
1411
Leon Clarkee46be812010-01-19 14:06:41 +00001412void MacroAssembler::Drop(int stack_elements) {
1413 if (stack_elements > 0) {
1414 add(Operand(esp), Immediate(stack_elements * kPointerSize));
1415 }
1416}
1417
1418
// Load the handle |value| into register |dst|.
void MacroAssembler::Move(Register dst, Handle<Object> value) {
  mov(dst, value);
}
1422
1423
// Emit a store of |value| into the stats counter, but only when native
// code counters are compiled in and this counter is enabled.
void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
  }
}
1429
1430
1431void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
1432 ASSERT(value > 0);
1433 if (FLAG_native_code_counters && counter->Enabled()) {
1434 Operand operand = Operand::StaticVariable(ExternalReference(counter));
1435 if (value == 1) {
1436 inc(operand);
1437 } else {
1438 add(operand, Immediate(value));
1439 }
1440 }
1441}
1442
1443
1444void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
1445 ASSERT(value > 0);
1446 if (FLAG_native_code_counters && counter->Enabled()) {
1447 Operand operand = Operand::StaticVariable(ExternalReference(counter));
1448 if (value == 1) {
1449 dec(operand);
1450 } else {
1451 sub(operand, Immediate(value));
1452 }
1453 }
1454}
1455
1456
// Debug-only check: in --debug-code builds, emit a runtime check that
// aborts with |msg| unless |cc| holds.  No code in release builds.
void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code) Check(cc, msg);
}
1460
1461
1462void MacroAssembler::Check(Condition cc, const char* msg) {
1463 Label L;
1464 j(cc, &L, taken);
1465 Abort(msg);
1466 // will not return here
1467 bind(&L);
1468}
1469
1470
// Emit code that aborts execution with the given message by calling
// Runtime::kAbort.  The message pointer is smuggled past the GC as a
// pair of smis (an aligned pointer plus the alignment delta).  Ends
// with int3, so the emitted sequence never falls through.
void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  set_allow_stub_calls(true);

  push(eax);
  push(Immediate(p0));
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
  int3();
}
1496
1497
// RAII helper for patching |size| bytes of already-generated code at
// |address| using a dedicated macro assembler.
CodePatcher::CodePatcher(byte* address, int size)
    : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap on order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
1505
1506
// Finish patching: flush the instruction cache for the patched region
// and verify that exactly the requested number of bytes was emitted.
CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
1515
1516
1517} } // namespace v8::internal