// Copyright 2006-2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "debug.h"
#include "runtime.h"
#include "serialize.h"

namespace v8 {
namespace internal {

// -------------------------------------------------------------------------
// MacroAssembler implementation.

MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      unresolved_(0),
      generating_stub_(false),
      allow_stub_calls_(true),
      code_object_(Heap::undefined_value()) {
}


static void RecordWriteHelper(MacroAssembler* masm,
                              Register object,
                              Register addr,
                              Register scratch) {
  Label fast;

  // Compute the page start address from the heap object pointer, and reuse
  // the 'object' register for it.
  masm->and_(object, ~Page::kPageAlignmentMask);
  Register page_start = object;

  // Compute the bit address in the remembered set, i.e. the index of the
  // pointer within the page. Reuse 'addr' as pointer_offset.
  masm->sub(addr, Operand(page_start));
  masm->shr(addr, kObjectAlignmentBits);
  Register pointer_offset = addr;
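  // Illustrative example (values assumed, not from this file): with 4-byte
  // pointers and kObjectAlignmentBits == 2, a slot at page offset 0x100
  // yields pointer_offset == 0x40, so bit 0x40 of the page's remembered set
  // is the write barrier bit for that slot.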

  // If the bit offset lies beyond the normal remembered set range, it is in
  // the extra remembered set area of a large object.
  masm->cmp(pointer_offset, Page::kPageSize / kPointerSize);
  masm->j(less, &fast);

  // Adjust 'page_start' so that addressing using 'pointer_offset' hits the
  // extra remembered set after the large object.

  // Find the length of the large object (FixedArray).
  masm->mov(scratch, Operand(page_start, Page::kObjectStartOffset
                                         + FixedArray::kLengthOffset));
  Register array_length = scratch;

  // The extra remembered set starts right after the large object (a
  // FixedArray), at
  //   page_start + kObjectStartOffset + objectSize
  // where objectSize is FixedArray::kHeaderSize + kPointerSize * array_length.
  // Add the delta between the end of the normal RSet and the start of the
  // extra RSet to 'page_start', so that addressing the bit using
  // 'pointer_offset' hits the extra RSet words.
  masm->lea(page_start,
            Operand(page_start, array_length, times_pointer_size,
                    Page::kObjectStartOffset + FixedArray::kHeaderSize
                        - Page::kRSetEndOffset));

  // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
  // to limit code size. We should probably evaluate this decision by
  // measuring the performance of an equivalent implementation using
  // "simpler" instructions.
  masm->bind(&fast);
  masm->bts(Operand(page_start, Page::kRSetOffset), pointer_offset);
}


class RecordWriteStub : public CodeStub {
 public:
  RecordWriteStub(Register object, Register addr, Register scratch)
      : object_(object), addr_(addr), scratch_(scratch) { }

  void Generate(MacroAssembler* masm);

 private:
  Register object_;
  Register addr_;
  Register scratch_;

#ifdef DEBUG
  void Print() {
    PrintF("RecordWriteStub (object reg %d), (addr reg %d), (scratch reg %d)\n",
           object_.code(), addr_.code(), scratch_.code());
  }
#endif

  // Minor key encoding in 12 bits of three registers (object, address and
  // scratch) OOOOAAAASSSS.
  class ScratchBits: public BitField<uint32_t, 0, 4> {};
  class AddressBits: public BitField<uint32_t, 4, 4> {};
  class ObjectBits: public BitField<uint32_t, 8, 4> {};
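
  // Worked example of the encoding above (register codes assumed to follow
  // the usual ia32 numbering: eax = 0, ecx = 1, edx = 2): a stub with
  // object = eax, addr = ecx and scratch = edx has the minor key
  //   (0 << 8) | (1 << 4) | 2 == 0x012.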

  Major MajorKey() { return RecordWrite; }

  int MinorKey() {
    // Encode the registers.
    return ObjectBits::encode(object_.code()) |
           AddressBits::encode(addr_.code()) |
           ScratchBits::encode(scratch_.code());
  }
};


void RecordWriteStub::Generate(MacroAssembler* masm) {
  RecordWriteHelper(masm, object_, addr_, scratch_);
  masm->ret(0);
}


// Set the remembered set bit for [object+offset].
// object is the object being stored into, value is the object being stored.
// If offset is zero, then the scratch register contains the array index into
// the elements array represented as a Smi.
// All registers are clobbered by the operation.
void MacroAssembler::RecordWrite(Register object, int offset,
                                 Register value, Register scratch) {
  // First, check if a remembered set write is even needed. The tests below
  // catch stores of Smis and stores into young gen (which does not have
  // space for the remembered set bits).
  Label done;

  // Skip the barrier if writing a smi.
  ASSERT_EQ(0, kSmiTag);
  test(value, Immediate(kSmiTagMask));
  j(zero, &done);

  if (Serializer::enabled()) {
    // Can't do arithmetic on external references if it might get serialized.
    mov(value, Operand(object));
    and_(value, Heap::NewSpaceMask());
    cmp(Operand(value), Immediate(ExternalReference::new_space_start()));
    j(equal, &done);
  } else {
    int32_t new_space_start = reinterpret_cast<int32_t>(
        ExternalReference::new_space_start().address());
    lea(value, Operand(object, -new_space_start));
    and_(value, Heap::NewSpaceMask());
    j(equal, &done);
  }

  if ((offset > 0) && (offset < Page::kMaxHeapObjectSize)) {
    // Compute the bit offset in the remembered set, leave it in 'value'.
    lea(value, Operand(object, offset));
    and_(value, Page::kPageAlignmentMask);
    shr(value, kPointerSizeLog2);

    // Compute the page address from the heap object pointer, leave it in
    // 'object'.
    and_(object, ~Page::kPageAlignmentMask);

    // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
    // to limit code size. We should probably evaluate this decision by
    // measuring the performance of an equivalent implementation using
    // "simpler" instructions.
    bts(Operand(object, Page::kRSetOffset), value);
  } else {
    Register dst = scratch;
    if (offset != 0) {
      lea(dst, Operand(object, offset));
    } else {
      // Array access: calculate the destination address in the same manner
      // as KeyedStoreIC::GenerateGeneric. Multiply a smi by 2 to get an
      // offset into an array of words.
      ASSERT_EQ(1, kSmiTagSize);
      ASSERT_EQ(0, kSmiTag);
      lea(dst, Operand(object, dst, times_half_pointer_size,
                       FixedArray::kHeaderSize - kHeapObjectTag));
    }
    // If we are already generating a shared stub, not inlining the
    // record write code isn't going to save us any memory.
    if (generating_stub()) {
      RecordWriteHelper(this, object, dst, value);
    } else {
      RecordWriteStub stub(object, dst, value);
      CallStub(&stub);
    }
  }

  bind(&done);
}
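
// Illustrative call site for RecordWrite (a sketch with assumed names, not
// code from this file): after storing a heap object into a fixed field,
//   masm->mov(FieldOperand(receiver, JSObject::kPropertiesOffset), value);
//   masm->RecordWrite(receiver, JSObject::kPropertiesOffset, value, scratch);
// marks the corresponding remembered set bit so the GC sees the new pointer.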


#ifdef ENABLE_DEBUGGER_SUPPORT
void MacroAssembler::SaveRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the contents of the registers to their memory locations.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(Operand::StaticVariable(reg_addr), reg);
    }
  }
}


void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the contents of the memory locations back to the registers.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(reg, Operand::StaticVariable(reg_addr));
    }
  }
}


void MacroAssembler::PushRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Push the contents of the memory locations onto the stack.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      push(Operand::StaticVariable(reg_addr));
    }
  }
}


void MacroAssembler::PopRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Pop the contents from the stack into the memory locations.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      pop(Operand::StaticVariable(reg_addr));
    }
  }
}


void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
                                                    Register scratch,
                                                    RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the contents of the stack to the memory locations and adjust base.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      mov(scratch, Operand(base, 0));
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(Operand::StaticVariable(reg_addr), scratch);
      lea(base, Operand(base, kPointerSize));
    }
  }
}
#endif

void MacroAssembler::Set(Register dst, const Immediate& x) {
  if (x.is_zero()) {
    xor_(dst, Operand(dst));  // Shorter than mov: 2 bytes instead of 5.
  } else {
    mov(dst, x);
  }
}


void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}


void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}


void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       static_cast<int8_t>(type));
}


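// The x87 FPU keeps its comparison outcome in its own status word, so FCmp
// transfers it into EFLAGS where ordinary j(cc) branches can test it:
// fucompp compares ST(0) with ST(1) and pops both, fnstsw_ax copies the FPU
// status word into ax, and sahf loads ah into the flags. eax is saved and
// restored around the sequence.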
void MacroAssembler::FCmp() {
  fucompp();
  push(eax);
  fnstsw_ax();
  sahf();
  pop(eax);
}


void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, Operand(esp));
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(CodeObject()));
  if (FLAG_debug_code) {
    cmp(Operand(esp, 0), Immediate(Factory::undefined_value()));
    Check(not_equal, "code object not properly patched");
  }
}


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (FLAG_debug_code) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, "stack frame types must match");
  }
  leave();
}


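// Sketch of the exit frame built below (offsets inferred from the asserts
// and pushes; slot names beyond those asserted are assumptions):
//   ebp + 8 : first caller stack slot      (kCallerSPDisplacement)
//   ebp + 4 : return address               (kCallerPCOffset)
//   ebp + 0 : saved caller ebp             (kCallerFPOffset)
//   ebp - 4 : saved entry sp, patched last (kSPOffset)
//   ebp - 8 : debug marker (1 for EXIT_DEBUG, 0 otherwise)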
void MacroAssembler::EnterExitFrame(StackFrame::Type type) {
  ASSERT(type == StackFrame::EXIT || type == StackFrame::EXIT_DEBUG);

  // Set up the frame structure on the stack.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(ebp);
  mov(ebp, Operand(esp));

  // Reserve room for the entry stack pointer and push the debug marker.
  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  push(Immediate(type == StackFrame::EXIT_DEBUG ? 1 : 0));

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  ExternalReference context_address(Top::k_context_address);
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);

  // Set up argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, Operand(eax));
  lea(esi, Operand(ebp, eax, times_4, offset));

#ifdef ENABLE_DEBUGGER_SUPPORT
  // Save the state of all registers to the stack from the memory
  // location. This is needed to allow nested break points.
  if (type == StackFrame::EXIT_DEBUG) {
    // TODO(1243899): This should be symmetric to
    // CopyRegistersFromStackToMemory() but it isn't! esp is assumed
    // correct here, but computed for the other call. Very error
    // prone! FIX THIS. Actually there are deeper problems with
    // register saving than this asymmetry (see the bug report
    // associated with this issue).
    PushRegistersFromMemory(kJSCallerSaved);
  }
#endif

  // Reserve space for two arguments: argc and argv.
  sub(Operand(esp), Immediate(2 * kPointerSize));

  // Get the required frame alignment for the OS.
  static const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}


void MacroAssembler::LeaveExitFrame(StackFrame::Type type) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Restore the memory copy of the registers by digging them out from
  // the stack. This is needed to allow nested break points.
  if (type == StackFrame::EXIT_DEBUG) {
    // It's okay to clobber register ebx below because we don't need
    // the function pointer after this.
    const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
    int kOffset = ExitFrameConstants::kDebugMarkOffset - kCallerSavedSize;
    lea(ebx, Operand(ebp, kOffset));
    CopyRegistersFromStackToMemory(ebx, ecx, kJSCallerSaved);
  }
#endif

  // Get the return address from the stack and restore the frame pointer.
  mov(ecx, Operand(ebp, 1 * kPointerSize));
  mov(ebp, Operand(ebp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  lea(esp, Operand(esi, 1 * kPointerSize));

  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Top::k_context_address);
  mov(esi, Operand::StaticVariable(context_address));
#ifdef DEBUG
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Push the return address to get ready to return.
  push(ecx);

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}


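// Sketch of the stack handler pushed below (slot order follows directly from
// the pushes; the field names live in StackHandlerConstants and are not
// spelled out here):
//   esp + 0  : next handler (old value of Top::k_handler_address)
//   esp + 4  : saved ebp, or NULL when entering from C
//   esp + 8  : handler state (TRY_CATCH, TRY_FINALLY or ENTRY)
//   esp + 12 : return address, already on the stack on entry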
void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  // Adjust this code if StackHandlerConstants::kSize changes.
  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
  // The pc (return address) is already on TOS.
  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      push(Immediate(StackHandler::TRY_CATCH));
    } else {
      push(Immediate(StackHandler::TRY_FINALLY));
    }
    push(ebp);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The frame pointer does not point to a JS frame so we save NULL
    // for ebp. We expect the code throwing an exception to check ebp
    // before dereferencing it to restore the context.
    push(Immediate(StackHandler::ENTRY));
    push(Immediate(0));  // NULL frame pointer.
  }
  // Save the current handler as the next handler.
  push(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
  // Link this handler as the new current one.
  mov(Operand::StaticVariable(ExternalReference(Top::k_handler_address)), esp);
}


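// Walk the prototype chain from 'object' to 'holder', comparing each map
// against its statically known value, and jump to 'miss' on any mismatch or
// failed global-proxy access check. Returns the register that holds the
// holder at the end of the walk.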
Register MacroAssembler::CheckMaps(JSObject* object, Register object_reg,
                                   JSObject* holder, Register holder_reg,
                                   Register scratch,
                                   Label* miss) {
  // Make sure there's no overlap between scratch and the other
  // registers.
  ASSERT(!scratch.is(object_reg) && !scratch.is(holder_reg));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 1;

  // Traverse the prototype chain from the object and do map checks.
  while (object != holder) {
    depth++;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

    JSObject* prototype = JSObject::cast(object->GetPrototype());
    if (Heap::InNewSpace(prototype)) {
      // Get the map of the current object.
      mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
      cmp(Operand(scratch), Immediate(Handle<Map>(object->map())));
      // Branch on the result of the map check.
      j(not_equal, miss, not_taken);
      // Check access rights to the global object. This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (object->IsJSGlobalProxy()) {
        CheckAccessGlobalProxy(reg, scratch, miss);

        // Restore the scratch register to be the map of the object.
        // We load the prototype from the map in the scratch register.
        mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
      }
      // The prototype is in new space; we cannot store a reference
      // to it in the code. Load it from the map.
      reg = holder_reg;  // From now on the object is in holder_reg.
      mov(reg, FieldOperand(scratch, Map::kPrototypeOffset));
    } else {
      // Check the map of the current object.
      cmp(FieldOperand(reg, HeapObject::kMapOffset),
          Immediate(Handle<Map>(object->map())));
      // Branch on the result of the map check.
      j(not_equal, miss, not_taken);
      // Check access rights to the global object. This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (object->IsJSGlobalProxy()) {
        CheckAccessGlobalProxy(reg, scratch, miss);
      }
      // The prototype is in old space; load it directly.
      reg = holder_reg;  // From now on the object is in holder_reg.
      mov(reg, Handle<JSObject>(prototype));
    }

    // Go to the next object in the prototype chain.
    object = prototype;
  }

  // Check the holder map.
  cmp(FieldOperand(reg, HeapObject::kMapOffset),
      Immediate(Handle<Map>(holder->map())));
  j(not_equal, miss, not_taken);

  // Log the check depth.
  LOG(IntEvent("check-maps-depth", depth));

  // Perform the security check for access to the global object and return
  // the holder register.
  ASSERT(object == holder);
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
  if (object->IsJSGlobalProxy()) {
    CheckAccessGlobalProxy(reg, scratch, miss);
  }
  return reg;
}


void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));

  // Load the current lexical context from the stack frame.
  mov(scratch, Operand(ebp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (FLAG_debug_code) {
    cmp(Operand(scratch), Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
  }
  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, offset));
  mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check that the context is a global context.
  if (FLAG_debug_code) {
    push(scratch);
    // Read the first word and compare to global_context_map.
    mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
    cmp(scratch, Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(scratch);
  }

  // Check if both contexts are the same.
  cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  j(equal, &same_contexts, taken);

  // Compare security tokens. Save holder_reg on the stack so we can use it
  // as a temporary register.
  //
  // TODO(119): avoid push(holder_reg)/pop(holder_reg)
  push(holder_reg);
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));

  // Check that the context is a global context.
  if (FLAG_debug_code) {
    cmp(holder_reg, Factory::null_value());
    Check(not_equal, "JSGlobalProxy::context() should not be null.");

    push(holder_reg);
    // Read the first word and compare to global_context_map.
    mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    cmp(holder_reg, Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);
  }

  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, token_offset));
  cmp(scratch, FieldOperand(holder_reg, token_offset));
  pop(holder_reg);
  j(not_equal, miss, not_taken);

  bind(&same_contexts);
}


void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register result_end,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    ASSERT(scratch.is(no_reg));
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    cmp(result, Operand::StaticVariable(new_space_allocation_top));
    Check(equal, "Unexpected allocation top");
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available.
  if (scratch.is(no_reg)) {
    mov(result, Operand::StaticVariable(new_space_allocation_top));
  } else {
    ASSERT(!scratch.is(result_end));
    mov(Operand(scratch), Immediate(new_space_allocation_top));
    mov(result, Operand(scratch, 0));
  }
}


void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Update new top. Use scratch if available.
  if (scratch.is(no_reg)) {
    mov(Operand::StaticVariable(new_space_allocation_top), result_end);
  } else {
    mov(Operand(scratch, 0), result_end);
  }
}


void MacroAssembler::AllocateInNewSpace(int object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  lea(result_end, Operand(result, object_size));
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required, not_taken);

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    or_(Operand(result), Immediate(kHeapObjectTag));
  }
}
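
// Illustrative use of the fixed-size overload above (a sketch; HeapNumber
// and its constants are defined elsewhere in V8 and are assumed here):
//   Label gc_required;
//   AllocateInNewSpace(HeapNumber::kSize, eax, ebx, no_reg,
//                      &gc_required, TAG_OBJECT);
//   // On the fall-through path eax holds a tagged pointer to kSize bytes
//   // of freshly reserved new space; the map still has to be installed.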


void MacroAssembler::AllocateInNewSpace(int header_size,
                                        ScaleFactor element_size,
                                        Register element_count,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  lea(result_end, Operand(result, element_count, element_size, header_size));
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    or_(Operand(result), Immediate(kHeapObjectTag));
  }
}


void MacroAssembler::AllocateInNewSpace(Register object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  if (!object_size.is(result_end)) {
    mov(result_end, object_size);
  }
  add(result_end, Operand(result));
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required, not_taken);

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    or_(Operand(result), Immediate(kHeapObjectTag));
  }
}


void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Make sure the object has no tag before resetting top.
  and_(Operand(object), Immediate(~kHeapObjectTagMask));
#ifdef DEBUG
  cmp(object, Operand::StaticVariable(new_space_allocation_top));
  Check(below, "Undo allocation of non allocated memory");
#endif
  mov(Operand::StaticVariable(new_space_allocation_top), object);
}


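// The three NegativeZeroTest variants below all answer the same question
// after an integer multiplication whose truncated result is in 'result': if
// the result is zero but an operand was negative, the exact product was -0,
// which a tagged smi zero cannot represent, so control branches to the given
// target to take a slower path.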
void MacroAssembler::NegativeZeroTest(CodeGenerator* cgen,
                                      Register result,
                                      Register op,
                                      JumpTarget* then_target) {
  JumpTarget ok;
  test(result, Operand(result));
  ok.Branch(not_zero, taken);
  test(op, Operand(op));
  then_target->Branch(sign, not_taken);
  ok.Bind();
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);
  test(op, Operand(op));
  j(sign, then_label, not_taken);
  bind(&ok);
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);
  mov(scratch, Operand(op1));
  or_(scratch, Operand(op2));
  j(sign, then_label, not_taken);
  bind(&ok);
}


void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss) {
  // Check that the function isn't a smi.
  test(function, Immediate(kSmiTagMask));
  j(zero, miss, not_taken);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss, not_taken);

  // Make sure that the function has an instance prototype.
  Label non_instance;
  movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
  test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance, not_taken);

  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(Operand(result), Immediate(Factory::the_hole_value()));
  j(equal, miss, not_taken);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: fetch the prototype from the constructor field
  // in the initial map.
  bind(&non_instance);
  mov(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}


void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  call(stub->GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    add(Operand(esp), Immediate(num_arguments * kPointerSize));
  }
  mov(eax, Immediate(Factory::undefined_value()));
}


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, check that the actual number of arguments matches the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  Runtime::FunctionId function_id =
      static_cast<Runtime::FunctionId>(f->stub_id);
  RuntimeStub stub(function_id, num_arguments);
  CallStub(&stub);
}


void MacroAssembler::TailCallRuntime(const ExternalReference& ext,
                                     int num_arguments,
                                     int result_size) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  JumpToRuntime(ext);
}


void MacroAssembler::JumpToRuntime(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces(1);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}


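// Shared prologue for the InvokeCode/InvokeFunction family below. Register
// conventions, as used by the code itself: eax holds the actual argument
// count, ebx the expected count, and edx the code to invoke. When the counts
// are known to match, the adaptor is skipped entirely; otherwise control is
// transferred to the ArgumentsAdaptorTrampoline builtin.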
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    const Operand& code_operand,
                                    Label* done,
                                    InvokeFlag flag) {
  bool definitely_matches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(eax, actual.immediate());
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip the adaptation code by making
        // it look like we have a match between the expected and
        // actual number of arguments.
        definitely_matches = true;
      } else {
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in a register, actual is an immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      ASSERT(expected.reg().is(ebx));
      mov(eax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), Operand(actual.reg()));
      j(equal, &invoke);
      ASSERT(actual.reg().is(eax));
      ASSERT(expected.reg().is(ebx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
    if (!code_constant.is_null()) {
      mov(edx, Immediate(code_constant));
      add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_operand.is_reg(edx)) {
      mov(edx, code_operand);
    }

    if (flag == CALL_FUNCTION) {
      call(adaptor, RelocInfo::CODE_TARGET);
      jmp(done);
    } else {
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}


void MacroAssembler::InvokeCode(const Operand& code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag) {
  Label done;
  InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code);
  }
  bind(&done);
}


void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag) {
  Label done;
  Operand dummy(eax);
  InvokePrologue(expected, actual, code, dummy, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code, rmode);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code, rmode);
  }
  bind(&done);
}


void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(fun.is(edi));
  mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset));
  lea(edx, FieldOperand(edx, Code::kHeaderSize));

  ParameterCount expected(ebx);
  InvokeCode(Operand(edx), expected, actual, flag);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
  bool resolved;
  Handle<Code> code = ResolveBuiltin(id, &resolved);

  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments matches the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  InvokeCode(Handle<Code>(code), expected, expected,
             RelocInfo::CODE_TARGET, flag);

  const char* name = Builtins::GetName(id);
  int argc = Builtins::GetArgumentsCount(id);

  if (!resolved) {
    uint32_t flags =
        Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
        Bootstrapper::FixupFlagsIsPCRelative::encode(true) |
        Bootstrapper::FixupFlagsUseCodeObject::encode(false);
    Unresolved entry = { pc_offset() - sizeof(int32_t), flags, name };
    unresolved_.Add(entry);
  }
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  bool resolved;
  Handle<Code> code = ResolveBuiltin(id, &resolved);

  const char* name = Builtins::GetName(id);
  int argc = Builtins::GetArgumentsCount(id);

  mov(Operand(target), Immediate(code));
  if (!resolved) {
    uint32_t flags =
        Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
        Bootstrapper::FixupFlagsIsPCRelative::encode(false) |
        Bootstrapper::FixupFlagsUseCodeObject::encode(true);
    Unresolved entry = { pc_offset() - sizeof(int32_t), flags, name };
    unresolved_.Add(entry);
  }
  add(Operand(target), Immediate(Code::kHeaderSize - kHeapObjectTag));
}


Handle<Code> MacroAssembler::ResolveBuiltin(Builtins::JavaScript id,
                                            bool* resolved) {
  // Move the builtin function into the temporary function slot by
  // reading it from the builtins object. NOTE: We should be able to
  // reduce this to two instructions by putting the function table in
  // the global object instead of the "builtins" object and by using a
  // real register for the function.
  mov(edx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  mov(edx, FieldOperand(edx, GlobalObject::kBuiltinsOffset));
  int builtins_offset =
      JSBuiltinsObject::kJSBuiltinsOffset + (id * kPointerSize);
  mov(edi, FieldOperand(edx, builtins_offset));

  return Builtins::GetCode(id, resolved);
}


void MacroAssembler::Ret() {
  ret(0);
}


void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      inc(operand);
    } else {
      add(operand, Immediate(value));
    }
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      dec(operand);
    } else {
      sub(operand, Immediate(value));
    }
  }
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code) Check(cc, msg);
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L, taken);
  Abort(msg);
  // will not return here
  bind(&L);
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string as a smi to avoid GC problems; however,
  // msg is not guaranteed to be properly aligned. Instead, we pass an
  // aligned pointer that is a proper v8 smi, and also pass the alignment
  // difference from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
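  // Illustrative example (addresses assumed): with msg at 0x08049f03,
  // p0 becomes 0x08049f02 (an even word, hence a valid smi on ia32) and
  // the difference 1 is passed separately as Smi::FromInt(1).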
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  push(eax);
  push(Immediate(p0));
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
}


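// Illustrative use of CodePatcher (a sketch; the masm() accessor is assumed
// to be declared in the corresponding header):
//   CodePatcher patcher(pc, 1);
//   patcher.masm()->int3();  // Replace one byte at pc with a breakpoint.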
CodePatcher::CodePatcher(byte* address, int size)
    : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to
  // patch. The size is adjusted with kGap in order for the assembler to
  // generate size bytes of instructions without failing with buffer size
  // constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that the code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


} }  // namespace v8::internal