// Copyright 2006-2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "debug.h"
#include "runtime.h"
#include "serialize.h"

namespace v8 {
namespace internal {

// -------------------------------------------------------------------------
// MacroAssembler implementation.

MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      unresolved_(0),
      generating_stub_(false),
      allow_stub_calls_(true),
      code_object_(Heap::undefined_value()) {
}


static void RecordWriteHelper(MacroAssembler* masm,
                              Register object,
                              Register addr,
                              Register scratch) {
  Label fast;

  // Compute the page start address from the heap object pointer, and reuse
  // the 'object' register for it.
  masm->and_(object, ~Page::kPageAlignmentMask);
  Register page_start = object;

  // Compute the bit addr in the remembered set/index of the pointer in the
  // page. Reuse 'addr' as pointer_offset.
  masm->sub(addr, Operand(page_start));
  masm->shr(addr, kObjectAlignmentBits);
  Register pointer_offset = addr;

  // If the bit offset lies beyond the normal remembered set range, it is in
  // the extra remembered set area of a large object.
  masm->cmp(pointer_offset, Page::kPageSize / kPointerSize);
  masm->j(less, &fast);

  // Adjust 'page_start' so that addressing using 'pointer_offset' hits the
  // extra remembered set after the large object.

  // Find the length of the large object (FixedArray).
  masm->mov(scratch, Operand(page_start, Page::kObjectStartOffset
                                         + FixedArray::kLengthOffset));
  Register array_length = scratch;

  // Extra remembered set starts right after the large object (a FixedArray),
  // at page_start + kObjectStartOffset + objectSize
  // where objectSize is FixedArray::kHeaderSize + kPointerSize * array_length.
  // Add the delta between the end of the normal RSet and the start of the
  // extra RSet to 'page_start', so that addressing the bit using
  // 'pointer_offset' hits the extra RSet words.
  masm->lea(page_start,
            Operand(page_start, array_length, times_pointer_size,
                    Page::kObjectStartOffset + FixedArray::kHeaderSize
                    - Page::kRSetEndOffset));

  // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
  // to limit code size. We should probably evaluate this decision by
  // measuring the performance of an equivalent implementation using
  // "simpler" instructions.
  masm->bind(&fast);
  masm->bts(Operand(page_start, Page::kRSetOffset), pointer_offset);
}


class RecordWriteStub : public CodeStub {
 public:
  RecordWriteStub(Register object, Register addr, Register scratch)
      : object_(object), addr_(addr), scratch_(scratch) { }

  void Generate(MacroAssembler* masm);

 private:
  Register object_;
  Register addr_;
  Register scratch_;

#ifdef DEBUG
  void Print() {
    PrintF("RecordWriteStub (object reg %d), (addr reg %d), (scratch reg %d)\n",
           object_.code(), addr_.code(), scratch_.code());
  }
#endif

  // Minor key encoding in 12 bits of three registers (object, address and
  // scratch) OOOOAAAASSSS.
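  // For example (illustrative, using the standard ia32 register codes eax=0,
  // ecx=1, edx=2, ebx=3): object in ebx, address in ecx and scratch in edx
  // encode as ObjectBits 3, AddressBits 1, ScratchBits 2, i.e. key 0x312.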
  class ScratchBits: public BitField<uint32_t, 0, 4> {};
  class AddressBits: public BitField<uint32_t, 4, 4> {};
  class ObjectBits: public BitField<uint32_t, 8, 4> {};

  Major MajorKey() { return RecordWrite; }

  int MinorKey() {
    // Encode the registers.
    return ObjectBits::encode(object_.code()) |
           AddressBits::encode(addr_.code()) |
           ScratchBits::encode(scratch_.code());
  }
};


void RecordWriteStub::Generate(MacroAssembler* masm) {
  RecordWriteHelper(masm, object_, addr_, scratch_);
  masm->ret(0);
}


// Set the remembered set bit for [object+offset].
// object is the object being stored into, value is the object being stored.
// If offset is zero, then the scratch register contains the array index into
// the elements array represented as a Smi.
// All registers are clobbered by the operation.
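// A typical call (sketch): right after "mov(FieldOperand(object, offset),
// value)" the caller issues RecordWrite(object, offset, value, scratch) so
// the GC can later find the updated slot through the remembered set.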
void MacroAssembler::RecordWrite(Register object, int offset,
                                 Register value, Register scratch) {
  // First, check if a remembered set write is even needed. The tests below
  // catch stores of Smis and stores into young gen (which does not have space
  // for the remembered set bits).
  Label done;

  // Skip barrier if writing a smi.
  ASSERT_EQ(0, kSmiTag);
  test(value, Immediate(kSmiTagMask));
  j(zero, &done);

  if (Serializer::enabled()) {
    // Can't do arithmetic on external references if it might get serialized.
    mov(value, Operand(object));
    and_(value, Heap::NewSpaceMask());
    cmp(Operand(value), Immediate(ExternalReference::new_space_start()));
    j(equal, &done);
  } else {
    int32_t new_space_start = reinterpret_cast<int32_t>(
        ExternalReference::new_space_start().address());
    lea(value, Operand(object, -new_space_start));
    and_(value, Heap::NewSpaceMask());
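    // and_ sets the zero flag, so the j(equal) below is taken exactly when
    // the masked offset is zero, i.e. 'object' lies inside new space and no
    // remembered set write is needed.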
    j(equal, &done);
  }

  if ((offset > 0) && (offset < Page::kMaxHeapObjectSize)) {
    // Compute the bit offset in the remembered set, leave it in 'value'.
    lea(value, Operand(object, offset));
    and_(value, Page::kPageAlignmentMask);
    shr(value, kPointerSizeLog2);

    // Compute the page address from the heap object pointer, leave it in
    // 'object'.
    and_(object, ~Page::kPageAlignmentMask);

    // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
    // to limit code size. We should probably evaluate this decision by
    // measuring the performance of an equivalent implementation using
    // "simpler" instructions.
    bts(Operand(object, Page::kRSetOffset), value);
  } else {
    Register dst = scratch;
    if (offset != 0) {
      lea(dst, Operand(object, offset));
    } else {
      // array access: calculate the destination address in the same manner as
      // KeyedStoreIC::GenerateGeneric. Multiply a smi by 2 to get an offset
      // into an array of words.
      ASSERT_EQ(1, kSmiTagSize);
      ASSERT_EQ(0, kSmiTag);
      lea(dst, Operand(object, dst, times_half_pointer_size,
                       FixedArray::kHeaderSize - kHeapObjectTag));
    }
    // If we are already generating a shared stub, not inlining the
    // record write code isn't going to save us any memory.
    if (generating_stub()) {
      RecordWriteHelper(this, object, dst, value);
    } else {
      RecordWriteStub stub(object, dst, value);
      CallStub(&stub);
    }
  }

  bind(&done);
}


#ifdef ENABLE_DEBUGGER_SUPPORT
void MacroAssembler::SaveRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of registers to memory location.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(Operand::StaticVariable(reg_addr), reg);
    }
  }
}


void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of memory location to registers.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(reg, Operand::StaticVariable(reg_addr));
    }
  }
}


void MacroAssembler::PushRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Push the content of the memory location to the stack.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      push(Operand::StaticVariable(reg_addr));
    }
  }
}


void MacroAssembler::PopRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Pop the content from the stack to the memory location.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      pop(Operand::StaticVariable(reg_addr));
    }
  }
}


void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
                                                    Register scratch,
                                                    RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of the stack to the memory location and adjust base.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      mov(scratch, Operand(base, 0));
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(Operand::StaticVariable(reg_addr), scratch);
      lea(base, Operand(base, kPointerSize));
    }
  }
}
#endif

void MacroAssembler::Set(Register dst, const Immediate& x) {
  if (x.is_zero()) {
    xor_(dst, Operand(dst));  // shorter than mov
  } else {
    mov(dst, x);
  }
}


void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}


void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}


void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       static_cast<int8_t>(type));
}


void MacroAssembler::FCmp() {
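  // fucomip (P6 and later; the CPUID CMOV bit also covers the fcomi family
  // when an FPU is present) sets EFLAGS directly, while the fallback routes
  // the FPU status word through eax via fnstsw/sahf.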
  if (CpuFeatures::IsSupported(CpuFeatures::CMOV)) {
    fucomip();
    ffree(0);
    fincstp();
  } else {
    fucompp();
    push(eax);
    fnstsw_ax();
    sahf();
    pop(eax);
  }
}


void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, Operand(esp));
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(CodeObject()));
  if (FLAG_debug_code) {
    cmp(Operand(esp, 0), Immediate(Factory::undefined_value()));
    Check(not_equal, "code object not properly patched");
  }
}


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (FLAG_debug_code) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, "stack frame types must match");
  }
  leave();
}


void MacroAssembler::EnterExitFrame(StackFrame::Type type) {
  ASSERT(type == StackFrame::EXIT || type == StackFrame::EXIT_DEBUG);

  // Setup the frame structure on the stack.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(ebp);
  mov(ebp, Operand(esp));

  // Reserve room for entry stack pointer and push the debug marker.
  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // saved entry sp, patched before call
  push(Immediate(type == StackFrame::EXIT_DEBUG ? 1 : 0));

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  ExternalReference context_address(Top::k_context_address);
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);

  // Setup argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, Operand(eax));
  lea(esi, Operand(ebp, eax, times_4, offset));

#ifdef ENABLE_DEBUGGER_SUPPORT
  // Save the state of all registers to the stack from the memory
  // location. This is needed to allow nested break points.
  if (type == StackFrame::EXIT_DEBUG) {
    // TODO(1243899): This should be symmetric to
    // CopyRegistersFromStackToMemory() but it isn't! esp is assumed
    // correct here, but computed for the other call. Very error
    // prone! FIX THIS. Actually there are deeper problems with
    // register saving than this asymmetry (see the bug report
    // associated with this issue).
    PushRegistersFromMemory(kJSCallerSaved);
  }
#endif

  // Reserve space for two arguments: argc and argv.
  sub(Operand(esp), Immediate(2 * kPointerSize));

  // Get the required frame alignment for the OS.
  static const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}


void MacroAssembler::LeaveExitFrame(StackFrame::Type type) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Restore the memory copy of the registers by digging them out from
  // the stack. This is needed to allow nested break points.
  if (type == StackFrame::EXIT_DEBUG) {
    // It's okay to clobber register ebx below because we don't need
    // the function pointer after this.
    const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
    int kOffset = ExitFrameConstants::kDebugMarkOffset - kCallerSavedSize;
    lea(ebx, Operand(ebp, kOffset));
    CopyRegistersFromStackToMemory(ebx, ecx, kJSCallerSaved);
  }
#endif

  // Get the return address from the stack and restore the frame pointer.
  mov(ecx, Operand(ebp, 1 * kPointerSize));
  mov(ebp, Operand(ebp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  lea(esp, Operand(esi, 1 * kPointerSize));

  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Top::k_context_address);
  mov(esi, Operand::StaticVariable(context_address));
#ifdef DEBUG
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Push the return address to get ready to return.
  push(ecx);

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}


void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  // Adjust this code if not the case.
  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
  // The pc (return address) is already on TOS.
  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      push(Immediate(StackHandler::TRY_CATCH));
    } else {
      push(Immediate(StackHandler::TRY_FINALLY));
    }
    push(ebp);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The frame pointer does not point to a JS frame so we save NULL
    // for ebp. We expect the code throwing an exception to check ebp
    // before dereferencing it to restore the context.
    push(Immediate(StackHandler::ENTRY));
    push(Immediate(0));  // NULL frame pointer.
  }
  // Save the current handler as the next handler.
  push(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
  // Link this handler as the new current one.
  mov(Operand::StaticVariable(ExternalReference(Top::k_handler_address)), esp);
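  // The handler is now complete; starting at esp it holds the next-handler
  // link, the saved ebp (or NULL for a JS entry frame), the handler state,
  // and the return pc that was already on TOS: the four words asserted above.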
}


Register MacroAssembler::CheckMaps(JSObject* object, Register object_reg,
                                   JSObject* holder, Register holder_reg,
                                   Register scratch,
                                   Label* miss) {
  // Make sure there's no overlap between scratch and the other
  // registers.
  ASSERT(!scratch.is(object_reg) && !scratch.is(holder_reg));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 1;

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  while (object != holder) {
    depth++;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

    JSObject* prototype = JSObject::cast(object->GetPrototype());
    if (Heap::InNewSpace(prototype)) {
      // Get the map of the current object.
      mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
      cmp(Operand(scratch), Immediate(Handle<Map>(object->map())));
      // Branch on the result of the map check.
      j(not_equal, miss, not_taken);
      // Check access rights to the global object. This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (object->IsJSGlobalProxy()) {
        CheckAccessGlobalProxy(reg, scratch, miss);

        // Restore scratch register to be the map of the object.
        // We load the prototype from the map in the scratch register.
        mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
      }
      // The prototype is in new space; we cannot store a reference
      // to it in the code. Load it from the map.
      reg = holder_reg;  // from now the object is in holder_reg
      mov(reg, FieldOperand(scratch, Map::kPrototypeOffset));

    } else {
      // Check the map of the current object.
      cmp(FieldOperand(reg, HeapObject::kMapOffset),
          Immediate(Handle<Map>(object->map())));
      // Branch on the result of the map check.
      j(not_equal, miss, not_taken);
      // Check access rights to the global object. This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (object->IsJSGlobalProxy()) {
        CheckAccessGlobalProxy(reg, scratch, miss);
      }
      // The prototype is in old space; load it directly.
      reg = holder_reg;  // from now the object is in holder_reg
      mov(reg, Handle<JSObject>(prototype));
    }

    // Go to the next object in the prototype chain.
    object = prototype;
  }

  // Check the holder map.
  cmp(FieldOperand(reg, HeapObject::kMapOffset),
      Immediate(Handle<Map>(holder->map())));
  j(not_equal, miss, not_taken);

  // Log the check depth.
  LOG(IntEvent("check-maps-depth", depth));

  // Perform security check for access to the global object and return
  // the holder register.
  ASSERT(object == holder);
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
  if (object->IsJSGlobalProxy()) {
    CheckAccessGlobalProxy(reg, scratch, miss);
  }
  return reg;
}


void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));

  // Load current lexical context from the stack frame.
  mov(scratch, Operand(ebp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (FLAG_debug_code) {
    cmp(Operand(scratch), Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
  }
  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, offset));
  mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    push(scratch);
    // Read the first word and compare to global_context_map.
    mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
    cmp(scratch, Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(scratch);
  }

  // Check if both contexts are the same.
  cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  j(equal, &same_contexts, taken);

  // Compare security tokens, save holder_reg on the stack so we can use it
  // as a temporary register.
  //
  // TODO(119): avoid push(holder_reg)/pop(holder_reg)
  push(holder_reg);
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    cmp(holder_reg, Factory::null_value());
    Check(not_equal, "JSGlobalProxy::context() should not be null.");

    push(holder_reg);
    // Read the first word and compare to global_context_map().
    mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    cmp(holder_reg, Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);
  }

  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, token_offset));
  cmp(scratch, FieldOperand(holder_reg, token_offset));
  pop(holder_reg);
  j(not_equal, miss, not_taken);

  bind(&same_contexts);
}


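// New-space allocation in the helpers below is a simple bump pointer: load
// the current allocation top, have the caller check the proposed new top
// against the allocation limit, then write the new top back.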
void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register result_end,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    ASSERT(scratch.is(no_reg));
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    cmp(result, Operand::StaticVariable(new_space_allocation_top));
    Check(equal, "Unexpected allocation top");
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available.
  if (scratch.is(no_reg)) {
    mov(result, Operand::StaticVariable(new_space_allocation_top));
  } else {
    ASSERT(!scratch.is(result_end));
    mov(Operand(scratch), Immediate(new_space_allocation_top));
    mov(result, Operand(scratch, 0));
  }
}


void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Update new top. Use scratch if available.
  if (scratch.is(no_reg)) {
    mov(Operand::StaticVariable(new_space_allocation_top), result_end);
  } else {
    mov(Operand(scratch, 0), result_end);
  }
}


void MacroAssembler::AllocateInNewSpace(int object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  lea(result_end, Operand(result, object_size));
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required, not_taken);

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    or_(Operand(result), Immediate(kHeapObjectTag));
  }
}


void MacroAssembler::AllocateInNewSpace(int header_size,
                                        ScaleFactor element_size,
                                        Register element_count,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  lea(result_end, Operand(result, element_count, element_size, header_size));
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    or_(Operand(result), Immediate(kHeapObjectTag));
  }
}


void MacroAssembler::AllocateInNewSpace(Register object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  if (!object_size.is(result_end)) {
    mov(result_end, object_size);
  }
  add(result_end, Operand(result));
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required, not_taken);

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    or_(Operand(result), Immediate(kHeapObjectTag));
  }
}


void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Make sure the object has no tag before resetting top.
  and_(Operand(object), Immediate(~kHeapObjectTagMask));
#ifdef DEBUG
  cmp(object, Operand::StaticVariable(new_space_allocation_top));
  Check(below, "Undo allocation of non allocated memory");
#endif
  mov(Operand::StaticVariable(new_space_allocation_top), object);
}


void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate heap number in new space.
  AllocateInNewSpace(HeapNumber::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::heap_number_map()));
}


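// The NegativeZeroTest variants below jump to the "then" target when
// 'result' is zero but an operand is negative, i.e. when an integer multiply
// such as -3 * 0 produced the integer 0 where the correct JS result is -0.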
void MacroAssembler::NegativeZeroTest(CodeGenerator* cgen,
                                      Register result,
                                      Register op,
                                      JumpTarget* then_target) {
  JumpTarget ok;
  test(result, Operand(result));
  ok.Branch(not_zero, taken);
  test(op, Operand(op));
  then_target->Branch(sign, not_taken);
  ok.Bind();
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);
  test(op, Operand(op));
  j(sign, then_label, not_taken);
  bind(&ok);
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);
  mov(scratch, Operand(op1));
  or_(scratch, Operand(op2));
  j(sign, then_label, not_taken);
  bind(&ok);
}


void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss) {
  // Check that the receiver isn't a smi.
  test(function, Immediate(kSmiTagMask));
  j(zero, miss, not_taken);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss, not_taken);

  // Make sure that the function has an instance prototype.
  Label non_instance;
  movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
  test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance, not_taken);

  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(Operand(result), Immediate(Factory::the_hole_value()));
  j(equal, miss, not_taken);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  mov(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}


void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // calls are not allowed in some stubs
  call(stub->GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    add(Operand(esp), Immediate(num_arguments * kPointerSize));
  }
  mov(eax, Immediate(Factory::undefined_value()));
}


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  Runtime::FunctionId function_id =
      static_cast<Runtime::FunctionId>(f->stub_id);
  RuntimeStub stub(function_id, num_arguments);
  CallStub(&stub);
}


void MacroAssembler::TailCallRuntime(const ExternalReference& ext,
                                     int num_arguments,
                                     int result_size) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  JumpToRuntime(ext);
}


void MacroAssembler::JumpToRuntime(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces(1);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    const Operand& code_operand,
                                    Label* done,
                                    InvokeFlag flag) {
  bool definitely_matches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(eax, actual.immediate());
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      ASSERT(expected.reg().is(ebx));
      mov(eax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), Operand(actual.reg()));
      j(equal, &invoke);
      ASSERT(actual.reg().is(eax));
      ASSERT(expected.reg().is(ebx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
    if (!code_constant.is_null()) {
      mov(edx, Immediate(code_constant));
      add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_operand.is_reg(edx)) {
      mov(edx, code_operand);
    }

    if (flag == CALL_FUNCTION) {
      call(adaptor, RelocInfo::CODE_TARGET);
      jmp(done);
    } else {
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}


void MacroAssembler::InvokeCode(const Operand& code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag) {
  Label done;
  InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code);
  }
  bind(&done);
}


void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag) {
  Label done;
  Operand dummy(eax);
  InvokePrologue(expected, actual, code, dummy, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code, rmode);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code, rmode);
  }
  bind(&done);
}


void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(fun.is(edi));
  mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset));
  lea(edx, FieldOperand(edx, Code::kHeaderSize));

  ParameterCount expected(ebx);
  InvokeCode(Operand(edx), expected, actual, flag);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
  bool resolved;
  Handle<Code> code = ResolveBuiltin(id, &resolved);

  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments match the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  InvokeCode(Handle<Code>(code), expected, expected,
             RelocInfo::CODE_TARGET, flag);

  const char* name = Builtins::GetName(id);
  int argc = Builtins::GetArgumentsCount(id);

  if (!resolved) {
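    // The invoke above ended with a 32-bit code target, so pc_offset() minus
    // sizeof(int32_t) marks the spot the bootstrapper patches once the
    // builtin has actually been compiled.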
    uint32_t flags =
        Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
        Bootstrapper::FixupFlagsUseCodeObject::encode(false);
    Unresolved entry = { pc_offset() - sizeof(int32_t), flags, name };
    unresolved_.Add(entry);
  }
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  bool resolved;
  Handle<Code> code = ResolveBuiltin(id, &resolved);

  const char* name = Builtins::GetName(id);
  int argc = Builtins::GetArgumentsCount(id);

  mov(Operand(target), Immediate(code));
  if (!resolved) {
    uint32_t flags =
        Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
        Bootstrapper::FixupFlagsUseCodeObject::encode(true);
    Unresolved entry = { pc_offset() - sizeof(int32_t), flags, name };
    unresolved_.Add(entry);
  }
  add(Operand(target), Immediate(Code::kHeaderSize - kHeapObjectTag));
}


Handle<Code> MacroAssembler::ResolveBuiltin(Builtins::JavaScript id,
                                            bool* resolved) {
  // Move the builtin function into the temporary function slot by
  // reading it from the builtins object. NOTE: We should be able to
  // reduce this to two instructions by putting the function table in
  // the global object instead of the "builtins" object and by using a
  // real register for the function.
  mov(edx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  mov(edx, FieldOperand(edx, GlobalObject::kBuiltinsOffset));
  int builtins_offset =
      JSBuiltinsObject::kJSBuiltinsOffset + (id * kPointerSize);
  mov(edi, FieldOperand(edx, builtins_offset));

  return Builtins::GetCode(id, resolved);
}


void MacroAssembler::Ret() {
  ret(0);
}


void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      inc(operand);
    } else {
      add(operand, Immediate(value));
    }
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      dec(operand);
    } else {
      sub(operand, Immediate(value));
    }
  }
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code) Check(cc, msg);
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L, taken);
  Abort(msg);
  // will not return here
  bind(&L);
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
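  // A worked instance (assuming ia32's kSmiTagMask == 1 and kSmiTag == 0):
  // p0 is p1 rounded down to an even address, so both p0 and the difference
  // p1 - p0 (0 or 1) travel through the runtime call as valid smis.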
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  push(eax);
  push(Immediate(p0));
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
}


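// A hedged usage sketch (the masm() accessor is assumed from the header, and
// the patch must emit exactly 'size' bytes to satisfy the destructor assert):
//   CodePatcher patcher(address, 1);
//   patcher.masm()->int3();  // int3 is a single byte, matching size == 1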
CodePatcher::CodePatcher(byte* address, int size)
    : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate
  // size bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


} }  // namespace v8::internal