// Copyright 2006-2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "debug.h"
#include "runtime.h"
#include "serialize.h"

namespace v8 {
namespace internal {

// -------------------------------------------------------------------------
// MacroAssembler implementation.

MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      unresolved_(0),
      generating_stub_(false),
      allow_stub_calls_(true),
      code_object_(Heap::undefined_value()) {
}


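// Helper to mark the remembered set for the slot 'addr' inside 'object':
// computes the page start, turns 'addr' into a bit index, redirects that
// index into the extra remembered set trailing a large FixedArray when it
// falls outside the normal remembered set range, and sets the bit. All
// three registers are clobbered.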
static void RecordWriteHelper(MacroAssembler* masm,
                              Register object,
                              Register addr,
                              Register scratch) {
  Label fast;

  // Compute the page start address from the heap object pointer, and reuse
  // the 'object' register for it.
  masm->and_(object, ~Page::kPageAlignmentMask);
  Register page_start = object;

  // Compute the bit addr in the remembered set/index of the pointer in the
  // page. Reuse 'addr' as pointer_offset.
  masm->sub(addr, Operand(page_start));
  masm->shr(addr, kObjectAlignmentBits);
  Register pointer_offset = addr;

  // If the bit offset lies beyond the normal remembered set range, it is in
  // the extra remembered set area of a large object.
  masm->cmp(pointer_offset, Page::kPageSize / kPointerSize);
  masm->j(less, &fast);

  // Adjust 'page_start' so that addressing using 'pointer_offset' hits the
  // extra remembered set after the large object.

  // Find the length of the large object (FixedArray).
  masm->mov(scratch, Operand(page_start, Page::kObjectStartOffset
                                         + FixedArray::kLengthOffset));
  Register array_length = scratch;

  // Extra remembered set starts right after the large object (a FixedArray),
  // at
  //   page_start + kObjectStartOffset + objectSize
  // where objectSize is FixedArray::kHeaderSize + kPointerSize * array_length.
  // Add the delta between the end of the normal RSet and the start of the
  // extra RSet to 'page_start', so that addressing the bit using
  // 'pointer_offset' hits the extra RSet words.
  masm->lea(page_start,
            Operand(page_start, array_length, times_pointer_size,
                    Page::kObjectStartOffset + FixedArray::kHeaderSize
                        - Page::kRSetEndOffset));

  // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
  // to limit code size. We should probably evaluate this decision by
  // measuring the performance of an equivalent implementation using
  // "simpler" instructions.
  masm->bind(&fast);
  masm->bts(Operand(page_start, Page::kRSetOffset), pointer_offset);
}


class RecordWriteStub : public CodeStub {
 public:
  RecordWriteStub(Register object, Register addr, Register scratch)
      : object_(object), addr_(addr), scratch_(scratch) { }

  void Generate(MacroAssembler* masm);

 private:
  Register object_;
  Register addr_;
  Register scratch_;

#ifdef DEBUG
  void Print() {
    PrintF("RecordWriteStub (object reg %d), (addr reg %d), (scratch reg %d)\n",
           object_.code(), addr_.code(), scratch_.code());
  }
#endif

  // Minor key encoding in 12 bits of three registers (object, address and
  // scratch) OOOOAAAASSSS.
  class ScratchBits: public BitField<uint32_t, 0, 4> {};
  class AddressBits: public BitField<uint32_t, 4, 4> {};
  class ObjectBits: public BitField<uint32_t, 8, 4> {};

  Major MajorKey() { return RecordWrite; }

  int MinorKey() {
    // Encode the registers.
    return ObjectBits::encode(object_.code()) |
           AddressBits::encode(addr_.code()) |
           ScratchBits::encode(scratch_.code());
  }
};


void RecordWriteStub::Generate(MacroAssembler* masm) {
  RecordWriteHelper(masm, object_, addr_, scratch_);
  masm->ret(0);
}


// Set the remembered set bit for [object+offset].
// object is the object being stored into, value is the object being stored.
// If offset is zero, then the scratch register contains the array index into
// the elements array represented as a Smi.
// All registers are clobbered by the operation.
void MacroAssembler::RecordWrite(Register object, int offset,
                                 Register value, Register scratch) {
  // First, check if a remembered set write is even needed. The tests below
  // catch stores of Smis and stores into young gen (which does not have space
  // for the remembered set bits).
  Label done;

  // Skip barrier if writing a smi.
  ASSERT_EQ(0, kSmiTag);
  test(value, Immediate(kSmiTagMask));
  j(zero, &done);

  if (Serializer::enabled()) {
    // Can't do arithmetic on external references if it might get serialized.
    mov(value, Operand(object));
    and_(value, Heap::NewSpaceMask());
    cmp(Operand(value), Immediate(ExternalReference::new_space_start()));
    j(equal, &done);
  } else {
    int32_t new_space_start = reinterpret_cast<int32_t>(
        ExternalReference::new_space_start().address());
    lea(value, Operand(object, -new_space_start));
    and_(value, Heap::NewSpaceMask());
    j(equal, &done);
  }

  if ((offset > 0) && (offset < Page::kMaxHeapObjectSize)) {
    // Compute the bit offset in the remembered set, leave it in 'value'.
    lea(value, Operand(object, offset));
    and_(value, Page::kPageAlignmentMask);
    shr(value, kPointerSizeLog2);

    // Compute the page address from the heap object pointer, leave it in
    // 'object'.
    and_(object, ~Page::kPageAlignmentMask);

    // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
    // to limit code size. We should probably evaluate this decision by
    // measuring the performance of an equivalent implementation using
    // "simpler" instructions.
    bts(Operand(object, Page::kRSetOffset), value);
  } else {
    Register dst = scratch;
    if (offset != 0) {
      lea(dst, Operand(object, offset));
    } else {
      // Array access: calculate the destination address in the same manner as
      // KeyedStoreIC::GenerateGeneric. Multiply a smi by 2 to get an offset
      // into an array of words.
      ASSERT_EQ(1, kSmiTagSize);
      ASSERT_EQ(0, kSmiTag);
      lea(dst, Operand(object, dst, times_half_pointer_size,
                       FixedArray::kHeaderSize - kHeapObjectTag));
    }
    // If we are already generating a shared stub, not inlining the
    // record write code isn't going to save us any memory.
    if (generating_stub()) {
      RecordWriteHelper(this, object, dst, value);
    } else {
      RecordWriteStub stub(object, dst, value);
      CallStub(&stub);
    }
  }

  bind(&done);
}


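// Branch to 'on_stack_overflow' if esp is below the current stack limit.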
void MacroAssembler::StackLimitCheck(Label* on_stack_overflow) {
  cmp(esp,
      Operand::StaticVariable(ExternalReference::address_of_stack_limit()));
  j(below, on_stack_overflow);
}


#ifdef ENABLE_DEBUGGER_SUPPORT
void MacroAssembler::SaveRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of registers to memory location.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(Operand::StaticVariable(reg_addr), reg);
    }
  }
}


void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of memory location to registers.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(reg, Operand::StaticVariable(reg_addr));
    }
  }
}


void MacroAssembler::PushRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Push the content of the memory location to the stack.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      push(Operand::StaticVariable(reg_addr));
    }
  }
}


void MacroAssembler::PopRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Pop the content from the stack to the memory location.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      pop(Operand::StaticVariable(reg_addr));
    }
  }
}


void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
                                                    Register scratch,
                                                    RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of the stack to the memory location and adjust base.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      mov(scratch, Operand(base, 0));
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(Operand::StaticVariable(reg_addr), scratch);
      lea(base, Operand(base, kPointerSize));
    }
  }
}
#endif

void MacroAssembler::Set(Register dst, const Immediate& x) {
  if (x.is_zero()) {
    xor_(dst, Operand(dst));  // shorter than mov
  } else {
    mov(dst, x);
  }
}


void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}


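// Compare the instance type of 'heap_object' against 'type'. As a side
// effect the object's map is loaded into 'map', which CmpInstanceType can
// then reuse directly.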
void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}


void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       static_cast<int8_t>(type));
}


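// Compare ST(0) with ST(1) and pop both, leaving the result in the CPU
// flags. When CMOV is supported, fucomip is available as well (both arrived
// with the P6 family) and compares straight into EFLAGS; older processors
// have to route the FPU status word through eax and sahf.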
void MacroAssembler::FCmp() {
  if (CpuFeatures::IsSupported(CMOV)) {
    fucomip();
    ffree(0);
    fincstp();
  } else {
    fucompp();
    push(eax);
    fnstsw_ax();
    sahf();
    pop(eax);
  }
}


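// Build a typed internal frame: saved ebp, context, frame type (as a smi)
// and the code object, with ebp left pointing at the saved frame pointer.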
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, Operand(esp));
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(CodeObject()));
  if (FLAG_debug_code) {
    cmp(Operand(esp, 0), Immediate(Factory::undefined_value()));
    Check(not_equal, "code object not properly patched");
  }
}


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (FLAG_debug_code) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, "stack frame types must match");
  }
  leave();
}

void MacroAssembler::EnterExitFramePrologue(ExitFrame::Mode mode) {
  // Set up the frame structure on the stack.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(ebp);
  mov(ebp, Operand(esp));

  // Reserve room for entry stack pointer and push the debug marker.
  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // saved entry sp, patched before call
  if (mode == ExitFrame::MODE_DEBUG) {
    push(Immediate(0));
  } else {
    push(Immediate(CodeObject()));
  }

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  ExternalReference context_address(Top::k_context_address);
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);
}

void MacroAssembler::EnterExitFrameEpilogue(ExitFrame::Mode mode, int argc) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Save the state of all registers to the stack from the memory
  // location. This is needed to allow nested break points.
  if (mode == ExitFrame::MODE_DEBUG) {
    // TODO(1243899): This should be symmetric to
    // CopyRegistersFromStackToMemory() but it isn't! esp is assumed
    // correct here, but computed for the other call. Very error
    // prone! FIX THIS. Actually there are deeper problems with
    // register saving than this asymmetry (see the bug report
    // associated with this issue).
    PushRegistersFromMemory(kJSCallerSaved);
  }
#endif

  // Reserve space for arguments.
  sub(Operand(esp), Immediate(argc * kPointerSize));

  // Get the required frame alignment for the OS.
  static const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}


void MacroAssembler::EnterExitFrame(ExitFrame::Mode mode) {
  EnterExitFramePrologue(mode);

  // Set up argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, Operand(eax));
  lea(esi, Operand(ebp, eax, times_4, offset));

  EnterExitFrameEpilogue(mode, 2);
}


void MacroAssembler::EnterApiExitFrame(ExitFrame::Mode mode,
                                       int stack_space,
                                       int argc) {
  EnterExitFramePrologue(mode);

  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  lea(esi, Operand(ebp, (stack_space * kPointerSize) + offset));

  EnterExitFrameEpilogue(mode, argc);
}


void MacroAssembler::LeaveExitFrame(ExitFrame::Mode mode) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Restore the memory copy of the registers by digging them out from
  // the stack. This is needed to allow nested break points.
  if (mode == ExitFrame::MODE_DEBUG) {
    // It's okay to clobber register ebx below because we don't need
    // the function pointer after this.
    const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
    int kOffset = ExitFrameConstants::kCodeOffset - kCallerSavedSize;
    lea(ebx, Operand(ebp, kOffset));
    CopyRegistersFromStackToMemory(ebx, ecx, kJSCallerSaved);
  }
#endif

  // Get the return address from the stack and restore the frame pointer.
  mov(ecx, Operand(ebp, 1 * kPointerSize));
  mov(ebp, Operand(ebp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  lea(esp, Operand(esi, 1 * kPointerSize));

  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Top::k_context_address);
  mov(esi, Operand::StaticVariable(context_address));
#ifdef DEBUG
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Push the return address to get ready to return.
  push(ecx);

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}


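// Push a new stack handler (handler type, frame pointer and a link to the
// previous handler) and make it the current one in Top::k_handler_address.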
void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  // Adjust this code if not the case.
  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
  // The pc (return address) is already on TOS.
  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      push(Immediate(StackHandler::TRY_CATCH));
    } else {
      push(Immediate(StackHandler::TRY_FINALLY));
    }
    push(ebp);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The frame pointer does not point to a JS frame so we save NULL
    // for ebp. We expect the code throwing an exception to check ebp
    // before dereferencing it to restore the context.
    push(Immediate(StackHandler::ENTRY));
    push(Immediate(0));  // NULL frame pointer.
  }
  // Save the current handler as the next handler.
  push(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
  // Link this handler as the new current one.
  mov(Operand::StaticVariable(ExternalReference(Top::k_handler_address)), esp);
}


Register MacroAssembler::CheckMaps(JSObject* object, Register object_reg,
                                   JSObject* holder, Register holder_reg,
                                   Register scratch,
                                   Label* miss) {
  // Make sure there's no overlap between scratch and the other
  // registers.
  ASSERT(!scratch.is(object_reg) && !scratch.is(holder_reg));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 1;

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  while (object != holder) {
    depth++;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

    JSObject* prototype = JSObject::cast(object->GetPrototype());
    if (Heap::InNewSpace(prototype)) {
      // Get the map of the current object.
      mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
      cmp(Operand(scratch), Immediate(Handle<Map>(object->map())));
      // Branch on the result of the map check.
      j(not_equal, miss, not_taken);
      // Check access rights to the global object. This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (object->IsJSGlobalProxy()) {
        CheckAccessGlobalProxy(reg, scratch, miss);

        // Restore scratch register to be the map of the object.
        // We load the prototype from the map in the scratch register.
        mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
      }
      // The prototype is in new space; we cannot store a reference
      // to it in the code. Load it from the map.
      reg = holder_reg;  // From now on the object is in holder_reg.
      mov(reg, FieldOperand(scratch, Map::kPrototypeOffset));
    } else {
      // Check the map of the current object.
      cmp(FieldOperand(reg, HeapObject::kMapOffset),
          Immediate(Handle<Map>(object->map())));
      // Branch on the result of the map check.
      j(not_equal, miss, not_taken);
      // Check access rights to the global object. This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (object->IsJSGlobalProxy()) {
        CheckAccessGlobalProxy(reg, scratch, miss);
      }
      // The prototype is in old space; load it directly.
      reg = holder_reg;  // From now on the object is in holder_reg.
      mov(reg, Handle<JSObject>(prototype));
    }

    // Go to the next object in the prototype chain.
    object = prototype;
  }

  // Check the holder map.
  cmp(FieldOperand(reg, HeapObject::kMapOffset),
      Immediate(Handle<Map>(holder->map())));
  j(not_equal, miss, not_taken);

  // Log the check depth.
  LOG(IntEvent("check-maps-depth", depth));

  // Perform security check for access to the global object and return
  // the holder register.
  ASSERT(object == holder);
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
  if (object->IsJSGlobalProxy()) {
    CheckAccessGlobalProxy(reg, scratch, miss);
  }
  return reg;
}


void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));

  // Load current lexical context from the stack frame.
  mov(scratch, Operand(ebp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (FLAG_debug_code) {
    cmp(Operand(scratch), Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
  }
  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, offset));
  mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    push(scratch);
    // Read the first word and compare to global_context_map.
    mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
    cmp(scratch, Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(scratch);
  }

  // Check if both contexts are the same.
  cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  j(equal, &same_contexts, taken);

  // Compare security tokens, save holder_reg on the stack so we can use it
  // as a temporary register.
  //
  // TODO(119): avoid push(holder_reg)/pop(holder_reg)
  push(holder_reg);
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    cmp(holder_reg, Factory::null_value());
    Check(not_equal, "JSGlobalProxy::context() should not be null.");

    push(holder_reg);
    // Read the first word and compare to global_context_map.
    mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    cmp(holder_reg, Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);
  }

  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, token_offset));
  cmp(scratch, FieldOperand(holder_reg, token_offset));
  pop(holder_reg);
  j(not_equal, miss, not_taken);

  bind(&same_contexts);
}


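// Load the new-space allocation top into 'result'. With RESULT_CONTAINS_TOP
// the caller already has the top in 'result' and this only verifies it in
// debug mode; otherwise 'scratch', if provided, is left holding the address
// of the allocation top variable so UpdateAllocationTopHelper can store
// through it.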
void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register result_end,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    ASSERT(scratch.is(no_reg));
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    cmp(result, Operand::StaticVariable(new_space_allocation_top));
    Check(equal, "Unexpected allocation top");
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available.
  if (scratch.is(no_reg)) {
    mov(result, Operand::StaticVariable(new_space_allocation_top));
  } else {
    ASSERT(!scratch.is(result_end));
    mov(Operand(scratch), Immediate(new_space_allocation_top));
    mov(result, Operand(scratch, 0));
  }
}


void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch) {
  if (FLAG_debug_code) {
    test(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, "Unaligned allocation in new space");
  }

  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Update new top. Use scratch if available.
  if (scratch.is(no_reg)) {
    mov(Operand::StaticVariable(new_space_allocation_top), result_end);
  } else {
    mov(Operand(scratch, 0), result_end);
  }
}


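// Bump-pointer allocation of 'object_size' bytes in new space: 'result'
// receives the new object (tagged if TAG_OBJECT is set), 'result_end' the
// new top, and control transfers to 'gc_required' when the space is
// exhausted. The two overloads that follow compute a variable size from an
// element count or from a register.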
void MacroAssembler::AllocateInNewSpace(int object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  lea(result_end, Operand(result, object_size));
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required, not_taken);

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    or_(Operand(result), Immediate(kHeapObjectTag));
  }
}


void MacroAssembler::AllocateInNewSpace(int header_size,
                                        ScaleFactor element_size,
                                        Register element_count,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  lea(result_end, Operand(result, element_count, element_size, header_size));
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    or_(Operand(result), Immediate(kHeapObjectTag));
  }
}


void MacroAssembler::AllocateInNewSpace(Register object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  if (!object_size.is(result_end)) {
    mov(result_end, object_size);
  }
  add(result_end, Operand(result));
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required, not_taken);

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    or_(Operand(result), Immediate(kHeapObjectTag));
  }
}


void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Make sure the object has no tag before resetting top.
  and_(Operand(object), Immediate(~kHeapObjectTagMask));
#ifdef DEBUG
  cmp(object, Operand::StaticVariable(new_space_allocation_top));
  Check(below, "Undo allocation of non allocated memory");
#endif
  mov(Operand::StaticVariable(new_space_allocation_top), object);
}


void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate heap number in new space.
  AllocateInNewSpace(HeapNumber::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::heap_number_map()));
}


void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, length);
  ASSERT(kShortSize == 2);
  shl(scratch1, 1);
  add(Operand(scratch1), Immediate(kObjectAlignmentMask));
  and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));

  // Allocate two-byte string in new space.
  AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::string_map()));
  mov(FieldOperand(result, String::kLengthOffset), length);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, length);
  ASSERT(kCharSize == 1);
  add(Operand(scratch1), Immediate(kObjectAlignmentMask));
  and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));

  // Allocate ASCII string in new space.
  AllocateInNewSpace(SeqAsciiString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::ascii_string_map()));
  mov(FieldOperand(result, String::kLengthOffset), length);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateConsString(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate the cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::cons_string_map()));
}


void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  // Allocate the ASCII cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::cons_ascii_string_map()));
}


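// A multiplication produced -0 only if the result is zero and at least one
// operand is negative; these helpers branch to the given target in exactly
// that case.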
void MacroAssembler::NegativeZeroTest(CodeGenerator* cgen,
                                      Register result,
                                      Register op,
                                      JumpTarget* then_target) {
  JumpTarget ok;
  test(result, Operand(result));
  ok.Branch(not_zero, taken);
  test(op, Operand(op));
  then_target->Branch(sign, not_taken);
  ok.Bind();
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);
  test(op, Operand(op));
  j(sign, then_label, not_taken);
  bind(&ok);
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);
  mov(scratch, Operand(op1));
  or_(scratch, Operand(op2));
  j(sign, then_label, not_taken);
  bind(&ok);
}


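// Load the prototype of the JSFunction in 'function' into 'result'. Jumps
// to 'miss' for smis, non-functions and functions whose prototype slot
// still holds the hole; when the slot holds an initial map, the prototype
// is read from that map instead.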
void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss) {
  // Check that the receiver isn't a smi.
  test(function, Immediate(kSmiTagMask));
  j(zero, miss, not_taken);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss, not_taken);

  // Make sure that the function has an instance prototype.
  Label non_instance;
  movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
  test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance, not_taken);

  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(Operand(result), Immediate(Factory::the_hole_value()));
  j(equal, miss, not_taken);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  mov(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}


void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // calls are not allowed in some stubs
  call(stub->GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // calls are not allowed in some stubs
  jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    add(Operand(esp), Immediate(num_arguments * kPointerSize));
  }
  mov(eax, Immediate(Factory::undefined_value()));
}


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  Runtime::FunctionId function_id =
      static_cast<Runtime::FunctionId>(f->stub_id);
  RuntimeStub stub(function_id, num_arguments);
  CallStub(&stub);
}


void MacroAssembler::TailCallRuntime(const ExternalReference& ext,
                                     int num_arguments,
                                     int result_size) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  JumpToRuntime(ext);
}


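// Save the current HandleScope state (extension count, next and limit
// pointers) on the stack so PopHandleScope can restore it later, and reset
// the extension count for the scope being entered.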
void MacroAssembler::PushHandleScope(Register scratch) {
  // Push the number of extensions, smi-tagged so the gc will ignore it.
  ExternalReference extensions_address =
      ExternalReference::handle_scope_extensions_address();
  mov(scratch, Operand::StaticVariable(extensions_address));
  ASSERT_EQ(0, kSmiTag);
  shl(scratch, kSmiTagSize);
  push(scratch);
  mov(Operand::StaticVariable(extensions_address), Immediate(0));
  // Push next and limit pointers which will be wordsize aligned and
  // hence automatically smi tagged.
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  push(Operand::StaticVariable(next_address));
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address();
  push(Operand::StaticVariable(limit_address));
}


void MacroAssembler::PopHandleScope(Register saved, Register scratch) {
  ExternalReference extensions_address =
      ExternalReference::handle_scope_extensions_address();
  Label write_back;
  mov(scratch, Operand::StaticVariable(extensions_address));
  cmp(Operand(scratch), Immediate(0));
  j(equal, &write_back);
  // Calling a runtime function messes with registers, so we save and
  // restore any one we're asked not to change.
  if (saved.is_valid()) push(saved);
  CallRuntime(Runtime::kDeleteHandleScopeExtensions, 0);
  if (saved.is_valid()) pop(saved);

  bind(&write_back);
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address();
  pop(Operand::StaticVariable(limit_address));
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  pop(Operand::StaticVariable(next_address));
  pop(scratch);
  shr(scratch, kSmiTagSize);
  mov(Operand::StaticVariable(extensions_address), scratch);
}


void MacroAssembler::JumpToRuntime(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces(1);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}


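// Shared prologue for the InvokeCode variants: compare the actual argument
// count against the expected one and, whenever they might differ, route the
// invocation through the arguments adaptor trampoline. Falls through (or
// jumps to 'done') once the counts are known to match.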
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    const Operand& code_operand,
                                    Label* done,
                                    InvokeFlag flag) {
  bool definitely_matches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(eax, actual.immediate());
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaptation code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      ASSERT(expected.reg().is(ebx));
      mov(eax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), Operand(actual.reg()));
      j(equal, &invoke);
      ASSERT(actual.reg().is(eax));
      ASSERT(expected.reg().is(ebx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
    if (!code_constant.is_null()) {
      mov(edx, Immediate(code_constant));
      add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_operand.is_reg(edx)) {
      mov(edx, code_operand);
    }

    if (flag == CALL_FUNCTION) {
      call(adaptor, RelocInfo::CODE_TARGET);
      jmp(done);
    } else {
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}


void MacroAssembler::InvokeCode(const Operand& code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag) {
  Label done;
  InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code);
  }
  bind(&done);
}


void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag) {
  Label done;
  Operand dummy(eax);
  InvokePrologue(expected, actual, code, dummy, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code, rmode);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code, rmode);
  }
  bind(&done);
}


void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(fun.is(edi));
  mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset));
  lea(edx, FieldOperand(edx, Code::kHeaderSize));

  ParameterCount expected(ebx);
  InvokeCode(Operand(edx), expected, actual, flag);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
  bool resolved;
  Handle<Code> code = ResolveBuiltin(id, &resolved);

  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments match the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  InvokeCode(Handle<Code>(code), expected, expected,
             RelocInfo::CODE_TARGET, flag);

  const char* name = Builtins::GetName(id);
  int argc = Builtins::GetArgumentsCount(id);

  if (!resolved) {
    uint32_t flags =
        Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
        Bootstrapper::FixupFlagsUseCodeObject::encode(false);
    Unresolved entry = { pc_offset() - sizeof(int32_t), flags, name };
    unresolved_.Add(entry);
  }
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  bool resolved;
  Handle<Code> code = ResolveBuiltin(id, &resolved);

  const char* name = Builtins::GetName(id);
  int argc = Builtins::GetArgumentsCount(id);

  mov(Operand(target), Immediate(code));
  if (!resolved) {
    uint32_t flags =
        Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
        Bootstrapper::FixupFlagsUseCodeObject::encode(true);
    Unresolved entry = { pc_offset() - sizeof(int32_t), flags, name };
    unresolved_.Add(entry);
  }
  add(Operand(target), Immediate(Code::kHeaderSize - kHeapObjectTag));
}


Handle<Code> MacroAssembler::ResolveBuiltin(Builtins::JavaScript id,
                                            bool* resolved) {
  // Move the builtin function into the temporary function slot by
  // reading it from the builtins object. NOTE: We should be able to
  // reduce this to two instructions by putting the function table in
  // the global object instead of the "builtins" object and by using a
  // real register for the function.
  mov(edx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  mov(edx, FieldOperand(edx, GlobalObject::kBuiltinsOffset));
  int builtins_offset =
      JSBuiltinsObject::kJSBuiltinsOffset + (id * kPointerSize);
  mov(edi, FieldOperand(edx, builtins_offset));

  return Builtins::GetCode(id, resolved);
}


void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    mov(dst, Operand(esi, Context::SlotOffset(Context::CLOSURE_INDEX)));
    // Load the function context (which is the incoming, outer context).
    mov(dst, FieldOperand(dst, JSFunction::kContextOffset));
    for (int i = 1; i < context_chain_length; i++) {
      mov(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
      mov(dst, FieldOperand(dst, JSFunction::kContextOffset));
    }
    // The context may be an intermediate context, not a function context.
    mov(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  } else {  // Slot is in the current function context.
    // The context may be an intermediate context, not a function context.
    mov(dst, Operand(esi, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  }
}


void MacroAssembler::Ret() {
  ret(0);
}


void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      inc(operand);
    } else {
      add(operand, Immediate(value));
    }
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      dec(operand);
    } else {
      sub(operand, Immediate(value));
    }
  }
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code) Check(cc, msg);
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L, taken);
  Abort(msg);
  // will not return here
  bind(&L);
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  set_allow_stub_calls(true);

  push(eax);
  push(Immediate(p0));
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
  int3();
}


CodePatcher::CodePatcher(byte* address, int size)
    : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to
  // patch. The size is adjusted with kGap in order for the assembler to
  // generate size bytes of instructions without failing with buffer size
  // constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


} } // namespace v8::internal