blob: ba2fe2dd4ec273d4455f97c6461121d7ed5e1679 [file] [log] [blame]
Steve Blocka7e24c12009-10-30 11:49:00 +00001// Copyright 2006-2009 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
29
Leon Clarkef7060e22010-06-03 12:02:55 +010030#if defined(V8_TARGET_ARCH_IA32)
31
Steve Blocka7e24c12009-10-30 11:49:00 +000032#include "bootstrapper.h"
33#include "codegen-inl.h"
34#include "debug.h"
35#include "runtime.h"
36#include "serialize.h"
37
38namespace v8 {
39namespace internal {
40
41// -------------------------------------------------------------------------
42// MacroAssembler implementation.
43
// Construct a macro assembler emitting into 'buffer' of 'size' bytes.
// Stub generation starts disabled and stub calls enabled; the code
// object slot is initialized to the undefined value.
MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      code_object_(Heap::undefined_value()) {
}
50
51
// Set the remembered-set bit for the slot 'addr' within 'object'.
// Clobbers all three registers: 'object' becomes the page start,
// 'addr' becomes the bit offset of the slot within the page, and
// 'scratch' is used for the large-object length (and the debug check).
void MacroAssembler::RecordWriteHelper(Register object,
                                       Register addr,
                                       Register scratch) {
  if (FLAG_debug_code) {
    // Check that the object is not in new space.
    Label not_in_new_space;
    InNewSpace(object, scratch, not_equal, &not_in_new_space);
    Abort("new-space object passed to RecordWriteHelper");
    bind(&not_in_new_space);
  }

  Label fast;

  // Compute the page start address from the heap object pointer, and reuse
  // the 'object' register for it.
  and_(object, ~Page::kPageAlignmentMask);
  Register page_start = object;

  // Compute the bit addr in the remembered set/index of the pointer in the
  // page. Reuse 'addr' as pointer_offset.
  sub(addr, Operand(page_start));
  shr(addr, kObjectAlignmentBits);
  Register pointer_offset = addr;

  // If the bit offset lies beyond the normal remembered set range, it is in
  // the extra remembered set area of a large object.
  cmp(pointer_offset, Page::kPageSize / kPointerSize);
  j(less, &fast);

  // Adjust 'page_start' so that addressing using 'pointer_offset' hits the
  // extra remembered set after the large object.

  // Find the length of the large object (FixedArray).
  mov(scratch, Operand(page_start, Page::kObjectStartOffset
                                   + FixedArray::kLengthOffset));
  Register array_length = scratch;

  // Extra remembered set starts right after the large object (a FixedArray),
  // at page_start + kObjectStartOffset + objectSize
  // where objectSize is FixedArray::kHeaderSize + kPointerSize * array_length.
  // Add the delta between the end of the normal RSet and the start of the
  // extra RSet to 'page_start', so that addressing the bit using
  // 'pointer_offset' hits the extra RSet words.
  lea(page_start,
      Operand(page_start, array_length, times_pointer_size,
              Page::kObjectStartOffset + FixedArray::kHeaderSize
              - Page::kRSetEndOffset));

  // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
  // to limit code size. We should probably evaluate this decision by
  // measuring the performance of an equivalent implementation using
  // "simpler" instructions.
  bind(&fast);
  bts(Operand(page_start, Page::kRSetOffset), pointer_offset);
}
107
108
// Branch to 'branch' if 'object' is (cc == equal) / is not
// (cc == not_equal) in the new space.  Clobbers 'scratch'.
void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                Label* branch) {
  ASSERT(cc == equal || cc == not_equal);
  if (Serializer::enabled()) {
    // Can't do arithmetic on external references if it might get serialized.
    mov(scratch, Operand(object));
    // The mask isn't really an address. We load it as an external reference
    // in case the size of the new space is different between the snapshot
    // maker and the running system.
    and_(Operand(scratch), Immediate(ExternalReference::new_space_mask()));
    cmp(Operand(scratch), Immediate(ExternalReference::new_space_start()));
    j(cc, branch);
  } else {
    // Fast path: fold the subtraction of the new-space start into the mask
    // test, so a single lea + and + conditional jump suffices.
    int32_t new_space_start = reinterpret_cast<int32_t>(
        ExternalReference::new_space_start().address());
    lea(scratch, Operand(object, -new_space_start));
    and_(scratch, Heap::NewSpaceMask());
    j(cc, branch);
  }
}
131
132
// Set the remembered set bit for [object+offset].
// object is the object being stored into, value is the object being stored.
// If offset is zero, then the scratch register contains the array index into
// the elements array represented as a Smi.
// All registers are clobbered by the operation.
void MacroAssembler::RecordWrite(Register object, int offset,
                                 Register value, Register scratch) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are esi.
  ASSERT(!object.is(esi) && !value.is(esi) && !scratch.is(esi));

  // First, check if a remembered set write is even needed. The tests below
  // catch stores of Smis and stores into young gen (which does not have
  // space for the remembered set bits).
  Label done;

  // Skip barrier if writing a smi.
  ASSERT_EQ(0, kSmiTag);
  test(value, Immediate(kSmiTagMask));
  j(zero, &done);

  // Skip barrier if the object being stored into is in new space
  // ('value' doubles as scratch here; it is zapped below in debug mode).
  InNewSpace(object, value, equal, &done);

  // The offset is relative to a tagged or untagged HeapObject pointer,
  // so either offset or offset + kHeapObjectTag must be a
  // multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize) ||
         IsAligned(offset + kHeapObjectTag, kPointerSize));

  // We use optimized write barrier code if the word being written to is not
  // in a large object chunk or is in the first page of a large object chunk.
  // We make sure that an offset is inside the right limits whether it is
  // tagged or untagged.
  if ((offset > 0) && (offset < Page::kMaxHeapObjectSize - kHeapObjectTag)) {
    // Compute the bit offset in the remembered set, leave it in 'value'.
    lea(value, Operand(object, offset));
    and_(value, Page::kPageAlignmentMask);
    shr(value, kPointerSizeLog2);

    // Compute the page address from the heap object pointer, leave it in
    // 'object'.
    and_(object, ~Page::kPageAlignmentMask);

    // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
    // to limit code size. We should probably evaluate this decision by
    // measuring the performance of an equivalent implementation using
    // "simpler" instructions.
    bts(Operand(object, Page::kRSetOffset), value);
  } else {
    Register dst = scratch;
    if (offset != 0) {
      lea(dst, Operand(object, offset));
    } else {
      // Array access: calculate the destination address in the same manner
      // as KeyedStoreIC::GenerateGeneric.  Multiply a smi by 2 to get an
      // offset into an array of words ('dst'/'scratch' holds the Smi index
      // on entry, per the function contract above).
      ASSERT_EQ(1, kSmiTagSize);
      ASSERT_EQ(0, kSmiTag);
      lea(dst, Operand(object, dst, times_half_pointer_size,
                       FixedArray::kHeaderSize - kHeapObjectTag));
    }
    // If we are already generating a shared stub, not inlining the
    // record write code isn't going to save us any memory.
    if (generating_stub()) {
      RecordWriteHelper(object, dst, value);
    } else {
      RecordWriteStub stub(object, dst, value);
      CallStub(&stub);
    }
  }

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    mov(object, Immediate(BitCast<int32_t>(kZapValue)));
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
    mov(scratch, Immediate(BitCast<int32_t>(kZapValue)));
  }
}
215
216
Steve Blockd0582a62009-12-15 09:54:21 +0000217void MacroAssembler::StackLimitCheck(Label* on_stack_overflow) {
218 cmp(esp,
219 Operand::StaticVariable(ExternalReference::address_of_stack_limit()));
220 j(below, on_stack_overflow);
221}
222
223
Steve Blocka7e24c12009-10-30 11:49:00 +0000224#ifdef ENABLE_DEBUGGER_SUPPORT
225void MacroAssembler::SaveRegistersToMemory(RegList regs) {
226 ASSERT((regs & ~kJSCallerSaved) == 0);
227 // Copy the content of registers to memory location.
228 for (int i = 0; i < kNumJSCallerSaved; i++) {
229 int r = JSCallerSavedCode(i);
230 if ((regs & (1 << r)) != 0) {
231 Register reg = { r };
232 ExternalReference reg_addr =
233 ExternalReference(Debug_Address::Register(i));
234 mov(Operand::StaticVariable(reg_addr), reg);
235 }
236 }
237}
238
239
240void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
241 ASSERT((regs & ~kJSCallerSaved) == 0);
242 // Copy the content of memory location to registers.
243 for (int i = kNumJSCallerSaved; --i >= 0;) {
244 int r = JSCallerSavedCode(i);
245 if ((regs & (1 << r)) != 0) {
246 Register reg = { r };
247 ExternalReference reg_addr =
248 ExternalReference(Debug_Address::Register(i));
249 mov(reg, Operand::StaticVariable(reg_addr));
250 }
251 }
252}
253
254
255void MacroAssembler::PushRegistersFromMemory(RegList regs) {
256 ASSERT((regs & ~kJSCallerSaved) == 0);
257 // Push the content of the memory location to the stack.
258 for (int i = 0; i < kNumJSCallerSaved; i++) {
259 int r = JSCallerSavedCode(i);
260 if ((regs & (1 << r)) != 0) {
261 ExternalReference reg_addr =
262 ExternalReference(Debug_Address::Register(i));
263 push(Operand::StaticVariable(reg_addr));
264 }
265 }
266}
267
268
269void MacroAssembler::PopRegistersToMemory(RegList regs) {
270 ASSERT((regs & ~kJSCallerSaved) == 0);
271 // Pop the content from the stack to the memory location.
272 for (int i = kNumJSCallerSaved; --i >= 0;) {
273 int r = JSCallerSavedCode(i);
274 if ((regs & (1 << r)) != 0) {
275 ExternalReference reg_addr =
276 ExternalReference(Debug_Address::Register(i));
277 pop(Operand::StaticVariable(reg_addr));
278 }
279 }
280}
281
282
283void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
284 Register scratch,
285 RegList regs) {
286 ASSERT((regs & ~kJSCallerSaved) == 0);
287 // Copy the content of the stack to the memory location and adjust base.
288 for (int i = kNumJSCallerSaved; --i >= 0;) {
289 int r = JSCallerSavedCode(i);
290 if ((regs & (1 << r)) != 0) {
291 mov(scratch, Operand(base, 0));
292 ExternalReference reg_addr =
293 ExternalReference(Debug_Address::Register(i));
294 mov(Operand::StaticVariable(reg_addr), scratch);
295 lea(base, Operand(base, kPointerSize));
296 }
297 }
298}
Andrei Popescu402d9372010-02-26 13:31:12 +0000299
// Emit a call into the runtime's debug-break entry: zero arguments in
// eax, the Runtime::kDebugBreak entry in ebx, dispatched through a
// one-argument CEntryStub with DEBUG_BREAK relocation.
void MacroAssembler::DebugBreak() {
  Set(eax, Immediate(0));
  mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak)));
  CEntryStub ces(1);
  call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}
Steve Blocka7e24c12009-10-30 11:49:00 +0000306#endif
307
308void MacroAssembler::Set(Register dst, const Immediate& x) {
309 if (x.is_zero()) {
310 xor_(dst, Operand(dst)); // shorter than mov
311 } else {
312 mov(dst, x);
313 }
314}
315
316
// Store immediate 'x' into the memory operand 'dst'.
void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}
320
321
// Load the map of 'heap_object' into 'map' and compare its instance
// type against 'type'; leaves the flags set for a conditional jump.
void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}
328
329
// Compare the instance-type byte of 'map' against 'type'; flags are
// left for the caller's conditional jump.
void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       static_cast<int8_t>(type));
}
334
335
// Jump to 'fail' unless 'obj' is a heap object whose map equals 'map'.
// When 'is_heap_object' is true the caller guarantees 'obj' is not a
// smi, so the smi check is skipped.
void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              bool is_heap_object) {
  if (!is_heap_object) {
    test(obj, Immediate(kSmiTagMask));
    j(zero, fail);
  }
  cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
  j(not_equal, fail);
}
347
348
// Load the map and instance type of 'heap_object' and test the
// is-not-string mask.  Returns the condition (zero) that holds when
// the object IS a string; 'map' and 'instance_type' are clobbered.
Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  // The string tag must be zero for the zero-flag test below to mean
  // "is a string".
  ASSERT(kNotStringTag != 0);
  test(instance_type, Immediate(kIsNotStringMask));
  return zero;
}
358
359
// Compare the two values on top of the x87 FPU stack and transfer the
// result to the EFLAGS register, popping both operands.
void MacroAssembler::FCmp() {
  if (CpuFeatures::IsSupported(CMOV)) {
    // fucomip compares and sets EFLAGS directly, popping one operand;
    // free/pop the remaining stack slot by hand.
    fucomip();
    ffree(0);
    fincstp();
  } else {
    // No fucomip available: compare-and-pop-twice, then move the FPU
    // status word to EFLAGS via ax (eax is preserved around it).
    fucompp();
    push(eax);
    fnstsw_ax();
    sahf();
    pop(eax);
  }
}
373
374
// Debug-mode check: abort unless 'object' is a smi or a heap number.
void MacroAssembler::AbortIfNotNumber(Register object) {
  Label ok;
  // Smis are numbers; skip the map check.
  test(object, Immediate(kSmiTagMask));
  j(zero, &ok);
  cmp(FieldOperand(object, HeapObject::kMapOffset),
      Factory::heap_number_map());
  Assert(equal, "Operand not a number");
  bind(&ok);
}
384
385
// Debug-mode check: abort unless 'object' is a smi.
void MacroAssembler::AbortIfNotSmi(Register object) {
  test(object, Immediate(kSmiTagMask));
  Assert(equal, "Operand not a smi");
}
390
391
// Build a standard stack frame of the given type: saved ebp, context
// (esi), frame-type marker, and this code object.
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, Operand(esp));
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(CodeObject()));
  if (FLAG_debug_code) {
    // The code-object slot must have been patched away from undefined.
    cmp(Operand(esp, 0), Immediate(Factory::undefined_value()));
    Check(not_equal, "code object not properly patched");
  }
}
403
404
// Tear down a frame built by EnterFrame.  In debug mode, verify that
// the frame being left has the expected type marker.
void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (FLAG_debug_code) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, "stack frame types must match");
  }
  leave();
}
413
// Build the fixed head of an exit frame (saved ebp, reserved entry-sp
// slot, code object) and record the frame pointer and context in top.
// NOTE(review): 'mode' is not read here — debug-mode handling happens
// in EnterExitFrameEpilogue.
void MacroAssembler::EnterExitFramePrologue(ExitFrame::Mode mode) {
  // Setup the frame structure on the stack.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(ebp);
  mov(ebp, Operand(esp));

  // Reserve room for entry stack pointer and push the debug marker.
  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  push(Immediate(CodeObject()));  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  ExternalReference context_address(Top::k_context_address);
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);
}
Steve Blocka7e24c12009-10-30 11:49:00 +0000433
// Finish building an exit frame: optionally spill the debugger's
// register copies, reserve 'argc' argument slots, align esp to the
// OS frame alignment, and patch the saved entry sp slot.
void MacroAssembler::EnterExitFrameEpilogue(ExitFrame::Mode mode, int argc) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Save the state of all registers to the stack from the memory
  // location. This is needed to allow nested break points.
  if (mode == ExitFrame::MODE_DEBUG) {
    // TODO(1243899): This should be symmetric to
    // CopyRegistersFromStackToMemory() but it isn't! esp is assumed
    // correct here, but computed for the other call. Very error
    // prone! FIX THIS. Actually there are deeper problems with
    // register saving than this asymmetry (see the bug report
    // associated with this issue).
    PushRegistersFromMemory(kJSCallerSaved);
  }
#endif

  // Reserve space for arguments.
  sub(Operand(esp), Immediate(argc * kPointerSize));

  // Get the required frame alignment for the OS.
  static const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}
462
463
// Enter an exit frame for a runtime call: build the frame head, set up
// argc (edi) and argv (esi) from eax, and reserve two argument slots.
void MacroAssembler::EnterExitFrame(ExitFrame::Mode mode) {
  EnterExitFramePrologue(mode);

  // Setup argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, Operand(eax));
  lea(esi, Operand(ebp, eax, times_4, offset));

  EnterExitFrameEpilogue(mode, 2);
}
474
475
// Enter an exit frame for an API call: build the frame head, point esi
// past 'stack_space' caller words, and reserve 'argc' argument slots.
void MacroAssembler::EnterApiExitFrame(ExitFrame::Mode mode,
                                       int stack_space,
                                       int argc) {
  EnterExitFramePrologue(mode);

  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  lea(esi, Operand(ebp, (stack_space * kPointerSize) + offset));

  EnterExitFrameEpilogue(mode, argc);
}
486
487
// Tear down an exit frame: optionally restore the debugger's register
// copies, pop arguments and receiver, restore the context from top,
// re-push the return address, and clear the top frame pointer.
void MacroAssembler::LeaveExitFrame(ExitFrame::Mode mode) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Restore the memory copy of the registers by digging them out from
  // the stack. This is needed to allow nested break points.
  if (mode == ExitFrame::MODE_DEBUG) {
    // It's okay to clobber register ebx below because we don't need
    // the function pointer after this.
    const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
    int kOffset = ExitFrameConstants::kCodeOffset - kCallerSavedSize;
    lea(ebx, Operand(ebp, kOffset));
    CopyRegistersFromStackToMemory(ebx, ecx, kJSCallerSaved);
  }
#endif

  // Get the return address from the stack and restore the frame pointer.
  mov(ecx, Operand(ebp, 1 * kPointerSize));
  mov(ebp, Operand(ebp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  lea(esp, Operand(esi, 1 * kPointerSize));

  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Top::k_context_address);
  mov(esi, Operand::StaticVariable(context_address));
#ifdef DEBUG
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Push the return address to get ready to return.
  push(ecx);

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}
523
524
// Push a new try handler (state marker, frame pointer, link to the
// previous handler) and make it the current handler in top.
void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  // Adjust this code if not the case.
  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
  // The pc (return address) is already on TOS.
  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      push(Immediate(StackHandler::TRY_CATCH));
    } else {
      push(Immediate(StackHandler::TRY_FINALLY));
    }
    push(ebp);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The frame pointer does not point to a JS frame so we save NULL
    // for ebp. We expect the code throwing an exception to check ebp
    // before dereferencing it to restore the context.
    push(Immediate(StackHandler::ENTRY));
    push(Immediate(0));  // NULL frame pointer.
  }
  // Save the current handler as the next handler.
  push(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
  // Link this handler as the new current one.
  mov(Operand::StaticVariable(ExternalReference(Top::k_handler_address)), esp);
}
550
551
// Unlink the current try handler: restore the next-handler link into
// top and drop the rest of the handler from the stack.
void MacroAssembler::PopTryHandler() {
  ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
  pop(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
  add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize));
}
557
558
// Walk the prototype chain from 'object' to 'holder' at compile time,
// emitting a map check (and, for global proxies, an access check) for
// each object.  Jumps to 'miss' on any mismatch.  If 'save_at_depth'
// matches the current depth, the object at that depth is stored at
// [esp + kPointerSize].  Returns the register holding the holder.
Register MacroAssembler::CheckMaps(JSObject* object, Register object_reg,
                                   JSObject* holder, Register holder_reg,
                                   Register scratch,
                                   int save_at_depth,
                                   Label* miss) {
  // Make sure there's no overlap between scratch and the other
  // registers.
  ASSERT(!scratch.is(object_reg) && !scratch.is(holder_reg));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

  if (save_at_depth == depth) {
    mov(Operand(esp, kPointerSize), object_reg);
  }

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  while (object != holder) {
    depth++;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

    JSObject* prototype = JSObject::cast(object->GetPrototype());
    if (Heap::InNewSpace(prototype)) {
      // Get the map of the current object.
      mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
      cmp(Operand(scratch), Immediate(Handle<Map>(object->map())));
      // Branch on the result of the map check.
      j(not_equal, miss, not_taken);
      // Check access rights to the global object. This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (object->IsJSGlobalProxy()) {
        CheckAccessGlobalProxy(reg, scratch, miss);

        // Restore scratch register to be the map of the object.
        // We load the prototype from the map in the scratch register.
        mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
      }
      // The prototype is in new space; we cannot store a reference
      // to it in the code. Load it from the map.
      reg = holder_reg;  // From now the object is in holder_reg.
      mov(reg, FieldOperand(scratch, Map::kPrototypeOffset));
    } else {
      // Check the map of the current object.
      cmp(FieldOperand(reg, HeapObject::kMapOffset),
          Immediate(Handle<Map>(object->map())));
      // Branch on the result of the map check.
      j(not_equal, miss, not_taken);
      // Check access rights to the global object. This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (object->IsJSGlobalProxy()) {
        CheckAccessGlobalProxy(reg, scratch, miss);
      }
      // The prototype is in old space; load it directly.
      reg = holder_reg;  // From now the object is in holder_reg.
      mov(reg, Handle<JSObject>(prototype));
    }

    if (save_at_depth == depth) {
      mov(Operand(esp, kPointerSize), reg);
    }

    // Go to the next object in the prototype chain.
    object = prototype;
  }

  // Check the holder map.
  cmp(FieldOperand(reg, HeapObject::kMapOffset),
      Immediate(Handle<Map>(holder->map())));
  j(not_equal, miss, not_taken);

  // Log the check depth.
  LOG(IntEvent("check-maps-depth", depth + 1));

  // Perform security check for access to the global object and return
  // the holder register.
  ASSERT(object == holder);
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
  if (object->IsJSGlobalProxy()) {
    CheckAccessGlobalProxy(reg, scratch, miss);
  }
  return reg;
}
648
649
// Verify that the calling context may access the global proxy in
// 'holder_reg': either both share the same global context, or their
// security tokens match.  Jumps to 'miss' otherwise.  Clobbers
// 'scratch'; 'holder_reg' is preserved via push/pop.
void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));

  // Load current lexical context from the stack frame.
  mov(scratch, Operand(ebp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (FLAG_debug_code) {
    cmp(Operand(scratch), Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
  }
  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, offset));
  mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    push(scratch);
    // Read the first word and compare to global_context_map.
    mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
    cmp(scratch, Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(scratch);
  }

  // Check if both contexts are the same.
  cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  j(equal, &same_contexts, taken);

  // Compare security tokens, save holder_reg on the stack so we can use it
  // as a temporary register.
  //
  // TODO(119): avoid push(holder_reg)/pop(holder_reg)
  push(holder_reg);
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    cmp(holder_reg, Factory::null_value());
    Check(not_equal, "JSGlobalProxy::context() should not be null.");

    push(holder_reg);
    // Read the first word and compare to global_context_map().
    mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    cmp(holder_reg, Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);
  }

  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, token_offset));
  cmp(scratch, FieldOperand(holder_reg, token_offset));
  pop(holder_reg);
  j(not_equal, miss, not_taken);

  bind(&same_contexts);
}
716
717
// Load the new-space allocation top into 'result'.  If the caller
// already has the top (RESULT_CONTAINS_TOP), only a debug check is
// emitted.  'scratch', when provided, caches the top address for a
// later UpdateAllocationTopHelper.
void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register result_end,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    ASSERT(scratch.is(no_reg));
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    cmp(result, Operand::StaticVariable(new_space_allocation_top));
    Check(equal, "Unexpected allocation top");
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available.
  if (scratch.is(no_reg)) {
    mov(result, Operand::StaticVariable(new_space_allocation_top));
  } else {
    ASSERT(!scratch.is(result_end));
    mov(Operand(scratch), Immediate(new_space_allocation_top));
    mov(result, Operand(scratch, 0));
  }
}
746
747
// Store 'result_end' as the new new-space allocation top.  If
// 'scratch' is valid it must already hold the top address (set up by
// LoadAllocationTopHelper).
void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch) {
  if (FLAG_debug_code) {
    test(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, "Unaligned allocation in new space");
  }

  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Update new top. Use scratch if available.
  if (scratch.is(no_reg)) {
    mov(Operand::StaticVariable(new_space_allocation_top), result_end);
  } else {
    mov(Operand(scratch, 0), result_end);
  }
}
765
766
// Allocate 'object_size' bytes (a compile-time constant) in new space.
// On success 'result' holds the new object (tagged if TAG_OBJECT) and
// 'result_end' the end of it; jumps to 'gc_required' when new space is
// exhausted.
void MacroAssembler::AllocateInNewSpace(int object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  lea(result_end, Operand(result, object_size));
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required, not_taken);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}
793
794
795void MacroAssembler::AllocateInNewSpace(int header_size,
796 ScaleFactor element_size,
797 Register element_count,
798 Register result,
799 Register result_end,
800 Register scratch,
801 Label* gc_required,
802 AllocationFlags flags) {
803 ASSERT(!result.is(result_end));
804
805 // Load address of new object into result.
806 LoadAllocationTopHelper(result, result_end, scratch, flags);
807
808 // Calculate new top and bail out if new space is exhausted.
809 ExternalReference new_space_allocation_limit =
810 ExternalReference::new_space_allocation_limit_address();
811 lea(result_end, Operand(result, element_count, element_size, header_size));
812 cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
813 j(above, gc_required);
814
Steve Blocka7e24c12009-10-30 11:49:00 +0000815 // Tag result if requested.
816 if ((flags & TAG_OBJECT) != 0) {
Leon Clarkee46be812010-01-19 14:06:41 +0000817 lea(result, Operand(result, kHeapObjectTag));
Steve Blocka7e24c12009-10-30 11:49:00 +0000818 }
Leon Clarkee46be812010-01-19 14:06:41 +0000819
820 // Update allocation top.
821 UpdateAllocationTopHelper(result_end, scratch);
Steve Blocka7e24c12009-10-30 11:49:00 +0000822}
823
824
// Allocates an object whose byte size is held in register |object_size| in
// new space. On success |result| holds the object (tagged if TAG_OBJECT is in
// |flags|) and |result_end| the address past it. Jumps to |gc_required|
// without allocating if new space is exhausted.
void MacroAssembler::AllocateInNewSpace(Register object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  // object_size may alias result_end; only copy when they differ.
  if (!object_size.is(result_end)) {
    mov(result_end, object_size);
  }
  add(result_end, Operand(result));
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required, not_taken);

  // Tag result if requested. result_end stays untagged so the stored top
  // remains a raw address.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}
854
855
// Reverts the most recent new-space allocation by resetting the allocation
// top to |object|'s (untagged) start address. Only valid when |object| was
// the last object allocated and no GC has happened since.
void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Make sure the object has no tag before resetting top.
  and_(Operand(object), Immediate(~kHeapObjectTagMask));
#ifdef DEBUG
  // The object being undone must lie below the current top.
  cmp(object, Operand::StaticVariable(new_space_allocation_top));
  Check(below, "Undo allocation of non allocated memory");
#endif
  mov(Operand::StaticVariable(new_space_allocation_top), object);
}
868
869
// Allocates a HeapNumber in new space and installs its map. The value field
// is left uninitialized. Jumps to |gc_required| if allocation fails.
void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate heap number in new space.
  AllocateInNewSpace(HeapNumber::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::heap_number_map()));
}
886
887
// Allocates a sequential two-byte string of |length| characters in new space
// and initializes its map, length (as a smi) and hash field. The character
// data is left uninitialized. Jumps to |gc_required| if allocation fails.
void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  ASSERT(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
  // Round down to the alignment boundary (net effect: round size up).
  and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));

  // Allocate two byte string in new space.
  AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
921
922
// Allocates a sequential ASCII string of |length| characters in new space
// and initializes its map, length (as a smi) and hash field. The character
// data is left uninitialized. Jumps to |gc_required| if allocation fails.
void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, length);
  ASSERT(kCharSize == 1);
  // Round the byte count up to the object alignment boundary.
  add(Operand(scratch1), Immediate(kObjectAlignmentMask));
  and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));

  // Allocate ascii string in new space.
  AllocateInNewSpace(SeqAsciiString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::ascii_string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
956
957
// Allocates a (two-byte) ConsString in new space and installs its map.
// Jumps to |gc_required| if allocation fails.
void MacroAssembler::AllocateConsString(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::cons_string_map()));
}
974
975
// Allocates an ASCII ConsString in new space and installs its map.
// Jumps to |gc_required| if allocation fails.
void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  // Allocate ascii cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::cons_ascii_string_map()));
}
992
993
// Branches to |then_target| when |result| is zero and |op| is negative,
// i.e. when an integer multiplication/division produced -0 (which must be
// represented as a heap number, not a smi). Virtual-frame (JumpTarget)
// variant; |cgen| is unused here — presumably kept for signature parity.
void MacroAssembler::NegativeZeroTest(CodeGenerator* cgen,
                                      Register result,
                                      Register op,
                                      JumpTarget* then_target) {
  JumpTarget ok;
  test(result, Operand(result));
  ok.Branch(not_zero, taken);       // Non-zero result: cannot be -0.
  test(op, Operand(op));
  then_target->Branch(sign, not_taken);  // Zero result, negative operand: -0.
  ok.Bind();
}
1005
1006
// Branches to |then_label| when |result| is zero and |op| is negative,
// i.e. when an integer operation produced -0. Label-based variant.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);          // Non-zero result: cannot be -0.
  test(op, Operand(op));
  j(sign, then_label, not_taken);   // Zero result, negative operand: -0.
  bind(&ok);
}
1017
1018
// Branches to |then_label| when |result| is zero and either |op1| or |op2|
// is negative (sign bit of op1|op2 set), i.e. when the operation may have
// produced -0. |scratch| is clobbered.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);          // Non-zero result: cannot be -0.
  // Sign bit of (op1 | op2) is set iff at least one operand is negative.
  mov(scratch, Operand(op1));
  or_(scratch, Operand(op2));
  j(sign, then_label, not_taken);
  bind(&ok);
}
1032
1033
// Loads the prototype of JS function |function| into |result|, following
// the same lookup an IC stub would perform. Jumps to |miss| when |function|
// is not a function or has no usable prototype. |scratch| is clobbered.
void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss) {
  // Check that the receiver isn't a smi.
  test(function, Immediate(kSmiTagMask));
  j(zero, miss, not_taken);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss, not_taken);

  // Make sure that the function has an instance prototype.
  Label non_instance;
  movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
  test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance, not_taken);

  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(Operand(result), Immediate(Factory::the_hole_value()));
  j(equal, miss, not_taken);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  mov(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}
1079
1080
// Emits a call to |stub|'s code object, compiling the stub if needed.
void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  call(stub->GetCode(), RelocInfo::CODE_TARGET);
}
1085
1086
Leon Clarkee46be812010-01-19 14:06:41 +00001087Object* MacroAssembler::TryCallStub(CodeStub* stub) {
1088 ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
1089 Object* result = stub->TryGetCode();
1090 if (!result->IsFailure()) {
1091 call(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
1092 }
1093 return result;
1094}
1095
1096
// Emits a tail call (jump) to |stub|'s code object, compiling it if needed.
void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
}
1101
1102
Leon Clarkee46be812010-01-19 14:06:41 +00001103Object* MacroAssembler::TryTailCallStub(CodeStub* stub) {
1104 ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
1105 Object* result = stub->TryGetCode();
1106 if (!result->IsFailure()) {
1107 jmp(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
1108 }
1109 return result;
1110}
1111
1112
// Emits the return from a stub taking |argc| arguments; pops the
// arguments except the receiver-equivalent first one.
void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}
1117
1118
// Emits recovery code for a runtime call with a wrong argument count:
// drops the |num_arguments| stack arguments and produces undefined in eax.
void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    add(Operand(esp), Immediate(num_arguments * kPointerSize));
  }
  mov(eax, Immediate(Factory::undefined_value()));
}
1125
1126
// Convenience overload: resolves the function id and delegates.
void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}
1130
1131
// Convenience overload: resolves the function id and delegates.
Object* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
                                       int num_arguments) {
  return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
}
1136
1137
// Emits a call to the runtime function |f| with |num_arguments| arguments
// already on the stack, going through the CEntry stub (eax = argument
// count, ebx = function entry).
void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f)));
  CEntryStub ces(1);
  CallStub(&ces);
}
1156
1157
// Emits a call to an arbitrary external (C++) entry point |ref| through the
// CEntry stub, with |num_arguments| arguments already on the stack.
void MacroAssembler::CallExternalReference(ExternalReference ref,
                                           int num_arguments) {
  mov(eax, Immediate(num_arguments));  // CEntry expects argc in eax.
  mov(ebx, Immediate(ref));            // ... and the target in ebx.

  CEntryStub stub(1);
  CallStub(&stub);
}
1166
1167
// Like CallRuntime, but propagates CEntry stub-code allocation failure to
// the caller instead of aborting (via TryCallStub).
Object* MacroAssembler::TryCallRuntime(Runtime::Function* f,
                                       int num_arguments) {
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    // Since we did not call the stub, there was no allocation failure.
    // Return some non-failure object.
    return Heap::undefined_value();
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f)));
  CEntryStub ces(1);
  return TryCallStub(&ces);
}
1186
1187
// Emits a tail call to external entry point |ext| with |num_arguments|
// arguments already on the stack. |result_size| is unused on ia32
// (presumably only meaningful on other architectures — kept for
// cross-platform signature parity).
void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  JumpToExternalReference(ext);
}
1198
1199
// Convenience overload: tail-calls the runtime function identified by |fid|.
void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid), num_arguments, result_size);
}
1205
1206
// Emits code that saves the current HandleScope state (extensions count,
// next and limit pointers) on the stack and resets the extensions count,
// effectively opening a new handle scope. |scratch| is clobbered.
void MacroAssembler::PushHandleScope(Register scratch) {
  // Push the number of extensions, smi-tagged so the gc will ignore it.
  ExternalReference extensions_address =
      ExternalReference::handle_scope_extensions_address();
  mov(scratch, Operand::StaticVariable(extensions_address));
  ASSERT_EQ(0, kSmiTag);
  shl(scratch, kSmiTagSize);
  push(scratch);
  mov(Operand::StaticVariable(extensions_address), Immediate(0));
  // Push next and limit pointers which will be wordsize aligned and
  // hence automatically smi tagged.
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  push(Operand::StaticVariable(next_address));
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address();
  push(Operand::StaticVariable(limit_address));
}
1225
1226
// Emits code that restores the HandleScope state pushed by PushHandleScope,
// first deleting any handle-scope extensions created since. When
// |gc_allowed| is false the deletion goes through TryCallRuntime and an
// allocation failure is returned; otherwise returns NULL. |saved| (if
// valid) is preserved across the runtime call; |scratch| is clobbered.
Object* MacroAssembler::PopHandleScopeHelper(Register saved,
                                             Register scratch,
                                             bool gc_allowed) {
  Object* result = NULL;
  ExternalReference extensions_address =
      ExternalReference::handle_scope_extensions_address();
  Label write_back;
  // Fast path: no extensions were created, skip the runtime call.
  mov(scratch, Operand::StaticVariable(extensions_address));
  cmp(Operand(scratch), Immediate(0));
  j(equal, &write_back);
  // Calling a runtime function messes with registers so we save and
  // restore any one we're asked not to change
  if (saved.is_valid()) push(saved);
  if (gc_allowed) {
    CallRuntime(Runtime::kDeleteHandleScopeExtensions, 0);
  } else {
    result = TryCallRuntime(Runtime::kDeleteHandleScopeExtensions, 0);
    if (result->IsFailure()) return result;
  }
  if (saved.is_valid()) pop(saved);

  bind(&write_back);
  // Pop in reverse order of PushHandleScope: limit, next, then the
  // smi-tagged extensions count (untagged before storing back).
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address();
  pop(Operand::StaticVariable(limit_address));
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  pop(Operand::StaticVariable(next_address));
  pop(scratch);
  shr(scratch, kSmiTagSize);
  mov(Operand::StaticVariable(extensions_address), scratch);

  return result;
}
1261
1262
// Pops the handle scope; GC is allowed, so failures are fatal (returns void).
void MacroAssembler::PopHandleScope(Register saved, Register scratch) {
  PopHandleScopeHelper(saved, scratch, true);
}
1266
1267
// Pops the handle scope without allowing GC; returns a Failure on
// allocation failure instead of aborting.
Object* MacroAssembler::TryPopHandleScope(Register saved, Register scratch) {
  return PopHandleScopeHelper(saved, scratch, false);
}
1271
1272
// Emits a jump into the CEntry runtime stub with |ext| as the target
// entry point (in ebx). Arguments are expected to already be set up.
void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces(1);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}
1279
1280
// Emits the argument-count check shared by all InvokeCode/InvokeFunction
// paths. When expected and actual counts provably match, falls straight
// through; otherwise emits a call/jump to the arguments adaptor trampoline
// (the callee code is passed via |code_constant| or |code_operand|,
// whichever is in use). On the CALL_FUNCTION adaptor path, control jumps
// to |done| after the adapted call returns.
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    const Operand& code_operand,
                                    Label* done,
                                    InvokeFlag flag) {
  bool definitely_matches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(eax, actual.immediate());
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      ASSERT(expected.reg().is(ebx));
      mov(eax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), Operand(actual.reg()));
      j(equal, &invoke);
      ASSERT(actual.reg().is(eax));
      ASSERT(expected.reg().is(ebx));
    }
  }

  if (!definitely_matches) {
    // Counts may differ: go through the arguments adaptor. The adaptor
    // expects the callee code address in edx.
    Handle<Code> adaptor =
        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
    if (!code_constant.is_null()) {
      mov(edx, Immediate(code_constant));
      add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_operand.is_reg(edx)) {
      mov(edx, code_operand);
    }

    if (flag == CALL_FUNCTION) {
      call(adaptor, RelocInfo::CODE_TARGET);
      jmp(done);
    } else {
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
1344
1345
// Invokes (calls or jumps to) code located at operand |code| after the
// argument-count prologue. |flag| selects call vs. tail call.
void MacroAssembler::InvokeCode(const Operand& code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag) {
  Label done;
  InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code);
  }
  bind(&done);
}
1360
1361
// Invokes (calls or jumps to) the code object |code| with relocation mode
// |rmode| after the argument-count prologue. |flag| selects call vs. jump.
void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag) {
  Label done;
  Operand dummy(eax);  // Unused: prologue takes code via the constant.
  InvokePrologue(expected, actual, code, dummy, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code, rmode);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code, rmode);
  }
  bind(&done);
}
1378
1379
// Invokes the JS function in edi (required register, per calling
// convention): loads its context into esi, the expected argument count
// into ebx, and invokes its code via edx.
void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(fun.is(edi));
  mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset));
  // Skip the code object header to get the entry address.
  lea(edx, FieldOperand(edx, Code::kHeaderSize));

  ParameterCount expected(ebx);
  InvokeCode(Operand(edx), expected, actual, flag);
}
1393
1394
// Invokes a known, already-compiled JS function: materializes it in edi,
// sets up its context in esi, and invokes its cached code object with the
// statically-known expected argument count.
void MacroAssembler::InvokeFunction(JSFunction* function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(function->is_compiled());
  // Get the function and setup the context.
  mov(edi, Immediate(Handle<JSFunction>(function)));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  // Invoke the cached code.
  Handle<Code> code(function->code());
  ParameterCount expected(function->shared()->formal_parameter_count());
  InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag);
}
1408
1409
// Invokes the JavaScript builtin |id|; the builtin entry is loaded into
// edx. The caller is responsible for pushing the right number of
// arguments, which is not checked here.
void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments match the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinEntry(edx, id);
  InvokeCode(Operand(edx), expected, expected, flag);
}
1421
1422
// Loads the entry address of JavaScript builtin |id| into |target|.
// Clobbers edi (left holding the builtin's JSFunction, which InvokeBuiltin
// relies on as the callee register).
void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(edi));  // edi is used below.

  // Load the builtins object into target register.
  mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));

  // Load the JavaScript builtin function from the builtins object.
  mov(edi, FieldOperand(target, JSBuiltinsObject::OffsetOfFunctionWithId(id)));

  // Load the code entry point from the builtins object.
  mov(target, FieldOperand(target, JSBuiltinsObject::OffsetOfCodeWithId(id)));
  if (FLAG_debug_code) {
    // Make sure the code objects in the builtins object and in the
    // builtin function are the same.
    push(target);
    mov(target, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
    mov(target, FieldOperand(target, SharedFunctionInfo::kCodeOffset));
    cmp(target, Operand(esp, 0));
    Assert(equal, "Builtin code object changed");
    pop(target);
  }
  // Skip the code object header to get the entry address.
  lea(target, FieldOperand(target, Code::kHeaderSize));
}
1447
1448
// Loads into |dst| the function context |context_chain_length| levels up
// from the current context (esi), resolving intermediate contexts to
// their enclosing function context at each end.
void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    mov(dst, Operand(esi, Context::SlotOffset(Context::CLOSURE_INDEX)));
    // Load the function context (which is the incoming, outer context).
    mov(dst, FieldOperand(dst, JSFunction::kContextOffset));
    for (int i = 1; i < context_chain_length; i++) {
      mov(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
      mov(dst, FieldOperand(dst, JSFunction::kContextOffset));
    }
    // The context may be an intermediate context, not a function context.
    mov(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  } else {  // Slot is in the current function context.
    // The context may be an intermediate context, not a function context.
    mov(dst, Operand(esi, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  }
}
1466
1467
1468
// Emits a plain return (no stack arguments popped).
void MacroAssembler::Ret() {
  ret(0);
}
1472
1473
Leon Clarkee46be812010-01-19 14:06:41 +00001474void MacroAssembler::Drop(int stack_elements) {
1475 if (stack_elements > 0) {
1476 add(Operand(esp), Immediate(stack_elements * kPointerSize));
1477 }
1478}
1479
1480
// Emits a move of handle |value| into register |dst|.
void MacroAssembler::Move(Register dst, Handle<Object> value) {
  mov(dst, value);
}
1484
1485
Steve Blocka7e24c12009-10-30 11:49:00 +00001486void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
1487 if (FLAG_native_code_counters && counter->Enabled()) {
1488 mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
1489 }
1490}
1491
1492
1493void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
1494 ASSERT(value > 0);
1495 if (FLAG_native_code_counters && counter->Enabled()) {
1496 Operand operand = Operand::StaticVariable(ExternalReference(counter));
1497 if (value == 1) {
1498 inc(operand);
1499 } else {
1500 add(operand, Immediate(value));
1501 }
1502 }
1503}
1504
1505
1506void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
1507 ASSERT(value > 0);
1508 if (FLAG_native_code_counters && counter->Enabled()) {
1509 Operand operand = Operand::StaticVariable(ExternalReference(counter));
1510 if (value == 1) {
1511 dec(operand);
1512 } else {
1513 sub(operand, Immediate(value));
1514 }
1515 }
1516}
1517
1518
Leon Clarked91b9f72010-01-27 17:25:45 +00001519void MacroAssembler::IncrementCounter(Condition cc,
1520 StatsCounter* counter,
1521 int value) {
1522 ASSERT(value > 0);
1523 if (FLAG_native_code_counters && counter->Enabled()) {
1524 Label skip;
1525 j(NegateCondition(cc), &skip);
1526 pushfd();
1527 IncrementCounter(counter, value);
1528 popfd();
1529 bind(&skip);
1530 }
1531}
1532
1533
1534void MacroAssembler::DecrementCounter(Condition cc,
1535 StatsCounter* counter,
1536 int value) {
1537 ASSERT(value > 0);
1538 if (FLAG_native_code_counters && counter->Enabled()) {
1539 Label skip;
1540 j(NegateCondition(cc), &skip);
1541 pushfd();
1542 DecrementCounter(counter, value);
1543 popfd();
1544 bind(&skip);
1545 }
1546}
1547
1548
// Debug-build-only Check: emits the condition check and abort path only
// when --debug-code is on.
void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code) Check(cc, msg);
}
1552
1553
// Emits code that aborts with |msg| unless condition |cc| holds.
void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L, taken);
  Abort(msg);
  // will not return here
  bind(&L);
}
1561
1562
Steve Block6ded16b2010-05-10 14:33:55 +01001563void MacroAssembler::CheckStackAlignment() {
1564 int frame_alignment = OS::ActivationFrameAlignment();
1565 int frame_alignment_mask = frame_alignment - 1;
1566 if (frame_alignment > kPointerSize) {
1567 ASSERT(IsPowerOf2(frame_alignment));
1568 Label alignment_as_expected;
1569 test(esp, Immediate(frame_alignment_mask));
1570 j(zero, &alignment_as_expected);
1571 // Abort if stack is not aligned.
1572 int3();
1573 bind(&alignment_as_expected);
1574 }
1575}
1576
1577
// Emits code that aborts execution with message |msg| via Runtime::kAbort,
// then traps. The message pointer is smuggled to the runtime as a pair of
// smis (aligned base + offset) so the GC never sees an unaligned pointer.
void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  set_allow_stub_calls(true);

  push(eax);
  push(Immediate(p0));
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
  int3();
}
1603
1604
// Jumps to |failure| unless |instance_type| describes a sequential ASCII
// string. |scratch| receives a masked copy of the instance type (may alias
// |instance_type|, which is then clobbered).
void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Register scratch,
    Label* failure) {
  if (!scratch.is(instance_type)) {
    mov(scratch, instance_type);
  }
  // Keep only the string-ness, representation and encoding bits, then
  // compare against the sequential-ASCII pattern.
  and_(scratch,
       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
  cmp(scratch, kStringTag | kSeqStringTag | kAsciiStringTag);
  j(not_equal, failure);
}
1617
1618
// Jumps to |failure| unless both |object1| and |object2| are flat
// (sequential) ASCII strings. Both scratch registers are clobbered.
void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register object1,
                                                         Register object2,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         Label* failure) {
  // Check that both objects are not smis. ANDing the two values leaves
  // the smi tag bit clear only if at least one of them is a smi.
  ASSERT_EQ(0, kSmiTag);
  mov(scratch1, Operand(object1));
  and_(scratch1, Operand(object2));
  test(scratch1, Immediate(kSmiTagMask));
  j(zero, failure);

  // Load instance type for both strings.
  mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
  mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
  movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ascii strings.
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
  // Interleave bits from both instance types and compare them in one check.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  and_(scratch1, kFlatAsciiStringMask);
  and_(scratch2, kFlatAsciiStringMask);
  // scratch1 = scratch1 | (scratch2 << 3); the masks don't overlap.
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3));
  j(not_equal, failure);
}
1649
1650
Steve Block6ded16b2010-05-10 14:33:55 +01001651void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
1652 int frameAlignment = OS::ActivationFrameAlignment();
1653 if (frameAlignment != 0) {
1654 // Make stack end at alignment and make room for num_arguments words
1655 // and the original value of esp.
1656 mov(scratch, esp);
1657 sub(Operand(esp), Immediate((num_arguments + 1) * kPointerSize));
1658 ASSERT(IsPowerOf2(frameAlignment));
1659 and_(esp, -frameAlignment);
1660 mov(Operand(esp, num_arguments * kPointerSize), scratch);
1661 } else {
1662 sub(Operand(esp), Immediate(num_arguments * kPointerSize));
1663 }
1664}
1665
1666
// Emits a call to C function |function| with |num_arguments| already set
// up by PrepareCallCFunction. The target address goes through eax.
void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Trashing eax is ok as it will be the return value.
  mov(Operand(eax), Immediate(function));
  CallCFunction(eax, num_arguments);
}
1673
1674
// Emits a call to the C function whose address is in |function|, then
// unwinds the argument area set up by PrepareCallCFunction (restoring the
// saved esp when frame alignment was applied, otherwise just popping the
// argument words).
void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  // Check stack alignment.
  if (FLAG_debug_code) {
    CheckStackAlignment();
  }

  call(Operand(function));
  if (OS::ActivationFrameAlignment() != 0) {
    // Restore the esp saved by PrepareCallCFunction.
    mov(esp, Operand(esp, num_arguments * kPointerSize));
  } else {
    add(Operand(esp), Immediate(num_arguments * sizeof(int32_t)));
  }
}
1689
1690
// Sets up a macro assembler writing directly over |size| bytes of existing
// code at |address|, for patching already-generated code in place.
CodePatcher::CodePatcher(byte* address, int size)
    : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap on order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
1698
1699
// Finalizes the patch: flushes the instruction cache over the patched
// region and verifies exactly |size_| bytes were emitted.
CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
1708
1709
1710} } // namespace v8::internal
Leon Clarkef7060e22010-06-03 12:02:55 +01001711
1712#endif // V8_TARGET_ARCH_IA32