// Copyright 2006-2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "debug.h"
#include "runtime.h"
#include "serialize.h"

namespace v8 {
namespace internal {

// -------------------------------------------------------------------------
// MacroAssembler implementation.

MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      code_object_(Heap::undefined_value()) {
}


void MacroAssembler::RecordWriteHelper(Register object,
                                       Register addr,
                                       Register scratch) {
  if (FLAG_debug_code) {
    // Check that the object is not in new space.
    Label not_in_new_space;
    InNewSpace(object, scratch, not_equal, &not_in_new_space);
    Abort("new-space object passed to RecordWriteHelper");
    bind(&not_in_new_space);
  }

  Label fast;

  // Compute the page start address from the heap object pointer, and reuse
  // the 'object' register for it.
  and_(object, ~Page::kPageAlignmentMask);
  Register page_start = object;

  // Compute the bit addr in the remembered set/index of the pointer in the
  // page. Reuse 'addr' as pointer_offset.
  sub(addr, Operand(page_start));
  shr(addr, kObjectAlignmentBits);
  Register pointer_offset = addr;
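  // pointer_offset is now (addr - page_start) >> kObjectAlignmentBits, i.e.
  // the index of this slot's bit within the page's remembered set.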

  // If the bit offset lies beyond the normal remembered set range, it is in
  // the extra remembered set area of a large object.
  cmp(pointer_offset, Page::kPageSize / kPointerSize);
  j(less, &fast);

  // Adjust 'page_start' so that addressing using 'pointer_offset' hits the
  // extra remembered set after the large object.

  // Find the length of the large object (FixedArray).
  mov(scratch, Operand(page_start, Page::kObjectStartOffset
                                   + FixedArray::kLengthOffset));
  Register array_length = scratch;

  // Extra remembered set starts right after the large object (a FixedArray),
  // at
  //   page_start + kObjectStartOffset + objectSize
  // where objectSize is FixedArray::kHeaderSize + kPointerSize * array_length.
  // Add the delta between the end of the normal RSet and the start of the
  // extra RSet to 'page_start', so that addressing the bit using
  // 'pointer_offset' hits the extra RSet words.
  lea(page_start,
      Operand(page_start, array_length, times_pointer_size,
              Page::kObjectStartOffset + FixedArray::kHeaderSize
                  - Page::kRSetEndOffset));

  // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
  // to limit code size. We should probably evaluate this decision by
  // measuring the performance of an equivalent implementation using
  // "simpler" instructions.
  bind(&fast);
  bts(Operand(page_start, Page::kRSetOffset), pointer_offset);
}


void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                Label* branch) {
  ASSERT(cc == equal || cc == not_equal);
  if (Serializer::enabled()) {
    // Can't do arithmetic on external references if it might get serialized.
    mov(scratch, Operand(object));
    // The mask isn't really an address. We load it as an external reference in
    // case the size of the new space is different between the snapshot maker
    // and the running system.
    and_(Operand(scratch), Immediate(ExternalReference::new_space_mask()));
    cmp(Operand(scratch), Immediate(ExternalReference::new_space_start()));
    j(cc, branch);
  } else {
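    // Outside the serializer the new-space start and mask are compile-time
    // constants, so the membership test reduces to checking that
    // ((object - new_space_start) & mask) is zero.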
    int32_t new_space_start = reinterpret_cast<int32_t>(
        ExternalReference::new_space_start().address());
    lea(scratch, Operand(object, -new_space_start));
    and_(scratch, Heap::NewSpaceMask());
    j(cc, branch);
  }
}


// Set the remembered set bit for [object+offset].
// object is the object being stored into, value is the object being stored.
// If offset is zero, then the scratch register contains the array index into
// the elements array represented as a Smi.
// All registers are clobbered by the operation.
void MacroAssembler::RecordWrite(Register object, int offset,
                                 Register value, Register scratch) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are esi.
  ASSERT(!object.is(esi) && !value.is(esi) && !scratch.is(esi));

  // First, check if a remembered set write is even needed. The tests below
  // catch stores of Smis and stores into young gen (which does not have space
  // for the remembered set bits).
  Label done;

  // Skip barrier if writing a smi.
  ASSERT_EQ(0, kSmiTag);
  test(value, Immediate(kSmiTagMask));
  j(zero, &done);

  InNewSpace(object, value, equal, &done);

  // The offset is relative to a tagged or untagged HeapObject pointer,
  // so either offset or offset + kHeapObjectTag must be a
  // multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize) ||
         IsAligned(offset + kHeapObjectTag, kPointerSize));

  // We use optimized write barrier code if the word being written to is not in
  // a large object chunk or is in the first page of a large object chunk.
  // We make sure that an offset is inside the right limits whether it is
  // tagged or untagged.
  if ((offset > 0) && (offset < Page::kMaxHeapObjectSize - kHeapObjectTag)) {
    // Compute the bit offset in the remembered set, leave it in 'value'.
    lea(value, Operand(object, offset));
    and_(value, Page::kPageAlignmentMask);
    shr(value, kPointerSizeLog2);

    // Compute the page address from the heap object pointer, leave it in
    // 'object'.
    and_(object, ~Page::kPageAlignmentMask);

    // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
    // to limit code size. We should probably evaluate this decision by
    // measuring the performance of an equivalent implementation using
    // "simpler" instructions.
    bts(Operand(object, Page::kRSetOffset), value);
  } else {
    Register dst = scratch;
    if (offset != 0) {
      lea(dst, Operand(object, offset));
    } else {
      // Array access: calculate the destination address in the same manner as
      // KeyedStoreIC::GenerateGeneric. Multiply a smi by 2 to get an offset
      // into an array of words.
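      // A smi index k is stored as k << kSmiTagSize, so scaling it with
      // times_half_pointer_size yields the byte offset k * kPointerSize.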
      ASSERT_EQ(1, kSmiTagSize);
      ASSERT_EQ(0, kSmiTag);
      lea(dst, Operand(object, dst, times_half_pointer_size,
                       FixedArray::kHeaderSize - kHeapObjectTag));
    }
    // If we are already generating a shared stub, not inlining the
    // record write code isn't going to save us any memory.
    if (generating_stub()) {
      RecordWriteHelper(object, dst, value);
    } else {
      RecordWriteStub stub(object, dst, value);
      CallStub(&stub);
    }
  }

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    mov(object, Immediate(BitCast<int32_t>(kZapValue)));
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
    mov(scratch, Immediate(BitCast<int32_t>(kZapValue)));
  }
}


void MacroAssembler::StackLimitCheck(Label* on_stack_overflow) {
  cmp(esp,
      Operand::StaticVariable(ExternalReference::address_of_stack_limit()));
  j(below, on_stack_overflow);
}


#ifdef ENABLE_DEBUGGER_SUPPORT
void MacroAssembler::SaveRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of registers to memory location.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(Operand::StaticVariable(reg_addr), reg);
    }
  }
}


void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of memory location to registers.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(reg, Operand::StaticVariable(reg_addr));
    }
  }
}


void MacroAssembler::PushRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Push the content of the memory location to the stack.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      push(Operand::StaticVariable(reg_addr));
    }
  }
}


void MacroAssembler::PopRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Pop the content from the stack to the memory location.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      pop(Operand::StaticVariable(reg_addr));
    }
  }
}


void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
                                                    Register scratch,
                                                    RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of the stack to the memory location and adjust base.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      mov(scratch, Operand(base, 0));
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(Operand::StaticVariable(reg_addr), scratch);
      lea(base, Operand(base, kPointerSize));
    }
  }
}

void MacroAssembler::DebugBreak() {
  Set(eax, Immediate(0));
  mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak)));
  CEntryStub ces(1);
  call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}
#endif

void MacroAssembler::Set(Register dst, const Immediate& x) {
  if (x.is_zero()) {
    xor_(dst, Operand(dst));  // shorter than mov
  } else {
    mov(dst, x);
  }
}


void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}


void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}


void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       static_cast<int8_t>(type));
}


void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              bool is_heap_object) {
  if (!is_heap_object) {
    test(obj, Immediate(kSmiTagMask));
    j(zero, fail);
  }
  cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
  j(not_equal, fail);
}


Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  ASSERT(kNotStringTag != 0);
  test(instance_type, Immediate(kIsNotStringMask));
  return zero;
}


void MacroAssembler::FCmp() {
  if (CpuFeatures::IsSupported(CMOV)) {
    fucomip();
    ffree(0);
    fincstp();
  } else {
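    // Without fucomip, compare and pop both operands, then move the FPU
    // condition bits into EFLAGS via ax (fnstsw_ax followed by sahf).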
    fucompp();
    push(eax);
    fnstsw_ax();
    sahf();
    pop(eax);
  }
}


void MacroAssembler::AbortIfNotNumber(Register object) {
  Label ok;
  test(object, Immediate(kSmiTagMask));
  j(zero, &ok);
  cmp(FieldOperand(object, HeapObject::kMapOffset),
      Factory::heap_number_map());
  Assert(equal, "Operand not a number");
  bind(&ok);
}


void MacroAssembler::AbortIfNotSmi(Register object) {
  test(object, Immediate(kSmiTagMask));
  Assert(equal, "Operand not a smi");
}


void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, Operand(esp));
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(CodeObject()));
  if (FLAG_debug_code) {
    cmp(Operand(esp, 0), Immediate(Factory::undefined_value()));
    Check(not_equal, "code object not properly patched");
  }
}


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (FLAG_debug_code) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, "stack frame types must match");
  }
  leave();
}

void MacroAssembler::EnterExitFramePrologue(ExitFrame::Mode mode) {
  // Setup the frame structure on the stack.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(ebp);
  mov(ebp, Operand(esp));

  // Reserve room for entry stack pointer and push the debug marker.
  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  push(Immediate(CodeObject()));  // Accessed from ExitFrame::code_slot.
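  // Relative to ebp, the frame now holds the return address at +4, the
  // caller's ebp at 0, the saved entry sp slot at -4 (kSPOffset), and the
  // code object just below that.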

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  ExternalReference context_address(Top::k_context_address);
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);
}

void MacroAssembler::EnterExitFrameEpilogue(ExitFrame::Mode mode, int argc) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Save the state of all registers to the stack from the memory
  // location. This is needed to allow nested break points.
  if (mode == ExitFrame::MODE_DEBUG) {
    // TODO(1243899): This should be symmetric to
    // CopyRegistersFromStackToMemory() but it isn't! esp is assumed
    // correct here, but computed for the other call. Very error
    // prone! FIX THIS. Actually there are deeper problems with
    // register saving than this asymmetry (see the bug report
    // associated with this issue).
    PushRegistersFromMemory(kJSCallerSaved);
  }
#endif

  // Reserve space for arguments.
  sub(Operand(esp), Immediate(argc * kPointerSize));

  // Get the required frame alignment for the OS.
  static const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}


void MacroAssembler::EnterExitFrame(ExitFrame::Mode mode) {
  EnterExitFramePrologue(mode);

  // Setup argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, Operand(eax));
  lea(esi, Operand(ebp, eax, times_4, offset));
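  // eax holds the argument count on entry; edi keeps it in a callee-saved
  // register and esi now points at the highest-addressed argument on the
  // caller's stack.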

  EnterExitFrameEpilogue(mode, 2);
}


void MacroAssembler::EnterApiExitFrame(ExitFrame::Mode mode,
                                       int stack_space,
                                       int argc) {
  EnterExitFramePrologue(mode);

  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  lea(esi, Operand(ebp, (stack_space * kPointerSize) + offset));

  EnterExitFrameEpilogue(mode, argc);
}


void MacroAssembler::LeaveExitFrame(ExitFrame::Mode mode) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Restore the memory copy of the registers by digging them out from
  // the stack. This is needed to allow nested break points.
  if (mode == ExitFrame::MODE_DEBUG) {
    // It's okay to clobber register ebx below because we don't need
    // the function pointer after this.
    const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
    int kOffset = ExitFrameConstants::kCodeOffset - kCallerSavedSize;
    lea(ebx, Operand(ebp, kOffset));
    CopyRegistersFromStackToMemory(ebx, ecx, kJSCallerSaved);
  }
#endif

  // Get the return address from the stack and restore the frame pointer.
  mov(ecx, Operand(ebp, 1 * kPointerSize));
  mov(ebp, Operand(ebp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  lea(esp, Operand(esi, 1 * kPointerSize));

  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Top::k_context_address);
  mov(esi, Operand::StaticVariable(context_address));
#ifdef DEBUG
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Push the return address to get ready to return.
  push(ecx);

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}


void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  // Adjust this code if not the case.
  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
  // The pc (return address) is already on TOS.
  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      push(Immediate(StackHandler::TRY_CATCH));
    } else {
      push(Immediate(StackHandler::TRY_FINALLY));
    }
    push(ebp);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The frame pointer does not point to a JS frame so we save NULL
    // for ebp. We expect the code throwing an exception to check ebp
    // before dereferencing it to restore the context.
    push(Immediate(StackHandler::ENTRY));
    push(Immediate(0));  // NULL frame pointer.
  }
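  // From the stack top, after the final push below, the handler consists of:
  // next handler, saved frame pointer, handler state, and the return pc
  // that was already on the stack.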
  // Save the current handler as the next handler.
  push(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
  // Link this handler as the new current one.
  mov(Operand::StaticVariable(ExternalReference(Top::k_handler_address)), esp);
}


void MacroAssembler::PopTryHandler() {
  ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
  pop(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
  add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize));
}


Register MacroAssembler::CheckMaps(JSObject* object, Register object_reg,
                                   JSObject* holder, Register holder_reg,
                                   Register scratch,
                                   int save_at_depth,
                                   Label* miss) {
  // Make sure there's no overlap between scratch and the other
  // registers.
  ASSERT(!scratch.is(object_reg) && !scratch.is(holder_reg));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

  if (save_at_depth == depth) {
    mov(Operand(esp, kPointerSize), object_reg);
  }

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  while (object != holder) {
    depth++;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

    JSObject* prototype = JSObject::cast(object->GetPrototype());
    if (Heap::InNewSpace(prototype)) {
      // Get the map of the current object.
      mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
      cmp(Operand(scratch), Immediate(Handle<Map>(object->map())));
      // Branch on the result of the map check.
      j(not_equal, miss, not_taken);
      // Check access rights to the global object. This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (object->IsJSGlobalProxy()) {
        CheckAccessGlobalProxy(reg, scratch, miss);

        // Restore scratch register to be the map of the object.
        // We load the prototype from the map in the scratch register.
        mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
      }
      // The prototype is in new space; we cannot store a reference
      // to it in the code. Load it from the map.
      reg = holder_reg;  // from now the object is in holder_reg
      mov(reg, FieldOperand(scratch, Map::kPrototypeOffset));
    } else {
      // Check the map of the current object.
      cmp(FieldOperand(reg, HeapObject::kMapOffset),
          Immediate(Handle<Map>(object->map())));
      // Branch on the result of the map check.
      j(not_equal, miss, not_taken);
      // Check access rights to the global object. This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (object->IsJSGlobalProxy()) {
        CheckAccessGlobalProxy(reg, scratch, miss);
      }
      // The prototype is in old space; load it directly.
      reg = holder_reg;  // from now the object is in holder_reg
      mov(reg, Handle<JSObject>(prototype));
    }

    if (save_at_depth == depth) {
      mov(Operand(esp, kPointerSize), reg);
    }

    // Go to the next object in the prototype chain.
    object = prototype;
  }

  // Check the holder map.
  cmp(FieldOperand(reg, HeapObject::kMapOffset),
      Immediate(Handle<Map>(holder->map())));
  j(not_equal, miss, not_taken);

  // Log the check depth.
  LOG(IntEvent("check-maps-depth", depth + 1));

  // Perform security check for access to the global object and return
  // the holder register.
  ASSERT(object == holder);
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
  if (object->IsJSGlobalProxy()) {
    CheckAccessGlobalProxy(reg, scratch, miss);
  }
  return reg;
}


void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));

  // Load current lexical context from the stack frame.
  mov(scratch, Operand(ebp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (FLAG_debug_code) {
    cmp(Operand(scratch), Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
  }
  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, offset));
  mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    push(scratch);
    // Read the first word and compare to global_context_map.
    mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
    cmp(scratch, Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(scratch);
  }

  // Check if both contexts are the same.
  cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  j(equal, &same_contexts, taken);

  // Compare security tokens, save holder_reg on the stack so we can use it
  // as a temporary register.
  //
  // TODO(119): avoid push(holder_reg)/pop(holder_reg)
  push(holder_reg);
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    cmp(holder_reg, Factory::null_value());
    Check(not_equal, "JSGlobalProxy::context() should not be null.");

    push(holder_reg);
    // Read the first word and compare to global_context_map().
    mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    cmp(holder_reg, Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);
  }

  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, token_offset));
  cmp(scratch, FieldOperand(holder_reg, token_offset));
  pop(holder_reg);
  j(not_equal, miss, not_taken);

  bind(&same_contexts);
}


void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register result_end,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    ASSERT(scratch.is(no_reg));
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    cmp(result, Operand::StaticVariable(new_space_allocation_top));
    Check(equal, "Unexpected allocation top");
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available.
  if (scratch.is(no_reg)) {
    mov(result, Operand::StaticVariable(new_space_allocation_top));
  } else {
    ASSERT(!scratch.is(result_end));
    mov(Operand(scratch), Immediate(new_space_allocation_top));
    mov(result, Operand(scratch, 0));
  }
}


void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch) {
  if (FLAG_debug_code) {
    test(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, "Unaligned allocation in new space");
  }

  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Update new top. Use scratch if available.
  if (scratch.is(no_reg)) {
    mov(Operand::StaticVariable(new_space_allocation_top), result_end);
  } else {
    mov(Operand(scratch, 0), result_end);
  }
}


void MacroAssembler::AllocateInNewSpace(int object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  lea(result_end, Operand(result, object_size));
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required, not_taken);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}


void MacroAssembler::AllocateInNewSpace(int header_size,
                                        ScaleFactor element_size,
                                        Register element_count,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  lea(result_end, Operand(result, element_count, element_size, header_size));
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}


void MacroAssembler::AllocateInNewSpace(Register object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  if (!object_size.is(result_end)) {
    mov(result_end, object_size);
  }
  add(result_end, Operand(result));
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required, not_taken);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}


void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Make sure the object has no tag before resetting top.
  and_(Operand(object), Immediate(~kHeapObjectTagMask));
#ifdef DEBUG
  cmp(object, Operand::StaticVariable(new_space_allocation_top));
  Check(below, "Undo allocation of non allocated memory");
#endif
  mov(Operand::StaticVariable(new_space_allocation_top), object);
}


void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate heap number in new space.
  AllocateInNewSpace(HeapNumber::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::heap_number_map()));
}


void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  ASSERT(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
  and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));
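  // scratch1 now holds the size of the character payload in bytes, rounded
  // up to the object alignment; the header is added by AllocateInNewSpace.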

  // Allocate two byte string in new space.
  AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, length);
  ASSERT(kCharSize == 1);
  add(Operand(scratch1), Immediate(kObjectAlignmentMask));
  and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));

  // Allocate ascii string in new space.
  AllocateInNewSpace(SeqAsciiString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::ascii_string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateConsString(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate cons string in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::cons_string_map()));
}


void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  // Allocate cons string in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::cons_ascii_string_map()));
}


void MacroAssembler::NegativeZeroTest(CodeGenerator* cgen,
                                      Register result,
                                      Register op,
                                      JumpTarget* then_target) {
  JumpTarget ok;
  test(result, Operand(result));
  ok.Branch(not_zero, taken);
  test(op, Operand(op));
  then_target->Branch(sign, not_taken);
  ok.Bind();
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
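  // A zero result with a negative operand means the exact result was -0,
  // which cannot be represented as a smi; branch to then_label in that case.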
1008 Label ok;
1009 test(result, Operand(result));
1010 j(not_zero, &ok, taken);
1011 test(op, Operand(op));
1012 j(sign, then_label, not_taken);
1013 bind(&ok);
1014}
1015
1016
1017void MacroAssembler::NegativeZeroTest(Register result,
1018 Register op1,
1019 Register op2,
1020 Register scratch,
1021 Label* then_label) {
1022 Label ok;
1023 test(result, Operand(result));
1024 j(not_zero, &ok, taken);
1025 mov(scratch, Operand(op1));
1026 or_(scratch, Operand(op2));
1027 j(sign, then_label, not_taken);
1028 bind(&ok);
1029}
1030
1031
1032void MacroAssembler::TryGetFunctionPrototype(Register function,
1033 Register result,
1034 Register scratch,
1035 Label* miss) {
1036 // Check that the receiver isn't a smi.
1037 test(function, Immediate(kSmiTagMask));
1038 j(zero, miss, not_taken);
1039
1040 // Check that the function really is a function.
1041 CmpObjectType(function, JS_FUNCTION_TYPE, result);
1042 j(not_equal, miss, not_taken);
1043
1044 // Make sure that the function has an instance prototype.
1045 Label non_instance;
1046 movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
1047 test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
1048 j(not_zero, &non_instance, not_taken);
1049
1050 // Get the prototype or initial map from the function.
1051 mov(result,
1052 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
1053
1054 // If the prototype or initial map is the hole, don't return it and
1055 // simply miss the cache instead. This will allow us to allocate a
1056 // prototype object on-demand in the runtime system.
1057 cmp(Operand(result), Immediate(Factory::the_hole_value()));
1058 j(equal, miss, not_taken);
1059
1060 // If the function does not have an initial map, we're done.
1061 Label done;
1062 CmpObjectType(result, MAP_TYPE, scratch);
1063 j(not_equal, &done);
1064
1065 // Get the prototype from the initial map.
1066 mov(result, FieldOperand(result, Map::kPrototypeOffset));
1067 jmp(&done);
1068
1069 // Non-instance prototype: Fetch prototype from constructor field
1070 // in initial map.
1071 bind(&non_instance);
1072 mov(result, FieldOperand(result, Map::kConstructorOffset));
1073
1074 // All done.
1075 bind(&done);
1076}
1077
1078
1079void MacroAssembler::CallStub(CodeStub* stub) {
Leon Clarkee46be812010-01-19 14:06:41 +00001080 ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
Steve Blocka7e24c12009-10-30 11:49:00 +00001081 call(stub->GetCode(), RelocInfo::CODE_TARGET);
1082}
1083
1084
Leon Clarkee46be812010-01-19 14:06:41 +00001085Object* MacroAssembler::TryCallStub(CodeStub* stub) {
1086 ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
1087 Object* result = stub->TryGetCode();
1088 if (!result->IsFailure()) {
1089 call(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
1090 }
1091 return result;
1092}
1093
1094
Steve Blockd0582a62009-12-15 09:54:21 +00001095void MacroAssembler::TailCallStub(CodeStub* stub) {
Leon Clarkee46be812010-01-19 14:06:41 +00001096 ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
Steve Blockd0582a62009-12-15 09:54:21 +00001097 jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
1098}
1099
1100
Leon Clarkee46be812010-01-19 14:06:41 +00001101Object* MacroAssembler::TryTailCallStub(CodeStub* stub) {
1102 ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
1103 Object* result = stub->TryGetCode();
1104 if (!result->IsFailure()) {
1105 jmp(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
1106 }
1107 return result;
1108}
1109
1110
Steve Blocka7e24c12009-10-30 11:49:00 +00001111void MacroAssembler::StubReturn(int argc) {
1112 ASSERT(argc >= 1 && generating_stub());
1113 ret((argc - 1) * kPointerSize);
1114}
1115
1116
1117void MacroAssembler::IllegalOperation(int num_arguments) {
1118 if (num_arguments > 0) {
1119 add(Operand(esp), Immediate(num_arguments * kPointerSize));
1120 }
1121 mov(eax, Immediate(Factory::undefined_value()));
1122}
1123
1124
1125void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
1126 CallRuntime(Runtime::FunctionForId(id), num_arguments);
1127}
1128
1129
Leon Clarkee46be812010-01-19 14:06:41 +00001130Object* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
1131 int num_arguments) {
1132 return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
1133}
1134
1135
Steve Blocka7e24c12009-10-30 11:49:00 +00001136void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
1137 // If the expected number of arguments of the runtime function is
1138 // constant, we check that the actual number of arguments match the
1139 // expectation.
1140 if (f->nargs >= 0 && f->nargs != num_arguments) {
1141 IllegalOperation(num_arguments);
1142 return;
1143 }
1144
Leon Clarke4515c472010-02-03 11:58:03 +00001145 // TODO(1236192): Most runtime routines don't need the number of
1146 // arguments passed in because it is constant. At some point we
1147 // should remove this need and make the runtime routine entry code
1148 // smarter.
1149 Set(eax, Immediate(num_arguments));
1150 mov(ebx, Immediate(ExternalReference(f)));
1151 CEntryStub ces(1);
1152 CallStub(&ces);
Steve Blocka7e24c12009-10-30 11:49:00 +00001153}
1154
1155
Andrei Popescu402d9372010-02-26 13:31:12 +00001156void MacroAssembler::CallExternalReference(ExternalReference ref,
1157 int num_arguments) {
1158 mov(eax, Immediate(num_arguments));
1159 mov(ebx, Immediate(ref));
1160
1161 CEntryStub stub(1);
1162 CallStub(&stub);
1163}
1164
1165
Leon Clarkee46be812010-01-19 14:06:41 +00001166Object* MacroAssembler::TryCallRuntime(Runtime::Function* f,
1167 int num_arguments) {
1168 if (f->nargs >= 0 && f->nargs != num_arguments) {
1169 IllegalOperation(num_arguments);
1170 // Since we did not call the stub, there was no allocation failure.
1171 // Return some non-failure object.
1172 return Heap::undefined_value();
1173 }
1174
Leon Clarke4515c472010-02-03 11:58:03 +00001175 // TODO(1236192): Most runtime routines don't need the number of
1176 // arguments passed in because it is constant. At some point we
1177 // should remove this need and make the runtime routine entry code
1178 // smarter.
1179 Set(eax, Immediate(num_arguments));
1180 mov(ebx, Immediate(ExternalReference(f)));
1181 CEntryStub ces(1);
1182 return TryCallStub(&ces);
Leon Clarkee46be812010-01-19 14:06:41 +00001183}
1184
1185
Steve Block6ded16b2010-05-10 14:33:55 +01001186void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
1187 int num_arguments,
1188 int result_size) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001189 // TODO(1236192): Most runtime routines don't need the number of
1190 // arguments passed in because it is constant. At some point we
1191 // should remove this need and make the runtime routine entry code
1192 // smarter.
1193 Set(eax, Immediate(num_arguments));
Steve Block6ded16b2010-05-10 14:33:55 +01001194 JumpToExternalReference(ext);
1195}
1196
1197
1198void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
1199 int num_arguments,
1200 int result_size) {
1201 TailCallExternalReference(ExternalReference(fid), num_arguments, result_size);
Steve Blocka7e24c12009-10-30 11:49:00 +00001202}
1203
1204
Steve Blockd0582a62009-12-15 09:54:21 +00001205void MacroAssembler::PushHandleScope(Register scratch) {
1206 // Push the number of extensions, smi-tagged so the gc will ignore it.
1207 ExternalReference extensions_address =
1208 ExternalReference::handle_scope_extensions_address();
1209 mov(scratch, Operand::StaticVariable(extensions_address));
1210 ASSERT_EQ(0, kSmiTag);
1211 shl(scratch, kSmiTagSize);
1212 push(scratch);
1213 mov(Operand::StaticVariable(extensions_address), Immediate(0));
1214 // Push next and limit pointers which will be wordsize aligned and
1215 // hence automatically smi tagged.
1216 ExternalReference next_address =
1217 ExternalReference::handle_scope_next_address();
1218 push(Operand::StaticVariable(next_address));
1219 ExternalReference limit_address =
1220 ExternalReference::handle_scope_limit_address();
1221 push(Operand::StaticVariable(limit_address));
1222}
1223
1224
Leon Clarkee46be812010-01-19 14:06:41 +00001225Object* MacroAssembler::PopHandleScopeHelper(Register saved,
1226 Register scratch,
1227 bool gc_allowed) {
1228 Object* result = NULL;
Steve Blockd0582a62009-12-15 09:54:21 +00001229 ExternalReference extensions_address =
1230 ExternalReference::handle_scope_extensions_address();
1231 Label write_back;
1232 mov(scratch, Operand::StaticVariable(extensions_address));
1233 cmp(Operand(scratch), Immediate(0));
1234 j(equal, &write_back);
1235 // Calling a runtime function messes with registers so we save and
1236 // restore any one we're asked not to change
1237 if (saved.is_valid()) push(saved);
Leon Clarkee46be812010-01-19 14:06:41 +00001238 if (gc_allowed) {
1239 CallRuntime(Runtime::kDeleteHandleScopeExtensions, 0);
1240 } else {
1241 result = TryCallRuntime(Runtime::kDeleteHandleScopeExtensions, 0);
1242 if (result->IsFailure()) return result;
1243 }
Steve Blockd0582a62009-12-15 09:54:21 +00001244 if (saved.is_valid()) pop(saved);
1245
1246 bind(&write_back);
1247 ExternalReference limit_address =
1248 ExternalReference::handle_scope_limit_address();
1249 pop(Operand::StaticVariable(limit_address));
1250 ExternalReference next_address =
1251 ExternalReference::handle_scope_next_address();
1252 pop(Operand::StaticVariable(next_address));
1253 pop(scratch);
1254 shr(scratch, kSmiTagSize);
1255 mov(Operand::StaticVariable(extensions_address), scratch);
Leon Clarkee46be812010-01-19 14:06:41 +00001256
1257 return result;
1258}
1259
1260
1261void MacroAssembler::PopHandleScope(Register saved, Register scratch) {
1262 PopHandleScopeHelper(saved, scratch, true);
1263}
1264
1265
1266Object* MacroAssembler::TryPopHandleScope(Register saved, Register scratch) {
1267 return PopHandleScopeHelper(saved, scratch, false);
Steve Blockd0582a62009-12-15 09:54:21 +00001268}
1269
1270
Steve Block6ded16b2010-05-10 14:33:55 +01001271void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001272 // Set the entry point and jump to the C entry runtime stub.
1273 mov(ebx, Immediate(ext));
1274 CEntryStub ces(1);
1275 jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
1276}
1277
1278
1279void MacroAssembler::InvokePrologue(const ParameterCount& expected,
1280 const ParameterCount& actual,
1281 Handle<Code> code_constant,
1282 const Operand& code_operand,
1283 Label* done,
1284 InvokeFlag flag) {
1285 bool definitely_matches = false;
1286 Label invoke;
1287 if (expected.is_immediate()) {
1288 ASSERT(actual.is_immediate());
1289 if (expected.immediate() == actual.immediate()) {
1290 definitely_matches = true;
1291 } else {
1292 mov(eax, actual.immediate());
1293 const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
1294 if (expected.immediate() == sentinel) {
1295 // Don't worry about adapting arguments for builtins that
1296 // don't want that done. Skip adaption code by making it look
1297 // like we have a match between expected and actual number of
1298 // arguments.
1299 definitely_matches = true;
1300 } else {
1301 mov(ebx, expected.immediate());
1302 }
1303 }
1304 } else {
1305 if (actual.is_immediate()) {
1306 // Expected is in register, actual is immediate. This is the
1307 // case when we invoke function values without going through the
1308 // IC mechanism.
1309 cmp(expected.reg(), actual.immediate());
1310 j(equal, &invoke);
1311 ASSERT(expected.reg().is(ebx));
1312 mov(eax, actual.immediate());
1313 } else if (!expected.reg().is(actual.reg())) {
1314 // Both expected and actual are in (different) registers. This
1315 // is the case when we invoke functions using call and apply.
1316 cmp(expected.reg(), Operand(actual.reg()));
1317 j(equal, &invoke);
1318 ASSERT(actual.reg().is(eax));
1319 ASSERT(expected.reg().is(ebx));
1320 }
1321 }
1322
1323 if (!definitely_matches) {
1324 Handle<Code> adaptor =
1325 Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
1326 if (!code_constant.is_null()) {
1327 mov(edx, Immediate(code_constant));
1328 add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
1329 } else if (!code_operand.is_reg(edx)) {
1330 mov(edx, code_operand);
1331 }
1332
1333 if (flag == CALL_FUNCTION) {
1334 call(adaptor, RelocInfo::CODE_TARGET);
1335 jmp(done);
1336 } else {
1337 jmp(adaptor, RelocInfo::CODE_TARGET);
1338 }
1339 bind(&invoke);
1340 }
1341}
1342
1343
1344void MacroAssembler::InvokeCode(const Operand& code,
1345 const ParameterCount& expected,
1346 const ParameterCount& actual,
1347 InvokeFlag flag) {
1348 Label done;
1349 InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
1350 if (flag == CALL_FUNCTION) {
1351 call(code);
1352 } else {
1353 ASSERT(flag == JUMP_FUNCTION);
1354 jmp(code);
1355 }
1356 bind(&done);
1357}
1358
1359
1360void MacroAssembler::InvokeCode(Handle<Code> code,
1361 const ParameterCount& expected,
1362 const ParameterCount& actual,
1363 RelocInfo::Mode rmode,
1364 InvokeFlag flag) {
1365 Label done;
1366 Operand dummy(eax);
1367 InvokePrologue(expected, actual, code, dummy, &done, flag);
1368 if (flag == CALL_FUNCTION) {
1369 call(code, rmode);
1370 } else {
1371 ASSERT(flag == JUMP_FUNCTION);
1372 jmp(code, rmode);
1373 }
1374 bind(&done);
1375}
1376
1377
1378void MacroAssembler::InvokeFunction(Register fun,
1379 const ParameterCount& actual,
1380 InvokeFlag flag) {
1381 ASSERT(fun.is(edi));
1382 mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
1383 mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
1384 mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
1385 mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset));
1386 lea(edx, FieldOperand(edx, Code::kHeaderSize));
1387
1388 ParameterCount expected(ebx);
1389 InvokeCode(Operand(edx), expected, actual, flag);
1390}
1391
1392
Andrei Popescu402d9372010-02-26 13:31:12 +00001393void MacroAssembler::InvokeFunction(JSFunction* function,
1394 const ParameterCount& actual,
1395 InvokeFlag flag) {
1396 ASSERT(function->is_compiled());
1397 // Get the function and setup the context.
1398 mov(edi, Immediate(Handle<JSFunction>(function)));
1399 mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00001400
Andrei Popescu402d9372010-02-26 13:31:12 +00001401 // Invoke the cached code.
1402 Handle<Code> code(function->code());
1403 ParameterCount expected(function->shared()->formal_parameter_count());
1404 InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag);
1405}
1406
1407
1408void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001409 // Calls are not allowed in some stubs.
1410 ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());
1411
1412 // Rely on the assertion to check that the number of provided
1413 // arguments match the expected number of arguments. Fake a
1414 // parameter count to avoid emitting code to do the check.
1415 ParameterCount expected(0);
Andrei Popescu402d9372010-02-26 13:31:12 +00001416 GetBuiltinEntry(edx, id);
1417 InvokeCode(Operand(edx), expected, expected, flag);
Steve Blocka7e24c12009-10-30 11:49:00 +00001418}
1419
1420
1421void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
Steve Block6ded16b2010-05-10 14:33:55 +01001422 ASSERT(!target.is(edi));
1423
1424 // Load the builtins object into target register.
1425 mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
1426 mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
1427
Andrei Popescu402d9372010-02-26 13:31:12 +00001428 // Load the JavaScript builtin function from the builtins object.
Steve Block6ded16b2010-05-10 14:33:55 +01001429 mov(edi, FieldOperand(target, JSBuiltinsObject::OffsetOfFunctionWithId(id)));
1430
1431 // Load the code entry point from the builtins object.
1432 mov(target, FieldOperand(target, JSBuiltinsObject::OffsetOfCodeWithId(id)));
1433 if (FLAG_debug_code) {
1434 // Make sure the code objects in the builtins object and in the
1435 // builtin function are the same.
1436 push(target);
1437 mov(target, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
1438 mov(target, FieldOperand(target, SharedFunctionInfo::kCodeOffset));
1439 cmp(target, Operand(esp, 0));
1440 Assert(equal, "Builtin code object changed");
1441 pop(target);
1442 }
1443 lea(target, FieldOperand(target, Code::kHeaderSize));
Steve Blocka7e24c12009-10-30 11:49:00 +00001444}
1445
1446
Steve Blockd0582a62009-12-15 09:54:21 +00001447void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    mov(dst, Operand(esi, Context::SlotOffset(Context::CLOSURE_INDEX)));
    // Load the function context (which is the incoming, outer context).
    mov(dst, FieldOperand(dst, JSFunction::kContextOffset));
    for (int i = 1; i < context_chain_length; i++) {
      mov(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
      mov(dst, FieldOperand(dst, JSFunction::kContextOffset));
    }
    // The context may be an intermediate context, not a function context.
    mov(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  } else {  // Slot is in the current function context.
    // The context may be an intermediate context, not a function context.
    mov(dst, Operand(esi, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  }
}


void MacroAssembler::Ret() {
  ret(0);
}


void MacroAssembler::Drop(int stack_elements) {
  if (stack_elements > 0) {
    add(Operand(esp), Immediate(stack_elements * kPointerSize));
  }
}


void MacroAssembler::Move(Register dst, Handle<Object> value) {
  mov(dst, value);
}


void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
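    // Prefer inc/dec for the common by-one case; they encode shorter than
    // add/sub with an immediate operand.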
    if (value == 1) {
      inc(operand);
    } else {
      add(operand, Immediate(value));
    }
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      dec(operand);
    } else {
      sub(operand, Immediate(value));
    }
  }
}


void MacroAssembler::IncrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
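    // The counter update below uses inc/add, which clobber EFLAGS; save
    // and restore the flags so the caller's condition codes survive.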
    pushfd();
    IncrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}


void MacroAssembler::DecrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
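    // Preserve EFLAGS across the update (see IncrementCounter above).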
    pushfd();
    DecrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code) Check(cc, msg);
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L, taken);
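  // (The 'taken' hint marks the passing branch as the likely one;
  // reaching Abort is the exceptional path.)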
  Abort(msg);
  // Control will not return here.
  bind(&L);
}


void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    Label alignment_as_expected;
    test(esp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
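    // (A breakpoint trap stops execution in the debugger right at the
    // point of misalignment.)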
    int3();
    bind(&alignment_as_expected);
  }
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string as a smi to avoid GC problems; however,
  // msg is not guaranteed to be properly aligned. Instead, we pass an
  // aligned pointer that is a proper v8 smi, but also pass the alignment
  // difference from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
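  // (For example, with the ia32 smi encoding (tag 0 in the low bit), a msg
  // pointer of 0x12345679 yields p0 == 0x12345678, a valid smi, plus a
  // difference of 1 from which the runtime reconstructs the pointer.)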
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  set_allow_stub_calls(true);

  push(eax);
  push(Immediate(p0));
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
  CallRuntime(Runtime::kAbort, 2);
  // Control will not return here.
  int3();
}


void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Register scratch,
    Label* failure) {
  if (!scratch.is(instance_type)) {
    mov(scratch, instance_type);
  }
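  // Keep only the bits that distinguish string-ness, representation and
  // encoding, then compare against the sequential ascii pattern in one go.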
  and_(scratch,
       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
  cmp(scratch, kStringTag | kSeqStringTag | kAsciiStringTag);
  j(not_equal, failure);
}


void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register object1,
                                                         Register object2,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         Label* failure) {
  // Check that both objects are not smis.
  ASSERT_EQ(0, kSmiTag);
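  // ANDing the two words leaves the low tag bit set only if both are heap
  // objects (the smi tag is 0), so a single test rejects either operand
  // being a smi.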
  mov(scratch1, Operand(object1));
  and_(scratch1, Operand(object2));
  test(scratch1, Immediate(kSmiTagMask));
  j(zero, failure);

  // Load instance type for both strings.
  mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
  mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
  movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ascii strings.
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
  // Interleave bits from both instance types and compare them in one check.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  and_(scratch1, kFlatAsciiStringMask);
  and_(scratch2, kFlatAsciiStringMask);
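  // scratch1 + scratch2 * 8 packs the two masked instance types into
  // disjoint 3-bit fields (the ASSERT above guarantees the mask fits in
  // three bits), so one cmp validates both strings at once.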
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3));
  j(not_equal, failure);
}


void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
  int frame_alignment = OS::ActivationFrameAlignment();
  if (frame_alignment != 0) {
    // Make stack end at alignment and make room for num_arguments words
    // and the original value of esp.
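    // (For example, with 16-byte alignment and num_arguments == 2, three
    // words are reserved, esp is rounded down to a 16-byte boundary, and
    // the caller's esp lands in the word just past the argument slots,
    // where CallCFunction restores it from.)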
    mov(scratch, esp);
    sub(Operand(esp), Immediate((num_arguments + 1) * kPointerSize));
    ASSERT(IsPowerOf2(frame_alignment));
    and_(esp, -frame_alignment);
    mov(Operand(esp, num_arguments * kPointerSize), scratch);
  } else {
    sub(Operand(esp), Immediate(num_arguments * kPointerSize));
  }
}


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Trashing eax is ok as it will be the return value.
  mov(Operand(eax), Immediate(function));
  CallCFunction(eax, num_arguments);
}


void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  // Check stack alignment.
  if (FLAG_debug_code) {
    CheckStackAlignment();
  }

  call(Operand(function));
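  // If PrepareCallCFunction aligned the stack, the caller's esp was saved
  // in the word just past the argument area; otherwise simply pop the
  // arguments.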
  if (OS::ActivationFrameAlignment() != 0) {
    mov(esp, Operand(esp, num_arguments * kPointerSize));
  } else {
    add(Operand(esp), Immediate(num_arguments * kPointerSize));
  }
}


CodePatcher::CodePatcher(byte* address, int size)
    : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to
  // patch. The size is adjusted with kGap in order for the assembler to
  // generate size bytes of instructions without failing with buffer size
  // constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);
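  // (On ia32 this is essentially a no-op, since the architecture keeps the
  // instruction cache coherent with writes; the call is kept for
  // portability across V8's targets.)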

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


} }  // namespace v8::internal