// Copyright 2006-2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

28#include "v8.h"
29
Leon Clarkef7060e22010-06-03 12:02:55 +010030#if defined(V8_TARGET_ARCH_IA32)
31
Steve Blocka7e24c12009-10-30 11:49:00 +000032#include "bootstrapper.h"
33#include "codegen-inl.h"
34#include "debug.h"
35#include "runtime.h"
36#include "serialize.h"
37
38namespace v8 {
39namespace internal {
40
41// -------------------------------------------------------------------------
42// MacroAssembler implementation.
43
// Construct a macro assembler emitting into 'buffer' of 'size' bytes.
// Stub calls are allowed by default; code_object_ starts out as the
// undefined value and is patched later (see CodeObject() users below).
MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      code_object_(Heap::undefined_value()) {
}
50
51
// Write-barrier core: marks the page region containing 'addr' (a slot inside
// 'object') as dirty. Clobbers 'object' and 'addr'; 'scratch' is only used
// for the debug-mode new-space check.
void MacroAssembler::RecordWriteHelper(Register object,
                                       Register addr,
                                       Register scratch) {
  if (FLAG_debug_code) {
    // Check that the object is not in new space.
    Label not_in_new_space;
    InNewSpace(object, scratch, not_equal, &not_in_new_space);
    Abort("new-space object passed to RecordWriteHelper");
    bind(&not_in_new_space);
  }

  // Compute the page start address from the heap object pointer, and reuse
  // the 'object' register for it.
  and_(object, ~Page::kPageAlignmentMask);

  // Compute number of region covering addr. See Page::GetRegionNumberForAddress
  // method for more details.
  and_(addr, Page::kPageAlignmentMask);
  shr(addr, Page::kRegionSizeLog2);

  // Set dirty mark for region: bts sets the bit indexed by 'addr' in the
  // page's dirty-flag word.
  bts(Operand(object, Page::kDirtyFlagOffset), addr);
}
75
76
// Compare the masked address of 'object' against the new-space start and
// jump to 'branch' on condition 'cc' (equal => object is in new space,
// not_equal => it is not). Clobbers 'scratch'; 'object' is preserved.
void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                Label* branch) {
  ASSERT(cc == equal || cc == not_equal);
  if (Serializer::enabled()) {
    // Can't do arithmetic on external references if it might get serialized.
    mov(scratch, Operand(object));
    // The mask isn't really an address. We load it as an external reference in
    // case the size of the new space is different between the snapshot maker
    // and the running system.
    and_(Operand(scratch), Immediate(ExternalReference::new_space_mask()));
    cmp(Operand(scratch), Immediate(ExternalReference::new_space_start()));
    j(cc, branch);
  } else {
    // Fast path: fold the subtraction of new_space_start into a single lea,
    // then mask; the result is zero iff 'object' lies in new space.
    int32_t new_space_start = reinterpret_cast<int32_t>(
        ExternalReference::new_space_start().address());
    lea(scratch, Operand(object, -new_space_start));
    and_(scratch, Heap::NewSpaceMask());
    j(cc, branch);
  }
}
99
100
// Record a write of 'value' into the field at 'object'+'offset'.
// When offset == 0 the slot address is computed from an array index.
// NOTE(review): in the offset == 0 case 'scratch' is read before being
// written, so callers must pass the smi element index in 'scratch' — confirm
// against call sites.
void MacroAssembler::RecordWrite(Register object,
                                 int offset,
                                 Register value,
                                 Register scratch) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are esi.
  ASSERT(!object.is(esi) && !value.is(esi) && !scratch.is(esi));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  // Skip barrier if writing a smi.
  ASSERT_EQ(0, kSmiTag);
  test(value, Immediate(kSmiTagMask));
  j(zero, &done);

  // No barrier needed when the target object itself is in new space.
  InNewSpace(object, value, equal, &done);

  // The offset is relative to a tagged or untagged HeapObject pointer,
  // so either offset or offset + kHeapObjectTag must be a
  // multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize) ||
         IsAligned(offset + kHeapObjectTag, kPointerSize));

  Register dst = scratch;
  if (offset != 0) {
    lea(dst, Operand(object, offset));
  } else {
    // Array access: calculate the destination address in the same manner as
    // KeyedStoreIC::GenerateGeneric. Multiply a smi by 2 to get an offset
    // into an array of words.
    ASSERT_EQ(1, kSmiTagSize);
    ASSERT_EQ(0, kSmiTag);
    lea(dst, Operand(object, dst, times_half_pointer_size,
                     FixedArray::kHeaderSize - kHeapObjectTag));
  }
  RecordWriteHelper(object, dst, value);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    mov(object, Immediate(BitCast<int32_t>(kZapValue)));
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
    mov(scratch, Immediate(BitCast<int32_t>(kZapValue)));
  }
}
151
152
// Record a write of 'value' into the slot whose absolute address is already
// in 'address'. All three registers are clobbered in debug-code mode.
void MacroAssembler::RecordWrite(Register object,
                                 Register address,
                                 Register value) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are esi.
  ASSERT(!object.is(esi) && !value.is(esi) && !address.is(esi));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  // Skip barrier if writing a smi.
  ASSERT_EQ(0, kSmiTag);
  test(value, Immediate(kSmiTagMask));
  j(zero, &done);

  // No barrier needed when the target object itself is in new space.
  InNewSpace(object, value, equal, &done);

  RecordWriteHelper(object, address, value);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    mov(object, Immediate(BitCast<int32_t>(kZapValue)));
    mov(address, Immediate(BitCast<int32_t>(kZapValue)));
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
  }
}
184
185
// Jump to 'on_stack_overflow' if esp has dropped below the runtime's
// stack-limit variable.
void MacroAssembler::StackLimitCheck(Label* on_stack_overflow) {
  cmp(esp,
      Operand::StaticVariable(ExternalReference::address_of_stack_limit()));
  j(below, on_stack_overflow);
}
191
192
Steve Blocka7e24c12009-10-30 11:49:00 +0000193#ifdef ENABLE_DEBUGGER_SUPPORT
// Debugger support: store each JS caller-saved register named in 'regs'
// into its dedicated Debug_Address memory slot.
void MacroAssembler::SaveRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of registers to memory location.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(Operand::StaticVariable(reg_addr), reg);
    }
  }
}
207
208
// Debugger support: inverse of SaveRegistersToMemory — reload each register
// in 'regs' from its Debug_Address slot. Iterates in reverse order, mirroring
// the save loop.
void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of memory location to registers.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(reg, Operand::StaticVariable(reg_addr));
    }
  }
}
222
223
// Debugger support: push the saved copy of each register in 'regs'
// (held in its Debug_Address slot) onto the stack.
void MacroAssembler::PushRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Push the content of the memory location to the stack.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      push(Operand::StaticVariable(reg_addr));
    }
  }
}
236
237
// Debugger support: pop stack values back into the Debug_Address slots,
// in reverse register order to match PushRegistersFromMemory.
void MacroAssembler::PopRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Pop the content from the stack to the memory location.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      pop(Operand::StaticVariable(reg_addr));
    }
  }
}
250
251
// Debugger support: copy register values that were pushed on the stack
// (starting at 'base') into their Debug_Address slots, advancing 'base'
// one word per copied register. Clobbers 'scratch'.
void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
                                                    Register scratch,
                                                    RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of the stack to the memory location and adjust base.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      mov(scratch, Operand(base, 0));
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(Operand::StaticVariable(reg_addr), scratch);
      // Step to the next saved register on the stack.
      lea(base, Operand(base, kPointerSize));
    }
  }
}
Andrei Popescu402d9372010-02-26 13:31:12 +0000268
// Emit a call into the runtime's Runtime::kDebugBreak entry via CEntryStub
// (eax = 0 arguments, ebx = runtime function address).
void MacroAssembler::DebugBreak() {
  Set(eax, Immediate(0));
  mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak)));
  CEntryStub ces(1);
  call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}
Steve Blocka7e24c12009-10-30 11:49:00 +0000275#endif
276
277void MacroAssembler::Set(Register dst, const Immediate& x) {
278 if (x.is_zero()) {
279 xor_(dst, Operand(dst)); // shorter than mov
280 } else {
281 mov(dst, x);
282 }
283}
284
285
// Store the immediate 'x' into the memory operand 'dst'. No zero-value
// shortcut exists for memory destinations, so this is always a mov.
void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}
289
290
// Load the map of 'heap_object' into 'map' and compare its instance type
// against 'type'; leaves the comparison result in the flags.
void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}
297
298
// Compare the instance-type byte of 'map' against 'type' (byte compare;
// result left in the flags).
void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       static_cast<int8_t>(type));
}
303
304
// Jump to 'fail' unless 'obj' is a heap object with the given 'map'.
// When 'is_heap_object' is true the caller guarantees a non-smi, so the
// smi check is skipped.
void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              bool is_heap_object) {
  if (!is_heap_object) {
    // A smi has no map; bail out before dereferencing.
    test(obj, Immediate(kSmiTagMask));
    j(zero, fail);
  }
  cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
  j(not_equal, fail);
}
316
317
// Load the map and instance type of 'heap_object' and test the string bit.
// Returns the condition ('zero') that holds when the object is a string,
// for use in a subsequent conditional jump.
Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  ASSERT(kNotStringTag != 0);
  test(instance_type, Immediate(kIsNotStringMask));
  return zero;
}
327
328
// Load the map of 'heap_object' into 'map' and jump to 'fail' if its
// instance type is outside the JS-object range. Clobbers 'scratch'.
void MacroAssembler::IsObjectJSObjectType(Register heap_object,
                                          Register map,
                                          Register scratch,
                                          Label* fail) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  IsInstanceJSObjectType(map, scratch, fail);
}
336
337
// Jump to 'fail' unless the instance type in 'map' lies in
// [FIRST_JS_OBJECT_TYPE, LAST_JS_OBJECT_TYPE]. Uses the classic
// subtract-then-unsigned-compare trick to do the range check in one branch.
void MacroAssembler::IsInstanceJSObjectType(Register map,
                                            Register scratch,
                                            Label* fail) {
  movzx_b(scratch, FieldOperand(map, Map::kInstanceTypeOffset));
  sub(Operand(scratch), Immediate(FIRST_JS_OBJECT_TYPE));
  cmp(scratch, LAST_JS_OBJECT_TYPE - FIRST_JS_OBJECT_TYPE);
  j(above, fail);
}
346
347
// Compare the top two x87 FPU stack values and pop both, leaving the result
// in the EFLAGS register. Uses fucomip when CMOV (and thus fucomip) is
// available; otherwise falls back to fucompp + fnstsw/sahf, preserving eax.
void MacroAssembler::FCmp() {
  if (CpuFeatures::IsSupported(CMOV)) {
    fucomip();
    // fucomip pops one operand; free and pop the remaining one.
    ffree(0);
    fincstp();
  } else {
    fucompp();
    // sahf transfers the FPU status flags via ah; save/restore eax around it.
    push(eax);
    fnstsw_ax();
    sahf();
    pop(eax);
  }
}
361
362
// Debug check: assert that 'object' is a smi or a heap number.
void MacroAssembler::AbortIfNotNumber(Register object) {
  Label ok;
  // Smis are numbers; skip the map check for them.
  test(object, Immediate(kSmiTagMask));
  j(zero, &ok);
  cmp(FieldOperand(object, HeapObject::kMapOffset),
      Factory::heap_number_map());
  Assert(equal, "Operand not a number");
  bind(&ok);
}
372
373
// Debug check: assert that 'object' carries a smi tag.
void MacroAssembler::AbortIfNotSmi(Register object) {
  test(object, Immediate(kSmiTagMask));
  Assert(equal, "Operand is not a smi");
}
378
379
// Debug check: assert that 'object' is NOT a smi.
void MacroAssembler::AbortIfSmi(Register object) {
  test(object, Immediate(kSmiTagMask));
  Assert(not_equal, "Operand is a smi");
}
384
385
// Build a standard stack frame of the given 'type': saved ebp, context (esi),
// a smi frame-type marker, and this code object.
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, Operand(esp));
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(CodeObject()));
  if (FLAG_debug_code) {
    // The code-object slot must have been patched from undefined by now.
    cmp(Operand(esp, 0), Immediate(Factory::undefined_value()));
    Check(not_equal, "code object not properly patched");
  }
}
397
398
// Tear down a frame previously built by EnterFrame. In debug-code mode,
// verify the frame-type marker matches 'type' before leaving.
void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (FLAG_debug_code) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, "stack frame types must match");
  }
  // leave = mov esp, ebp; pop ebp.
  leave();
}
407
// First half of exit-frame setup: push ebp/frame slots and record the frame
// pointer and context in the Top:: thread-local variables.
// Note: 'mode' is not read in this half of the setup.
void MacroAssembler::EnterExitFramePrologue(ExitFrame::Mode mode) {
  // Setup the frame structure on the stack.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(ebp);
  mov(ebp, Operand(esp));

  // Reserve room for entry stack pointer and push the debug marker.
  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  push(Immediate(CodeObject()));  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  ExternalReference context_address(Top::k_context_address);
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);
}
Steve Blocka7e24c12009-10-30 11:49:00 +0000427
// Second half of exit-frame setup: optionally spill registers for the
// debugger, reserve 'argc' argument slots, align esp to the OS frame
// alignment, and patch the saved entry sp slot.
void MacroAssembler::EnterExitFrameEpilogue(ExitFrame::Mode mode, int argc) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Save the state of all registers to the stack from the memory
  // location. This is needed to allow nested break points.
  if (mode == ExitFrame::MODE_DEBUG) {
    // TODO(1243899): This should be symmetric to
    // CopyRegistersFromStackToMemory() but it isn't! esp is assumed
    // correct here, but computed for the other call. Very error
    // prone! FIX THIS. Actually there are deeper problems with
    // register saving than this asymmetry (see the bug report
    // associated with this issue).
    PushRegistersFromMemory(kJSCallerSaved);
  }
#endif

  // Reserve space for arguments.
  sub(Operand(esp), Immediate(argc * kPointerSize));

  // Get the required frame alignment for the OS.
  static const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}
456
457
// Enter an exit frame for a runtime call: expects the argument count in eax;
// sets up edi = argc and esi = pointer to the first argument, then reserves
// two C argument slots.
void MacroAssembler::EnterExitFrame(ExitFrame::Mode mode) {
  EnterExitFramePrologue(mode);

  // Setup argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, Operand(eax));
  lea(esi, Operand(ebp, eax, times_4, offset));

  EnterExitFrameEpilogue(mode, 2);
}
468
469
// Enter an exit frame for an API call with a statically known stack layout:
// esi is pointed 'stack_space' words into the caller's frame and 'argc'
// C argument slots are reserved.
void MacroAssembler::EnterApiExitFrame(ExitFrame::Mode mode,
                                       int stack_space,
                                       int argc) {
  EnterExitFramePrologue(mode);

  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  lea(esi, Operand(ebp, (stack_space * kPointerSize) + offset));

  EnterExitFrameEpilogue(mode, argc);
}
480
481
// Tear down an exit frame: optionally restore debugger register copies,
// pop the arguments and receiver, restore the context from Top, and clear
// the saved C entry frame pointer.
void MacroAssembler::LeaveExitFrame(ExitFrame::Mode mode) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Restore the memory copy of the registers by digging them out from
  // the stack. This is needed to allow nested break points.
  if (mode == ExitFrame::MODE_DEBUG) {
    // It's okay to clobber register ebx below because we don't need
    // the function pointer after this.
    const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
    int kOffset = ExitFrameConstants::kCodeOffset - kCallerSavedSize;
    lea(ebx, Operand(ebp, kOffset));
    CopyRegistersFromStackToMemory(ebx, ecx, kJSCallerSaved);
  }
#endif

  // Get the return address from the stack and restore the frame pointer.
  mov(ecx, Operand(ebp, 1 * kPointerSize));
  mov(ebp, Operand(ebp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  lea(esp, Operand(esi, 1 * kPointerSize));

  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Top::k_context_address);
  mov(esi, Operand::StaticVariable(context_address));
#ifdef DEBUG
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Push the return address to get ready to return.
  push(ecx);

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}
517
518
// Push a stack handler (try-catch / try-finally / JS-entry) and link it into
// the handler chain rooted at Top::k_handler_address. The return address is
// assumed to already be on top of the stack.
void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  // Adjust this code if not the case.
  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
  // The pc (return address) is already on TOS.
  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      push(Immediate(StackHandler::TRY_CATCH));
    } else {
      push(Immediate(StackHandler::TRY_FINALLY));
    }
    push(ebp);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The frame pointer does not point to a JS frame so we save NULL
    // for ebp. We expect the code throwing an exception to check ebp
    // before dereferencing it to restore the context.
    push(Immediate(StackHandler::ENTRY));
    push(Immediate(0));  // NULL frame pointer.
  }
  // Save the current handler as the next handler.
  push(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
  // Link this handler as the new current one.
  mov(Operand::StaticVariable(ExternalReference(Top::k_handler_address)), esp);
}
544
545
// Unlink the current stack handler: restore the previous handler from the
// top of the handler record, then drop the rest of the record.
void MacroAssembler::PopTryHandler() {
  ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
  pop(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
  add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize));
}
551
552
// Security check for global-proxy access: jump to 'miss' unless the current
// lexical context and the holder's context are the same or carry matching
// security tokens. Clobbers 'scratch'; 'holder_reg' is preserved via
// push/pop.
void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));

  // Load current lexical context from the stack frame.
  mov(scratch, Operand(ebp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (FLAG_debug_code) {
    cmp(Operand(scratch), Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
  }
  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, offset));
  mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    push(scratch);
    // Read the first word and compare to global_context_map.
    mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
    cmp(scratch, Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(scratch);
  }

  // Check if both contexts are the same.
  cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  j(equal, &same_contexts, taken);

  // Compare security tokens, save holder_reg on the stack so we can use it
  // as a temporary register.
  //
  // TODO(119): avoid push(holder_reg)/pop(holder_reg)
  push(holder_reg);
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    cmp(holder_reg, Factory::null_value());
    Check(not_equal, "JSGlobalProxy::context() should not be null.");

    push(holder_reg);
    // Read the first word and compare to global_context_map(),
    mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    cmp(holder_reg, Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);
  }

  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, token_offset));
  cmp(scratch, FieldOperand(holder_reg, token_offset));
  pop(holder_reg);
  j(not_equal, miss, not_taken);

  bind(&same_contexts);
}
619
620
// Load the new-space allocation top into 'result'. If RESULT_CONTAINS_TOP is
// set, 'result' already holds the top (verified in debug builds) and no load
// is emitted. When a scratch register is supplied it caches the top address
// so UpdateAllocationTopHelper can store through it later.
void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register result_end,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    ASSERT(scratch.is(no_reg));
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    cmp(result, Operand::StaticVariable(new_space_allocation_top));
    Check(equal, "Unexpected allocation top");
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available.
  if (scratch.is(no_reg)) {
    mov(result, Operand::StaticVariable(new_space_allocation_top));
  } else {
    ASSERT(!scratch.is(result_end));
    mov(Operand(scratch), Immediate(new_space_allocation_top));
    mov(result, Operand(scratch, 0));
  }
}
649
650
// Store 'result_end' as the new new-space allocation top. If 'scratch' is
// valid it must already hold the address of the top variable (as set up by
// LoadAllocationTopHelper).
void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch) {
  if (FLAG_debug_code) {
    test(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, "Unaligned allocation in new space");
  }

  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Update new top. Use scratch if available.
  if (scratch.is(no_reg)) {
    mov(Operand::StaticVariable(new_space_allocation_top), result_end);
  } else {
    mov(Operand(scratch, 0), result_end);
  }
}
668
669
// Allocate a fixed-size object of 'object_size' bytes in new space.
// On success 'result' holds the new object (tagged if TAG_OBJECT is set);
// on exhaustion jumps to 'gc_required'. 'result_end' may be no_reg, in which
// case 'result' doubles as the bump register and is rewound afterwards.
void MacroAssembler::AllocateInNewSpace(int object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  Register top_reg = result_end.is_valid() ? result_end : result;

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();

  if (top_reg.is(result)) {
    add(Operand(top_reg), Immediate(object_size));
  } else {
    lea(top_reg, Operand(result, object_size));
  }
  cmp(top_reg, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required, not_taken);

  // Update allocation top.
  UpdateAllocationTopHelper(top_reg, scratch);

  // Tag result if requested.
  if (top_reg.is(result)) {
    // result currently holds the new top; rewind it to the object start,
    // folding in the heap-object tag when requested.
    if ((flags & TAG_OBJECT) != 0) {
      sub(Operand(result), Immediate(object_size - kHeapObjectTag));
    } else {
      sub(Operand(result), Immediate(object_size));
    }
  } else if ((flags & TAG_OBJECT) != 0) {
    add(Operand(result), Immediate(kHeapObjectTag));
  }
}
709
710
// Allocate a variable-size object: header_size + element_count scaled by
// element_size bytes. On success 'result' holds the new object (tagged if
// requested) and 'result_end' the new top; jumps to 'gc_required' on
// exhaustion.
void MacroAssembler::AllocateInNewSpace(int header_size,
                                        ScaleFactor element_size,
                                        Register element_count,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  lea(result_end, Operand(result, element_count, element_size, header_size));
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}
739
740
// Allocate an object whose byte size is held in the 'object_size' register.
// On success 'result' holds the new object (tagged if requested) and
// 'result_end' the new top; jumps to 'gc_required' on exhaustion.
void MacroAssembler::AllocateInNewSpace(Register object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  if (!object_size.is(result_end)) {
    mov(result_end, object_size);
  }
  add(result_end, Operand(result));
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required, not_taken);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}
770
771
// Undo the most recent new-space allocation by resetting the allocation top
// to 'object' (after stripping its heap-object tag). In debug builds,
// checks that 'object' lies below the current top.
void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Make sure the object has no tag before resetting top.
  and_(Operand(object), Immediate(~kHeapObjectTagMask));
#ifdef DEBUG
  cmp(object, Operand::StaticVariable(new_space_allocation_top));
  Check(below, "Undo allocation of non allocated memory");
#endif
  mov(Operand::StaticVariable(new_space_allocation_top), object);
}
784
785
// Allocate a tagged HeapNumber in new space and install its map. The value
// field is left uninitialized. Jumps to 'gc_required' on exhaustion.
void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate heap number in new space.
  AllocateInNewSpace(HeapNumber::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::heap_number_map()));
}
802
803
// Allocate a sequential two-byte string of 'length' characters in new space
// and initialize its map, length (as a smi), and hash field. The character
// payload is left uninitialized. Jumps to 'gc_required' on exhaustion.
void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  ASSERT(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
  and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));

  // Allocate two byte string in new space.
  AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::string_map()));
  // Store the length as a smi.
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
837
838
// Allocates a sequential ASCII string of |length| characters in new space.
// On success |result| holds the tagged string with map, length and hash field
// initialized (characters uninitialized).  |scratch1|-|scratch3| are
// clobbered.  Jumps to |gc_required| on allocation failure.
void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, length);
  ASSERT(kCharSize == 1);
  // Round the byte count up to object alignment.
  add(Operand(scratch1), Immediate(kObjectAlignmentMask));
  and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));

  // Allocate ascii string in new space.
  AllocateInNewSpace(SeqAsciiString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::ascii_string_map()));
  // Store the length as a smi.
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
872
873
// Allocates a two-byte cons string object in new space, leaving the tagged
// pointer in |result|.  |scratch1| and |scratch2| are clobbered.  Jumps to
// |gc_required| on allocation failure.
void MacroAssembler::AllocateConsString(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::cons_string_map()));
}
890
891
// Allocates an ASCII cons string object in new space, leaving the tagged
// pointer in |result|.  |scratch1| and |scratch2| are clobbered.  Jumps to
// |gc_required| on allocation failure.
void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  // Allocate cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::cons_ascii_string_map()));
}
908
909
// Virtual-frame (JumpTarget) version of the negative zero check: branches to
// |then_target| when |result| is zero and |op| has its sign bit set, i.e.
// when an integer multiplication may have produced a floating-point -0.
// |cgen| is unused here; it is part of the signature for the callers.
void MacroAssembler::NegativeZeroTest(CodeGenerator* cgen,
                                      Register result,
                                      Register op,
                                      JumpTarget* then_target) {
  JumpTarget ok;
  // Non-zero result cannot be a negative zero.
  test(result, Operand(result));
  ok.Branch(not_zero, taken);
  // Result is zero: branch if the operand is negative.
  test(op, Operand(op));
  then_target->Branch(sign, not_taken);
  ok.Bind();
}
921
922
// Branches to |then_label| when |result| is zero and |op| has its sign bit
// set, i.e. when an integer multiplication may have produced -0.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  // Non-zero result cannot be a negative zero.
  test(result, Operand(result));
  j(not_zero, &ok, taken);
  // Result is zero: branch if the operand is negative.
  test(op, Operand(op));
  j(sign, then_label, not_taken);
  bind(&ok);
}
933
934
// Branches to |then_label| when |result| is zero and at least one of |op1|,
// |op2| has its sign bit set (or-ing the operands into |scratch| sets the
// sign flag if either is negative), i.e. the operation may have produced -0.
// |scratch| is clobbered.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  // Non-zero result cannot be a negative zero.
  test(result, Operand(result));
  j(not_zero, &ok, taken);
  // Result is zero: branch if either operand is negative.
  mov(scratch, Operand(op1));
  or_(scratch, Operand(op2));
  j(sign, then_label, not_taken);
  bind(&ok);
}
948
949
// Loads the prototype of the JSFunction in |function| into |result|,
// resolving through the initial map when the function has one.  Jumps to
// |miss| when |function| is a smi, not a function, or its prototype is
// still the hole (not yet allocated).  |scratch| is clobbered.
void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss) {
  // Check that the receiver isn't a smi.
  test(function, Immediate(kSmiTagMask));
  j(zero, miss, not_taken);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss, not_taken);

  // Make sure that the function has an instance prototype.
  // (CmpObjectType left the function's map in |result|.)
  Label non_instance;
  movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
  test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance, not_taken);

  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(Operand(result), Immediate(Factory::the_hole_value()));
  j(equal, miss, not_taken);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  mov(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}
995
996
// Emits a call to the code generated for |stub|.  GetCode() may allocate
// (and thus GC), so this must not be used where stub calls are disallowed.
void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  call(stub->GetCode(), RelocInfo::CODE_TARGET);
}
1001
1002
Leon Clarkee46be812010-01-19 14:06:41 +00001003Object* MacroAssembler::TryCallStub(CodeStub* stub) {
1004 ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
1005 Object* result = stub->TryGetCode();
1006 if (!result->IsFailure()) {
1007 call(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
1008 }
1009 return result;
1010}
1011
1012
// Emits a tail call (jump) to the code generated for |stub|.
void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
}
1017
1018
Leon Clarkee46be812010-01-19 14:06:41 +00001019Object* MacroAssembler::TryTailCallStub(CodeStub* stub) {
1020 ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
1021 Object* result = stub->TryGetCode();
1022 if (!result->IsFailure()) {
1023 jmp(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
1024 }
1025 return result;
1026}
1027
1028
// Emits the return from a stub taking |argc| arguments.  One argument
// (the receiver or first stack slot) is assumed to be popped by the caller,
// hence the "- 1".  Only valid while generating a stub.
void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}
1033
1034
// Emits the code path for a runtime call with a wrong argument count:
// drops the |num_arguments| stack arguments and produces undefined in eax
// (the JS return value register).
void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    add(Operand(esp), Immediate(num_arguments * kPointerSize));
  }
  mov(eax, Immediate(Factory::undefined_value()));
}
1041
1042
// Convenience overload: resolves the runtime function for |id| and calls it.
void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}
1046
1047
// Convenience overload: resolves the runtime function for |id| and performs
// the non-allocating (failure-returning) runtime call.
Object* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
                                       int num_arguments) {
  return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
}
1052
1053
// Emits a call to runtime function |f| with |num_arguments| stack arguments
// via the CEntry stub.  Loads the argument count into eax and the function's
// entry into ebx, as expected by CEntryStub.
void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f)));
  CEntryStub ces(1);
  CallStub(&ces);
}
1072
1073
// Like CallRuntime, but uses TryCallStub so that a failure to generate the
// CEntry stub code is returned to the caller instead of triggering a GC.
Object* MacroAssembler::TryCallRuntime(Runtime::Function* f,
                                       int num_arguments) {
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    // Since we did not call the stub, there was no allocation failure.
    // Return some non-failure object.
    return Heap::undefined_value();
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f)));
  CEntryStub ces(1);
  return TryCallStub(&ces);
}
1092
1093
// Emits a call to the external (C++) function |ref| with |num_arguments|
// stack arguments, going through the CEntry stub (eax = argument count,
// ebx = target, as CEntryStub expects).
void MacroAssembler::CallExternalReference(ExternalReference ref,
                                           int num_arguments) {
  mov(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ref));

  CEntryStub stub(1);
  CallStub(&stub);
}
1102
1103
// Emits a tail call to the external reference |ext| with |num_arguments|
// stack arguments.  |result_size| is unused on ia32 (results fit in
// registers); it is part of the cross-platform signature.
void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  JumpToExternalReference(ext);
}
1114
1115
// Convenience overload: resolves the external reference for runtime function
// |fid| and emits a tail call to it.
void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid), num_arguments, result_size);
}
1121
1122
// Saves the current handle scope state (extensions count, next and limit
// pointers) on the stack and resets the extensions count, effectively
// opening a new handle scope in generated code.  |scratch| is clobbered.
void MacroAssembler::PushHandleScope(Register scratch) {
  // Push the number of extensions, smi-tagged so the gc will ignore it.
  ExternalReference extensions_address =
      ExternalReference::handle_scope_extensions_address();
  mov(scratch, Operand::StaticVariable(extensions_address));
  SmiTag(scratch);
  push(scratch);
  mov(Operand::StaticVariable(extensions_address), Immediate(0));
  // Push next and limit pointers which will be wordsize aligned and
  // hence automatically smi tagged.
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  push(Operand::StaticVariable(next_address));
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address();
  push(Operand::StaticVariable(limit_address));
}
1140
1141
// Restores the handle scope state pushed by PushHandleScope, first deleting
// any handle scope extensions created since (via a runtime call, preserving
// |saved| across it).  When |gc_allowed| is false the non-allocating runtime
// call is used and a Failure object may be returned; otherwise returns NULL.
// |scratch| is clobbered.
Object* MacroAssembler::PopHandleScopeHelper(Register saved,
                                             Register scratch,
                                             bool gc_allowed) {
  Object* result = NULL;
  ExternalReference extensions_address =
      ExternalReference::handle_scope_extensions_address();
  Label write_back;
  // Only call the runtime when extensions were actually created.
  mov(scratch, Operand::StaticVariable(extensions_address));
  cmp(Operand(scratch), Immediate(0));
  j(equal, &write_back);
  // Preserve |saved| across the runtime call.
  push(saved);
  if (gc_allowed) {
    CallRuntime(Runtime::kDeleteHandleScopeExtensions, 0);
  } else {
    result = TryCallRuntime(Runtime::kDeleteHandleScopeExtensions, 0);
    if (result->IsFailure()) return result;
  }
  pop(saved);

  bind(&write_back);
  // Pop limit, next and (smi-tagged) extensions count in reverse push order.
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address();
  pop(Operand::StaticVariable(limit_address));
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  pop(Operand::StaticVariable(next_address));
  pop(scratch);
  SmiUntag(scratch);
  mov(Operand::StaticVariable(extensions_address), scratch);

  return result;
}
1174
1175
// GC-allowed variant of handle scope restoration; cannot fail.
void MacroAssembler::PopHandleScope(Register saved, Register scratch) {
  PopHandleScopeHelper(saved, scratch, true);
}
1179
1180
// Non-allocating variant of handle scope restoration; may return a Failure.
Object* MacroAssembler::TryPopHandleScope(Register saved, Register scratch) {
  return PopHandleScopeHelper(saved, scratch, false);
}
1184
1185
// Emits a jump into the C entry runtime stub with |ext| as the target
// function (passed in ebx, as CEntryStub expects).
void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces(1);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}
1192
1193
// Emits the argument-count check that precedes a function invocation.
// If the expected and actual argument counts can be proven equal at
// assembly time, no code is emitted; otherwise the counts are compared at
// run time and, on mismatch, control transfers to the arguments adaptor
// trampoline (called or jumped to depending on |flag|).  The code to invoke
// is given either as |code_constant| or as |code_operand|; the adaptor
// expects it in edx, actual count in eax and expected count in ebx.
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    const Operand& code_operand,
                                    Label* done,
                                    InvokeFlag flag) {
  bool definitely_matches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(eax, actual.immediate());
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      ASSERT(expected.reg().is(ebx));
      mov(eax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), Operand(actual.reg()));
      j(equal, &invoke);
      ASSERT(actual.reg().is(eax));
      ASSERT(expected.reg().is(ebx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
    // Put the code to invoke in edx for the adaptor trampoline.
    if (!code_constant.is_null()) {
      mov(edx, Immediate(code_constant));
      add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_operand.is_reg(edx)) {
      mov(edx, code_operand);
    }

    if (flag == CALL_FUNCTION) {
      call(adaptor, RelocInfo::CODE_TARGET);
      jmp(done);
    } else {
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
1257
1258
// Invokes the code at |code| (an Operand) after emitting the argument-count
// prologue; either calls or jumps depending on |flag|.
void MacroAssembler::InvokeCode(const Operand& code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag) {
  Label done;
  InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code);
  }
  bind(&done);
}
1273
1274
// Invokes the code object |code| (a compile-time constant handle) after
// emitting the argument-count prologue; either calls or jumps depending on
// |flag|, using relocation mode |rmode|.
void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag) {
  Label done;
  // The operand is unused when a code constant is given; pass a dummy.
  Operand dummy(eax);
  InvokePrologue(expected, actual, code, dummy, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code, rmode);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code, rmode);
  }
  bind(&done);
}
1291
1292
// Invokes the JSFunction held in |fun| (must be edi, per calling
// convention): loads its context into esi, its formal parameter count into
// ebx and its code entry into edx, then dispatches via InvokeCode.
void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(fun.is(edi));
  mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  // The parameter count is stored as a smi.
  SmiUntag(ebx);
  mov(edx, FieldOperand(edi, JSFunction::kCodeOffset));
  // Skip the code object header to get the entry address.
  lea(edx, FieldOperand(edx, Code::kHeaderSize));

  ParameterCount expected(ebx);
  InvokeCode(Operand(edx), expected, actual, flag);
}
1307
1308
// Invokes the compile-time-known, already-compiled |function|: materializes
// it in edi, loads its context into esi, and invokes its cached code with
// the statically known formal parameter count.
void MacroAssembler::InvokeFunction(JSFunction* function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(function->is_compiled());
  // Get the function and setup the context.
  mov(edi, Immediate(Handle<JSFunction>(function)));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  // Invoke the cached code.
  Handle<Code> code(function->code());
  ParameterCount expected(function->shared()->formal_parameter_count());
  InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag);
}
1322
1323
// Invokes the JavaScript builtin |id|: loads its code entry into edx via
// GetBuiltinEntry and dispatches through InvokeCode.  The zero expected
// parameter count suppresses the argument-count check.
void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments match the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinEntry(edx, id);
  InvokeCode(Operand(edx), expected, expected, flag);
}
1335
1336
// Loads the entry address of JavaScript builtin |id| into |target| and the
// corresponding builtin JSFunction into edi (hence |target| must not be
// edi).  In debug builds also verifies that the code object cached in the
// builtins object matches the one on the builtin function.
void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(edi));

  // Load the builtins object into target register.
  mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));

  // Load the JavaScript builtin function from the builtins object.
  mov(edi, FieldOperand(target, JSBuiltinsObject::OffsetOfFunctionWithId(id)));

  // Load the code entry point from the builtins object.
  mov(target, FieldOperand(target, JSBuiltinsObject::OffsetOfCodeWithId(id)));
  if (FLAG_debug_code) {
    // Make sure the code objects in the builtins object and in the
    // builtin function are the same.
    push(target);
    mov(target, FieldOperand(edi, JSFunction::kCodeOffset));
    cmp(target, Operand(esp, 0));
    Assert(equal, "Builtin code object changed");
    pop(target);
  }
  // Skip the code object header to get the entry address.
  lea(target, FieldOperand(target, Code::kHeaderSize));
}
1360
1361
// Loads into |dst| the function context |context_chain_length| levels up
// the static chain from the current context (esi).  The final load through
// FCONTEXT_INDEX ensures the result is a function context even when an
// intermediate (e.g. with/catch) context is encountered.
void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    mov(dst, Operand(esi, Context::SlotOffset(Context::CLOSURE_INDEX)));
    // Load the function context (which is the incoming, outer context).
    mov(dst, FieldOperand(dst, JSFunction::kContextOffset));
    for (int i = 1; i < context_chain_length; i++) {
      mov(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
      mov(dst, FieldOperand(dst, JSFunction::kContextOffset));
    }
    // The context may be an intermediate context, not a function context.
    mov(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  } else {  // Slot is in the current function context.
    // The context may be an intermediate context, not a function context.
    mov(dst, Operand(esi, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  }
}
1379
1380
1381
// Emits a plain return popping no stack arguments.
void MacroAssembler::Ret() {
  ret(0);
}
1385
1386
Leon Clarkee46be812010-01-19 14:06:41 +00001387void MacroAssembler::Drop(int stack_elements) {
1388 if (stack_elements > 0) {
1389 add(Operand(esp), Immediate(stack_elements * kPointerSize));
1390 }
1391}
1392
1393
// Loads the handle |value| into |dst|.
void MacroAssembler::Move(Register dst, Handle<Object> value) {
  mov(dst, value);
}
1397
1398
// Emits a store of |value| to the stats counter, but only when native-code
// counters are compiled in and this counter is enabled.
void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
  }
}
1404
1405
// Emits an in-memory increment of the stats counter by |value| (> 0), using
// inc for the common by-one case.  No code is emitted when counters are off.
void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      inc(operand);
    } else {
      add(operand, Immediate(value));
    }
  }
}
1417
1418
// Emits an in-memory decrement of the stats counter by |value| (> 0), using
// dec for the common by-one case.  No code is emitted when counters are off.
void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      dec(operand);
    } else {
      sub(operand, Immediate(value));
    }
  }
}
1430
1431
// Conditional variant: increments the counter only when condition |cc|
// holds at run time.  The flags register is preserved around the update
// (pushfd/popfd) so the caller's condition codes survive.
void MacroAssembler::IncrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    IncrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}
1445
1446
// Conditional variant: decrements the counter only when condition |cc|
// holds at run time.  The flags register is preserved around the update
// (pushfd/popfd) so the caller's condition codes survive.
void MacroAssembler::DecrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    DecrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}
1460
1461
// Emits a Check (abort unless |cc| holds) only in debug-code builds.
void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code) Check(cc, msg);
}
1465
1466
// Debug-only check that |elements| is a fast-elements backing store, i.e.
// its map is either the fixed array map or the copy-on-write fixed array
// map; aborts otherwise.  Emits nothing in non-debug-code builds.
void MacroAssembler::AssertFastElements(Register elements) {
  if (FLAG_debug_code) {
    Label ok;
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(Factory::fixed_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(Factory::fixed_cow_array_map()));
    j(equal, &ok);
    Abort("JSObject with fast elements map has slow elements");
    bind(&ok);
  }
}
1480
1481
// Emits code that aborts with |msg| unless condition |cc| holds.
void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L, taken);
  Abort(msg);
  // Abort does not return; execution only continues past here when the
  // condition held.
  bind(&L);
}
1489
1490
// Emits a runtime check that esp satisfies the OS activation frame
// alignment, breaking into the debugger (int3) on misalignment.  Emits
// nothing when the required alignment is no stricter than a word.
void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    Label alignment_as_expected;
    test(esp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}
1504
1505
// Emits code that aborts execution with message |msg| via Runtime::kAbort.
// The message pointer is smuggled past the GC as two smis: an aligned-down
// pointer p0 and the alignment difference (p1 - p0).  Does not return; ends
// in int3 as a backstop.
void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  set_allow_stub_calls(true);

  push(eax);
  push(Immediate(p0));
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
  int3();
}
1531
1532
// Jumps to |on_not_number| unless the non-smi object in |reg| is a heap
// number.  The map check is skipped entirely when static type |info|
// already proves the value is a number.
void MacroAssembler::JumpIfNotNumber(Register reg,
                                     TypeInfo info,
                                     Label* on_not_number) {
  // The caller must have ruled out smis.
  if (FLAG_debug_code) AbortIfSmi(reg);
  if (!info.IsNumber()) {
    cmp(FieldOperand(reg, HeapObject::kMapOffset),
        Factory::heap_number_map());
    j(not_equal, on_not_number);
  }
}
1543
1544
// Converts the heap number in |source| to an int32 in |dst|, jumping to
// |on_not_int32| when the value does not convert exactly (detected via the
// 0x80000000 sentinel cvttsd2si produces on overflow/invalid).  When static
// type |info| proves the value is an int32, the check is omitted.  If no
// |scratch| is supplied and dst aliases source, the original value is
// temporarily saved on the stack so it can be restored on the failure path.
void MacroAssembler::ConvertToInt32(Register dst,
                                    Register source,
                                    Register scratch,
                                    TypeInfo info,
                                    Label* on_not_int32) {
  if (FLAG_debug_code) {
    AbortIfSmi(source);
    AbortIfNotNumber(source);
  }
  if (info.IsInteger32()) {
    cvttsd2si(dst, FieldOperand(source, HeapNumber::kValueOffset));
  } else {
    Label done;
    // Need to save the original value when the truncation would clobber it
    // before we know whether the conversion succeeded.
    bool push_pop = (scratch.is(no_reg) && dst.is(source));
    ASSERT(!scratch.is(source));
    if (push_pop) {
      push(dst);
      scratch = dst;
    }
    if (scratch.is(no_reg)) scratch = dst;
    cvttsd2si(scratch, FieldOperand(source, HeapNumber::kValueOffset));
    // 0x80000000 is the "integer indefinite" result, signalling the double
    // was out of int32 range (or NaN).
    cmp(scratch, 0x80000000u);
    if (push_pop) {
      j(not_equal, &done);
      // Conversion failed: restore the original value before bailing out.
      pop(dst);
      jmp(on_not_int32);
    } else {
      j(equal, on_not_int32);
    }

    bind(&done);
    if (push_pop) {
      add(Operand(esp), Immediate(kPointerSize));  // Pop.
    }
    if (!scratch.is(dst)) {
      mov(dst, scratch);
    }
  }
}
1584
1585
// Jumps to |failure| unless |instance_type| describes a sequential ASCII
// string (checked by masking the string, representation and encoding bits).
// |scratch| is clobbered; it may alias |instance_type|.
void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Register scratch,
    Label* failure) {
  if (!scratch.is(instance_type)) {
    mov(scratch, instance_type);
  }
  and_(scratch,
       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
  cmp(scratch, kStringTag | kSeqStringTag | kAsciiStringTag);
  j(not_equal, failure);
}
1598
1599
// Jumps to |failure| unless both |object1| and |object2| are flat (i.e.
// sequential) ASCII strings.  Both scratch registers are clobbered.  The
// two instance types are interleaved (one shifted by 3 via times_8) so a
// single compare validates both at once.
void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register object1,
                                                         Register object2,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         Label* failure) {
  // Check that both objects are not smis.  And-ing the values preserves the
  // zero tag bit only if both are smis.
  ASSERT_EQ(0, kSmiTag);
  mov(scratch1, Operand(object1));
  and_(scratch1, Operand(object2));
  test(scratch1, Immediate(kSmiTagMask));
  j(zero, failure);

  // Load instance type for both strings.
  mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
  mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
  movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ascii strings.
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
  // Interleave bits from both instance types and compare them in one check.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  and_(scratch1, kFlatAsciiStringMask);
  and_(scratch2, kFlatAsciiStringMask);
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3));
  j(not_equal, failure);
}
1630
1631
// Reserves stack space for |num_arguments| words of C-call arguments and,
// when the OS requires activation frame alignment, aligns esp and stashes
// the original esp just above the argument area so CallCFunction can
// restore it.  |scratch| is clobbered.
void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
  int frameAlignment = OS::ActivationFrameAlignment();
  if (frameAlignment != 0) {
    // Make stack end at alignment and make room for num_arguments words
    // and the original value of esp.
    mov(scratch, esp);
    sub(Operand(esp), Immediate((num_arguments + 1) * kPointerSize));
    ASSERT(IsPowerOf2(frameAlignment));
    and_(esp, -frameAlignment);
    // Save the pre-alignment esp in the slot above the arguments.
    mov(Operand(esp, num_arguments * kPointerSize), scratch);
  } else {
    sub(Operand(esp), Immediate(num_arguments * kPointerSize));
  }
}
1646
1647
// Calls the C function at external reference |function| with
// |num_arguments| stack arguments.  eax is used to hold the target; that
// is safe because eax is the return-value register and thus clobbered by
// the call anyway.
void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Trashing eax is ok as it will be the return value.
  mov(Operand(eax), Immediate(function));
  CallCFunction(eax, num_arguments);
}
1654
1655
// Calls the C function whose address is in |function| and tears down the
// argument area set up by PrepareCallCFunction: either by reloading the
// saved esp (aligned case) or by popping the argument words.
void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  // Check stack alignment.
  if (FLAG_debug_code) {
    CheckStackAlignment();
  }

  call(Operand(function));
  if (OS::ActivationFrameAlignment() != 0) {
    // Restore the esp saved by PrepareCallCFunction.
    mov(esp, Operand(esp, num_arguments * kPointerSize));
  } else {
    add(Operand(esp), Immediate(num_arguments * sizeof(int32_t)));
  }
}
1670
1671
// Constructs a patcher that assembles exactly |size| bytes of instructions
// directly over existing code at |address|.
CodePatcher::CodePatcher(byte* address, int size)
    : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap on order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
1679
1680
// Flushes the instruction cache for the patched region and verifies that
// exactly the expected number of bytes was emitted.
CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
1689
1690
1691} } // namespace v8::internal
Leon Clarkef7060e22010-06-03 12:02:55 +01001692
1693#endif // V8_TARGET_ARCH_IA32