blob: b3f7c21273771ef8525fcd5675f849c12a74bef4 [file] [log] [blame]
Steve Blocka7e24c12009-10-30 11:49:00 +00001// Copyright 2006-2009 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
29
Leon Clarkef7060e22010-06-03 12:02:55 +010030#if defined(V8_TARGET_ARCH_IA32)
31
Steve Blocka7e24c12009-10-30 11:49:00 +000032#include "bootstrapper.h"
33#include "codegen-inl.h"
34#include "debug.h"
35#include "runtime.h"
36#include "serialize.h"
37
38namespace v8 {
39namespace internal {
40
41// -------------------------------------------------------------------------
42// MacroAssembler implementation.
43
44MacroAssembler::MacroAssembler(void* buffer, int size)
45 : Assembler(buffer, size),
Steve Blocka7e24c12009-10-30 11:49:00 +000046 generating_stub_(false),
47 allow_stub_calls_(true),
48 code_object_(Heap::undefined_value()) {
49}
50
51
Steve Block6ded16b2010-05-10 14:33:55 +010052void MacroAssembler::RecordWriteHelper(Register object,
53 Register addr,
54 Register scratch) {
55 if (FLAG_debug_code) {
56 // Check that the object is not in new space.
57 Label not_in_new_space;
58 InNewSpace(object, scratch, not_equal, &not_in_new_space);
59 Abort("new-space object passed to RecordWriteHelper");
60 bind(&not_in_new_space);
61 }
62
Steve Blocka7e24c12009-10-30 11:49:00 +000063 // Compute the page start address from the heap object pointer, and reuse
64 // the 'object' register for it.
Steve Block6ded16b2010-05-10 14:33:55 +010065 and_(object, ~Page::kPageAlignmentMask);
Steve Blocka7e24c12009-10-30 11:49:00 +000066
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010067 // Compute number of region covering addr. See Page::GetRegionNumberForAddress
68 // method for more details.
69 and_(addr, Page::kPageAlignmentMask);
70 shr(addr, Page::kRegionSizeLog2);
Steve Blocka7e24c12009-10-30 11:49:00 +000071
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010072 // Set dirty mark for region.
73 bts(Operand(object, Page::kDirtyFlagOffset), addr);
Steve Blocka7e24c12009-10-30 11:49:00 +000074}
75
76
Steve Block6ded16b2010-05-10 14:33:55 +010077void MacroAssembler::InNewSpace(Register object,
78 Register scratch,
79 Condition cc,
80 Label* branch) {
81 ASSERT(cc == equal || cc == not_equal);
82 if (Serializer::enabled()) {
83 // Can't do arithmetic on external references if it might get serialized.
84 mov(scratch, Operand(object));
85 // The mask isn't really an address. We load it as an external reference in
86 // case the size of the new space is different between the snapshot maker
87 // and the running system.
88 and_(Operand(scratch), Immediate(ExternalReference::new_space_mask()));
89 cmp(Operand(scratch), Immediate(ExternalReference::new_space_start()));
90 j(cc, branch);
91 } else {
92 int32_t new_space_start = reinterpret_cast<int32_t>(
93 ExternalReference::new_space_start().address());
94 lea(scratch, Operand(object, -new_space_start));
95 and_(scratch, Heap::NewSpaceMask());
96 j(cc, branch);
Steve Blocka7e24c12009-10-30 11:49:00 +000097 }
Steve Blocka7e24c12009-10-30 11:49:00 +000098}
99
100
Steve Blocka7e24c12009-10-30 11:49:00 +0000101void MacroAssembler::RecordWrite(Register object, int offset,
102 Register value, Register scratch) {
Leon Clarke4515c472010-02-03 11:58:03 +0000103 // The compiled code assumes that record write doesn't change the
104 // context register, so we check that none of the clobbered
105 // registers are esi.
106 ASSERT(!object.is(esi) && !value.is(esi) && !scratch.is(esi));
107
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100108 // First, check if a write barrier is even needed. The tests below
109 // catch stores of Smis and stores into young gen.
Steve Blocka7e24c12009-10-30 11:49:00 +0000110 Label done;
111
112 // Skip barrier if writing a smi.
113 ASSERT_EQ(0, kSmiTag);
114 test(value, Immediate(kSmiTagMask));
115 j(zero, &done);
116
Steve Block6ded16b2010-05-10 14:33:55 +0100117 InNewSpace(object, value, equal, &done);
Steve Blocka7e24c12009-10-30 11:49:00 +0000118
Steve Block6ded16b2010-05-10 14:33:55 +0100119 // The offset is relative to a tagged or untagged HeapObject pointer,
120 // so either offset or offset + kHeapObjectTag must be a
121 // multiple of kPointerSize.
122 ASSERT(IsAligned(offset, kPointerSize) ||
123 IsAligned(offset + kHeapObjectTag, kPointerSize));
124
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100125 Register dst = scratch;
126 if (offset != 0) {
127 lea(dst, Operand(object, offset));
Steve Blocka7e24c12009-10-30 11:49:00 +0000128 } else {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100129 // Array access: calculate the destination address in the same manner as
130 // KeyedStoreIC::GenerateGeneric. Multiply a smi by 2 to get an offset
131 // into an array of words.
132 ASSERT_EQ(1, kSmiTagSize);
133 ASSERT_EQ(0, kSmiTag);
134 lea(dst, Operand(object, dst, times_half_pointer_size,
135 FixedArray::kHeaderSize - kHeapObjectTag));
Steve Blocka7e24c12009-10-30 11:49:00 +0000136 }
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100137 RecordWriteHelper(object, dst, value);
Steve Blocka7e24c12009-10-30 11:49:00 +0000138
139 bind(&done);
Leon Clarke4515c472010-02-03 11:58:03 +0000140
141 // Clobber all input registers when running with the debug-code flag
142 // turned on to provoke errors.
143 if (FLAG_debug_code) {
Steve Block6ded16b2010-05-10 14:33:55 +0100144 mov(object, Immediate(BitCast<int32_t>(kZapValue)));
145 mov(value, Immediate(BitCast<int32_t>(kZapValue)));
146 mov(scratch, Immediate(BitCast<int32_t>(kZapValue)));
Leon Clarke4515c472010-02-03 11:58:03 +0000147 }
Steve Blocka7e24c12009-10-30 11:49:00 +0000148}
149
150
Steve Block8defd9f2010-07-08 12:39:36 +0100151void MacroAssembler::RecordWrite(Register object,
152 Register address,
153 Register value) {
154 // The compiled code assumes that record write doesn't change the
155 // context register, so we check that none of the clobbered
156 // registers are esi.
157 ASSERT(!object.is(esi) && !value.is(esi) && !address.is(esi));
158
159 // First, check if a write barrier is even needed. The tests below
160 // catch stores of Smis and stores into young gen.
161 Label done;
162
163 // Skip barrier if writing a smi.
164 ASSERT_EQ(0, kSmiTag);
165 test(value, Immediate(kSmiTagMask));
166 j(zero, &done);
167
168 InNewSpace(object, value, equal, &done);
169
170 RecordWriteHelper(object, address, value);
171
172 bind(&done);
173
174 // Clobber all input registers when running with the debug-code flag
175 // turned on to provoke errors.
176 if (FLAG_debug_code) {
177 mov(object, Immediate(BitCast<int32_t>(kZapValue)));
178 mov(address, Immediate(BitCast<int32_t>(kZapValue)));
179 mov(value, Immediate(BitCast<int32_t>(kZapValue)));
180 }
181}
182
183
Steve Blockd0582a62009-12-15 09:54:21 +0000184void MacroAssembler::StackLimitCheck(Label* on_stack_overflow) {
185 cmp(esp,
186 Operand::StaticVariable(ExternalReference::address_of_stack_limit()));
187 j(below, on_stack_overflow);
188}
189
190
Steve Blocka7e24c12009-10-30 11:49:00 +0000191#ifdef ENABLE_DEBUGGER_SUPPORT
192void MacroAssembler::SaveRegistersToMemory(RegList regs) {
193 ASSERT((regs & ~kJSCallerSaved) == 0);
194 // Copy the content of registers to memory location.
195 for (int i = 0; i < kNumJSCallerSaved; i++) {
196 int r = JSCallerSavedCode(i);
197 if ((regs & (1 << r)) != 0) {
198 Register reg = { r };
199 ExternalReference reg_addr =
200 ExternalReference(Debug_Address::Register(i));
201 mov(Operand::StaticVariable(reg_addr), reg);
202 }
203 }
204}
205
206
207void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
208 ASSERT((regs & ~kJSCallerSaved) == 0);
209 // Copy the content of memory location to registers.
210 for (int i = kNumJSCallerSaved; --i >= 0;) {
211 int r = JSCallerSavedCode(i);
212 if ((regs & (1 << r)) != 0) {
213 Register reg = { r };
214 ExternalReference reg_addr =
215 ExternalReference(Debug_Address::Register(i));
216 mov(reg, Operand::StaticVariable(reg_addr));
217 }
218 }
219}
220
221
222void MacroAssembler::PushRegistersFromMemory(RegList regs) {
223 ASSERT((regs & ~kJSCallerSaved) == 0);
224 // Push the content of the memory location to the stack.
225 for (int i = 0; i < kNumJSCallerSaved; i++) {
226 int r = JSCallerSavedCode(i);
227 if ((regs & (1 << r)) != 0) {
228 ExternalReference reg_addr =
229 ExternalReference(Debug_Address::Register(i));
230 push(Operand::StaticVariable(reg_addr));
231 }
232 }
233}
234
235
236void MacroAssembler::PopRegistersToMemory(RegList regs) {
237 ASSERT((regs & ~kJSCallerSaved) == 0);
238 // Pop the content from the stack to the memory location.
239 for (int i = kNumJSCallerSaved; --i >= 0;) {
240 int r = JSCallerSavedCode(i);
241 if ((regs & (1 << r)) != 0) {
242 ExternalReference reg_addr =
243 ExternalReference(Debug_Address::Register(i));
244 pop(Operand::StaticVariable(reg_addr));
245 }
246 }
247}
248
249
250void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
251 Register scratch,
252 RegList regs) {
253 ASSERT((regs & ~kJSCallerSaved) == 0);
254 // Copy the content of the stack to the memory location and adjust base.
255 for (int i = kNumJSCallerSaved; --i >= 0;) {
256 int r = JSCallerSavedCode(i);
257 if ((regs & (1 << r)) != 0) {
258 mov(scratch, Operand(base, 0));
259 ExternalReference reg_addr =
260 ExternalReference(Debug_Address::Register(i));
261 mov(Operand::StaticVariable(reg_addr), scratch);
262 lea(base, Operand(base, kPointerSize));
263 }
264 }
265}
Andrei Popescu402d9372010-02-26 13:31:12 +0000266
267void MacroAssembler::DebugBreak() {
268 Set(eax, Immediate(0));
269 mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak)));
270 CEntryStub ces(1);
271 call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
272}
Steve Blocka7e24c12009-10-30 11:49:00 +0000273#endif
274
275void MacroAssembler::Set(Register dst, const Immediate& x) {
276 if (x.is_zero()) {
277 xor_(dst, Operand(dst)); // shorter than mov
278 } else {
279 mov(dst, x);
280 }
281}
282
283
284void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
285 mov(dst, x);
286}
287
288
289void MacroAssembler::CmpObjectType(Register heap_object,
290 InstanceType type,
291 Register map) {
292 mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
293 CmpInstanceType(map, type);
294}
295
296
297void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
298 cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
299 static_cast<int8_t>(type));
300}
301
302
Andrei Popescu31002712010-02-23 13:46:05 +0000303void MacroAssembler::CheckMap(Register obj,
304 Handle<Map> map,
305 Label* fail,
306 bool is_heap_object) {
307 if (!is_heap_object) {
308 test(obj, Immediate(kSmiTagMask));
309 j(zero, fail);
310 }
311 cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
312 j(not_equal, fail);
313}
314
315
Leon Clarkee46be812010-01-19 14:06:41 +0000316Condition MacroAssembler::IsObjectStringType(Register heap_object,
317 Register map,
318 Register instance_type) {
319 mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
320 movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
321 ASSERT(kNotStringTag != 0);
322 test(instance_type, Immediate(kIsNotStringMask));
323 return zero;
324}
325
326
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100327void MacroAssembler::IsObjectJSObjectType(Register heap_object,
328 Register map,
329 Register scratch,
330 Label* fail) {
331 mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
332 IsInstanceJSObjectType(map, scratch, fail);
333}
334
335
336void MacroAssembler::IsInstanceJSObjectType(Register map,
337 Register scratch,
338 Label* fail) {
339 movzx_b(scratch, FieldOperand(map, Map::kInstanceTypeOffset));
340 sub(Operand(scratch), Immediate(FIRST_JS_OBJECT_TYPE));
341 cmp(scratch, LAST_JS_OBJECT_TYPE - FIRST_JS_OBJECT_TYPE);
342 j(above, fail);
343}
344
345
Steve Blocka7e24c12009-10-30 11:49:00 +0000346void MacroAssembler::FCmp() {
Steve Blockd0582a62009-12-15 09:54:21 +0000347 if (CpuFeatures::IsSupported(CMOV)) {
Steve Block3ce2e202009-11-05 08:53:23 +0000348 fucomip();
349 ffree(0);
350 fincstp();
351 } else {
352 fucompp();
353 push(eax);
354 fnstsw_ax();
355 sahf();
356 pop(eax);
357 }
Steve Blocka7e24c12009-10-30 11:49:00 +0000358}
359
360
Steve Block6ded16b2010-05-10 14:33:55 +0100361void MacroAssembler::AbortIfNotNumber(Register object) {
Andrei Popescu402d9372010-02-26 13:31:12 +0000362 Label ok;
363 test(object, Immediate(kSmiTagMask));
364 j(zero, &ok);
365 cmp(FieldOperand(object, HeapObject::kMapOffset),
366 Factory::heap_number_map());
Steve Block6ded16b2010-05-10 14:33:55 +0100367 Assert(equal, "Operand not a number");
Andrei Popescu402d9372010-02-26 13:31:12 +0000368 bind(&ok);
369}
370
371
Steve Block6ded16b2010-05-10 14:33:55 +0100372void MacroAssembler::AbortIfNotSmi(Register object) {
373 test(object, Immediate(kSmiTagMask));
374 Assert(equal, "Operand not a smi");
375}
376
377
Steve Blocka7e24c12009-10-30 11:49:00 +0000378void MacroAssembler::EnterFrame(StackFrame::Type type) {
379 push(ebp);
380 mov(ebp, Operand(esp));
381 push(esi);
382 push(Immediate(Smi::FromInt(type)));
383 push(Immediate(CodeObject()));
384 if (FLAG_debug_code) {
385 cmp(Operand(esp, 0), Immediate(Factory::undefined_value()));
386 Check(not_equal, "code object not properly patched");
387 }
388}
389
390
391void MacroAssembler::LeaveFrame(StackFrame::Type type) {
392 if (FLAG_debug_code) {
393 cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
394 Immediate(Smi::FromInt(type)));
395 Check(equal, "stack frame types must match");
396 }
397 leave();
398}
399
Steve Blockd0582a62009-12-15 09:54:21 +0000400void MacroAssembler::EnterExitFramePrologue(ExitFrame::Mode mode) {
Steve Blocka7e24c12009-10-30 11:49:00 +0000401 // Setup the frame structure on the stack.
402 ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
403 ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
404 ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
405 push(ebp);
406 mov(ebp, Operand(esp));
407
408 // Reserve room for entry stack pointer and push the debug marker.
409 ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
Andrei Popescu402d9372010-02-26 13:31:12 +0000410 push(Immediate(0)); // Saved entry sp, patched before call.
411 push(Immediate(CodeObject())); // Accessed from ExitFrame::code_slot.
Steve Blocka7e24c12009-10-30 11:49:00 +0000412
413 // Save the frame pointer and the context in top.
414 ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
415 ExternalReference context_address(Top::k_context_address);
416 mov(Operand::StaticVariable(c_entry_fp_address), ebp);
417 mov(Operand::StaticVariable(context_address), esi);
Steve Blockd0582a62009-12-15 09:54:21 +0000418}
Steve Blocka7e24c12009-10-30 11:49:00 +0000419
Steve Blockd0582a62009-12-15 09:54:21 +0000420void MacroAssembler::EnterExitFrameEpilogue(ExitFrame::Mode mode, int argc) {
Steve Blocka7e24c12009-10-30 11:49:00 +0000421#ifdef ENABLE_DEBUGGER_SUPPORT
422 // Save the state of all registers to the stack from the memory
423 // location. This is needed to allow nested break points.
Steve Blockd0582a62009-12-15 09:54:21 +0000424 if (mode == ExitFrame::MODE_DEBUG) {
Steve Blocka7e24c12009-10-30 11:49:00 +0000425 // TODO(1243899): This should be symmetric to
426 // CopyRegistersFromStackToMemory() but it isn't! esp is assumed
427 // correct here, but computed for the other call. Very error
428 // prone! FIX THIS. Actually there are deeper problems with
429 // register saving than this asymmetry (see the bug report
430 // associated with this issue).
431 PushRegistersFromMemory(kJSCallerSaved);
432 }
433#endif
434
Steve Blockd0582a62009-12-15 09:54:21 +0000435 // Reserve space for arguments.
436 sub(Operand(esp), Immediate(argc * kPointerSize));
Steve Blocka7e24c12009-10-30 11:49:00 +0000437
438 // Get the required frame alignment for the OS.
439 static const int kFrameAlignment = OS::ActivationFrameAlignment();
440 if (kFrameAlignment > 0) {
441 ASSERT(IsPowerOf2(kFrameAlignment));
442 and_(esp, -kFrameAlignment);
443 }
444
445 // Patch the saved entry sp.
446 mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
447}
448
449
Steve Blockd0582a62009-12-15 09:54:21 +0000450void MacroAssembler::EnterExitFrame(ExitFrame::Mode mode) {
451 EnterExitFramePrologue(mode);
452
453 // Setup argc and argv in callee-saved registers.
454 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
455 mov(edi, Operand(eax));
456 lea(esi, Operand(ebp, eax, times_4, offset));
457
458 EnterExitFrameEpilogue(mode, 2);
459}
460
461
462void MacroAssembler::EnterApiExitFrame(ExitFrame::Mode mode,
463 int stack_space,
464 int argc) {
465 EnterExitFramePrologue(mode);
466
467 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
468 lea(esi, Operand(ebp, (stack_space * kPointerSize) + offset));
469
470 EnterExitFrameEpilogue(mode, argc);
471}
472
473
474void MacroAssembler::LeaveExitFrame(ExitFrame::Mode mode) {
Steve Blocka7e24c12009-10-30 11:49:00 +0000475#ifdef ENABLE_DEBUGGER_SUPPORT
476 // Restore the memory copy of the registers by digging them out from
477 // the stack. This is needed to allow nested break points.
Steve Blockd0582a62009-12-15 09:54:21 +0000478 if (mode == ExitFrame::MODE_DEBUG) {
Steve Blocka7e24c12009-10-30 11:49:00 +0000479 // It's okay to clobber register ebx below because we don't need
480 // the function pointer after this.
481 const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
Steve Blockd0582a62009-12-15 09:54:21 +0000482 int kOffset = ExitFrameConstants::kCodeOffset - kCallerSavedSize;
Steve Blocka7e24c12009-10-30 11:49:00 +0000483 lea(ebx, Operand(ebp, kOffset));
484 CopyRegistersFromStackToMemory(ebx, ecx, kJSCallerSaved);
485 }
486#endif
487
488 // Get the return address from the stack and restore the frame pointer.
489 mov(ecx, Operand(ebp, 1 * kPointerSize));
490 mov(ebp, Operand(ebp, 0 * kPointerSize));
491
492 // Pop the arguments and the receiver from the caller stack.
493 lea(esp, Operand(esi, 1 * kPointerSize));
494
495 // Restore current context from top and clear it in debug mode.
496 ExternalReference context_address(Top::k_context_address);
497 mov(esi, Operand::StaticVariable(context_address));
498#ifdef DEBUG
499 mov(Operand::StaticVariable(context_address), Immediate(0));
500#endif
501
502 // Push the return address to get ready to return.
503 push(ecx);
504
505 // Clear the top frame.
506 ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
507 mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
508}
509
510
511void MacroAssembler::PushTryHandler(CodeLocation try_location,
512 HandlerType type) {
513 // Adjust this code if not the case.
514 ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
515 // The pc (return address) is already on TOS.
516 if (try_location == IN_JAVASCRIPT) {
517 if (type == TRY_CATCH_HANDLER) {
518 push(Immediate(StackHandler::TRY_CATCH));
519 } else {
520 push(Immediate(StackHandler::TRY_FINALLY));
521 }
522 push(ebp);
523 } else {
524 ASSERT(try_location == IN_JS_ENTRY);
525 // The frame pointer does not point to a JS frame so we save NULL
526 // for ebp. We expect the code throwing an exception to check ebp
527 // before dereferencing it to restore the context.
528 push(Immediate(StackHandler::ENTRY));
529 push(Immediate(0)); // NULL frame pointer.
530 }
531 // Save the current handler as the next handler.
532 push(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
533 // Link this handler as the new current one.
534 mov(Operand::StaticVariable(ExternalReference(Top::k_handler_address)), esp);
535}
536
537
Leon Clarkee46be812010-01-19 14:06:41 +0000538void MacroAssembler::PopTryHandler() {
539 ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
540 pop(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
541 add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize));
542}
543
544
Steve Blocka7e24c12009-10-30 11:49:00 +0000545void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
546 Register scratch,
547 Label* miss) {
548 Label same_contexts;
549
550 ASSERT(!holder_reg.is(scratch));
551
552 // Load current lexical context from the stack frame.
553 mov(scratch, Operand(ebp, StandardFrameConstants::kContextOffset));
554
555 // When generating debug code, make sure the lexical context is set.
556 if (FLAG_debug_code) {
557 cmp(Operand(scratch), Immediate(0));
558 Check(not_equal, "we should not have an empty lexical context");
559 }
560 // Load the global context of the current context.
561 int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
562 mov(scratch, FieldOperand(scratch, offset));
563 mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));
564
565 // Check the context is a global context.
566 if (FLAG_debug_code) {
567 push(scratch);
568 // Read the first word and compare to global_context_map.
569 mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
570 cmp(scratch, Factory::global_context_map());
571 Check(equal, "JSGlobalObject::global_context should be a global context.");
572 pop(scratch);
573 }
574
575 // Check if both contexts are the same.
576 cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
577 j(equal, &same_contexts, taken);
578
579 // Compare security tokens, save holder_reg on the stack so we can use it
580 // as a temporary register.
581 //
582 // TODO(119): avoid push(holder_reg)/pop(holder_reg)
583 push(holder_reg);
584 // Check that the security token in the calling global object is
585 // compatible with the security token in the receiving global
586 // object.
587 mov(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
588
589 // Check the context is a global context.
590 if (FLAG_debug_code) {
591 cmp(holder_reg, Factory::null_value());
592 Check(not_equal, "JSGlobalProxy::context() should not be null.");
593
594 push(holder_reg);
595 // Read the first word and compare to global_context_map(),
596 mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
597 cmp(holder_reg, Factory::global_context_map());
598 Check(equal, "JSGlobalObject::global_context should be a global context.");
599 pop(holder_reg);
600 }
601
602 int token_offset = Context::kHeaderSize +
603 Context::SECURITY_TOKEN_INDEX * kPointerSize;
604 mov(scratch, FieldOperand(scratch, token_offset));
605 cmp(scratch, FieldOperand(holder_reg, token_offset));
606 pop(holder_reg);
607 j(not_equal, miss, not_taken);
608
609 bind(&same_contexts);
610}
611
612
613void MacroAssembler::LoadAllocationTopHelper(Register result,
614 Register result_end,
615 Register scratch,
616 AllocationFlags flags) {
617 ExternalReference new_space_allocation_top =
618 ExternalReference::new_space_allocation_top_address();
619
620 // Just return if allocation top is already known.
621 if ((flags & RESULT_CONTAINS_TOP) != 0) {
622 // No use of scratch if allocation top is provided.
623 ASSERT(scratch.is(no_reg));
624#ifdef DEBUG
625 // Assert that result actually contains top on entry.
626 cmp(result, Operand::StaticVariable(new_space_allocation_top));
627 Check(equal, "Unexpected allocation top");
628#endif
629 return;
630 }
631
632 // Move address of new object to result. Use scratch register if available.
633 if (scratch.is(no_reg)) {
634 mov(result, Operand::StaticVariable(new_space_allocation_top));
635 } else {
636 ASSERT(!scratch.is(result_end));
637 mov(Operand(scratch), Immediate(new_space_allocation_top));
638 mov(result, Operand(scratch, 0));
639 }
640}
641
642
643void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
644 Register scratch) {
Steve Blockd0582a62009-12-15 09:54:21 +0000645 if (FLAG_debug_code) {
646 test(result_end, Immediate(kObjectAlignmentMask));
647 Check(zero, "Unaligned allocation in new space");
648 }
649
Steve Blocka7e24c12009-10-30 11:49:00 +0000650 ExternalReference new_space_allocation_top =
651 ExternalReference::new_space_allocation_top_address();
652
653 // Update new top. Use scratch if available.
654 if (scratch.is(no_reg)) {
655 mov(Operand::StaticVariable(new_space_allocation_top), result_end);
656 } else {
657 mov(Operand(scratch, 0), result_end);
658 }
659}
660
661
662void MacroAssembler::AllocateInNewSpace(int object_size,
663 Register result,
664 Register result_end,
665 Register scratch,
666 Label* gc_required,
667 AllocationFlags flags) {
668 ASSERT(!result.is(result_end));
669
670 // Load address of new object into result.
671 LoadAllocationTopHelper(result, result_end, scratch, flags);
672
673 // Calculate new top and bail out if new space is exhausted.
674 ExternalReference new_space_allocation_limit =
675 ExternalReference::new_space_allocation_limit_address();
676 lea(result_end, Operand(result, object_size));
677 cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
678 j(above, gc_required, not_taken);
679
Steve Blocka7e24c12009-10-30 11:49:00 +0000680 // Tag result if requested.
681 if ((flags & TAG_OBJECT) != 0) {
Leon Clarkee46be812010-01-19 14:06:41 +0000682 lea(result, Operand(result, kHeapObjectTag));
Steve Blocka7e24c12009-10-30 11:49:00 +0000683 }
Leon Clarkee46be812010-01-19 14:06:41 +0000684
685 // Update allocation top.
686 UpdateAllocationTopHelper(result_end, scratch);
Steve Blocka7e24c12009-10-30 11:49:00 +0000687}
688
689
690void MacroAssembler::AllocateInNewSpace(int header_size,
691 ScaleFactor element_size,
692 Register element_count,
693 Register result,
694 Register result_end,
695 Register scratch,
696 Label* gc_required,
697 AllocationFlags flags) {
698 ASSERT(!result.is(result_end));
699
700 // Load address of new object into result.
701 LoadAllocationTopHelper(result, result_end, scratch, flags);
702
703 // Calculate new top and bail out if new space is exhausted.
704 ExternalReference new_space_allocation_limit =
705 ExternalReference::new_space_allocation_limit_address();
706 lea(result_end, Operand(result, element_count, element_size, header_size));
707 cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
708 j(above, gc_required);
709
Steve Blocka7e24c12009-10-30 11:49:00 +0000710 // Tag result if requested.
711 if ((flags & TAG_OBJECT) != 0) {
Leon Clarkee46be812010-01-19 14:06:41 +0000712 lea(result, Operand(result, kHeapObjectTag));
Steve Blocka7e24c12009-10-30 11:49:00 +0000713 }
Leon Clarkee46be812010-01-19 14:06:41 +0000714
715 // Update allocation top.
716 UpdateAllocationTopHelper(result_end, scratch);
Steve Blocka7e24c12009-10-30 11:49:00 +0000717}
718
719
720void MacroAssembler::AllocateInNewSpace(Register object_size,
721 Register result,
722 Register result_end,
723 Register scratch,
724 Label* gc_required,
725 AllocationFlags flags) {
726 ASSERT(!result.is(result_end));
727
728 // Load address of new object into result.
729 LoadAllocationTopHelper(result, result_end, scratch, flags);
730
731 // Calculate new top and bail out if new space is exhausted.
732 ExternalReference new_space_allocation_limit =
733 ExternalReference::new_space_allocation_limit_address();
734 if (!object_size.is(result_end)) {
735 mov(result_end, object_size);
736 }
737 add(result_end, Operand(result));
738 cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
739 j(above, gc_required, not_taken);
740
Steve Blocka7e24c12009-10-30 11:49:00 +0000741 // Tag result if requested.
742 if ((flags & TAG_OBJECT) != 0) {
Leon Clarkee46be812010-01-19 14:06:41 +0000743 lea(result, Operand(result, kHeapObjectTag));
Steve Blocka7e24c12009-10-30 11:49:00 +0000744 }
Leon Clarkee46be812010-01-19 14:06:41 +0000745
746 // Update allocation top.
747 UpdateAllocationTopHelper(result_end, scratch);
Steve Blocka7e24c12009-10-30 11:49:00 +0000748}
749
750
751void MacroAssembler::UndoAllocationInNewSpace(Register object) {
752 ExternalReference new_space_allocation_top =
753 ExternalReference::new_space_allocation_top_address();
754
755 // Make sure the object has no tag before resetting top.
756 and_(Operand(object), Immediate(~kHeapObjectTagMask));
757#ifdef DEBUG
758 cmp(object, Operand::StaticVariable(new_space_allocation_top));
759 Check(below, "Undo allocation of non allocated memory");
760#endif
761 mov(Operand::StaticVariable(new_space_allocation_top), object);
762}
763
764
Steve Block3ce2e202009-11-05 08:53:23 +0000765void MacroAssembler::AllocateHeapNumber(Register result,
766 Register scratch1,
767 Register scratch2,
768 Label* gc_required) {
769 // Allocate heap number in new space.
770 AllocateInNewSpace(HeapNumber::kSize,
771 result,
772 scratch1,
773 scratch2,
774 gc_required,
775 TAG_OBJECT);
776
777 // Set the map.
778 mov(FieldOperand(result, HeapObject::kMapOffset),
779 Immediate(Factory::heap_number_map()));
780}
781
782
Steve Blockd0582a62009-12-15 09:54:21 +0000783void MacroAssembler::AllocateTwoByteString(Register result,
784 Register length,
785 Register scratch1,
786 Register scratch2,
787 Register scratch3,
788 Label* gc_required) {
789 // Calculate the number of bytes needed for the characters in the string while
790 // observing object alignment.
791 ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
Steve Blockd0582a62009-12-15 09:54:21 +0000792 ASSERT(kShortSize == 2);
Leon Clarkee46be812010-01-19 14:06:41 +0000793 // scratch1 = length * 2 + kObjectAlignmentMask.
794 lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
Steve Blockd0582a62009-12-15 09:54:21 +0000795 and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));
796
797 // Allocate two byte string in new space.
798 AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
799 times_1,
800 scratch1,
801 result,
802 scratch2,
803 scratch3,
804 gc_required,
805 TAG_OBJECT);
806
807 // Set the map, length and hash field.
808 mov(FieldOperand(result, HeapObject::kMapOffset),
809 Immediate(Factory::string_map()));
Steve Block6ded16b2010-05-10 14:33:55 +0100810 mov(scratch1, length);
811 SmiTag(scratch1);
812 mov(FieldOperand(result, String::kLengthOffset), scratch1);
Steve Blockd0582a62009-12-15 09:54:21 +0000813 mov(FieldOperand(result, String::kHashFieldOffset),
814 Immediate(String::kEmptyHashField));
815}
816
817
// Allocate a new sequential ASCII string in new space big enough to hold
// |length| one-byte characters, and initialize its map, length and hash
// field.  |result| receives the tagged string pointer; |length| is
// preserved; scratch1-3 are clobbered.  Jumps to |gc_required| on failure.
void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, length);
  ASSERT(kCharSize == 1);
  // Round the character count up to the required object alignment.
  add(Operand(scratch1), Immediate(kObjectAlignmentMask));
  and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));

  // Allocate ascii string in new space.
  AllocateInNewSpace(SeqAsciiString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::ascii_string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);  // The length field holds a smi.
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
851
852
// Allocate a (two-byte) cons string object in new space.  Only the map is
// initialized; the caller must fill in length, hash, first and second.
// Jumps to |gc_required| if allocation fails.
void MacroAssembler::AllocateConsString(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::cons_string_map()));
}
869
870
// Allocate an ASCII cons string object in new space.  Only the map is
// initialized; the caller must fill in length, hash, first and second.
// Jumps to |gc_required| if allocation fails.
void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  // Allocate cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::cons_ascii_string_map()));
}
887
888
// Virtual-frame variant of the negative-zero check: if |result| is zero and
// |op| is negative, the multiplication produced -0 and we branch to
// |then_target|.  Otherwise execution falls through.
void MacroAssembler::NegativeZeroTest(CodeGenerator* cgen,
                                      Register result,
                                      Register op,
                                      JumpTarget* then_target) {
  JumpTarget ok;
  test(result, Operand(result));
  ok.Branch(not_zero, taken);        // Non-zero result cannot be -0.
  test(op, Operand(op));
  then_target->Branch(sign, not_taken);  // Zero result with negative operand.
  ok.Bind();
}
900
901
// Jump to |then_label| if |result| is zero while |op| is negative, i.e. the
// operation produced the floating-point value -0 rather than integer 0.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);           // Non-zero result cannot be -0.
  test(op, Operand(op));
  j(sign, then_label, not_taken);    // Zero result with negative operand.
  bind(&ok);
}
912
913
// Jump to |then_label| if |result| is zero while either |op1| or |op2| is
// negative (the OR of the two sign bits is set), i.e. the operation may have
// produced -0.  |scratch| is clobbered.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);           // Non-zero result cannot be -0.
  mov(scratch, Operand(op1));
  or_(scratch, Operand(op2));        // Sign flag set iff either op is negative.
  j(sign, then_label, not_taken);
  bind(&ok);
}
927
928
// Load into |result| the prototype that instances created from |function|
// would get.  Jumps to |miss| if |function| is not a function, or if its
// prototype is not yet allocated (the hole).  |scratch| is clobbered.
void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss) {
  // Check that the receiver isn't a smi.
  test(function, Immediate(kSmiTagMask));
  j(zero, miss, not_taken);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);  // Leaves map in result.
  j(not_equal, miss, not_taken);

  // Make sure that the function has an instance prototype.
  Label non_instance;
  movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
  test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance, not_taken);

  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(Operand(result), Immediate(Factory::the_hole_value()));
  j(equal, miss, not_taken);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  mov(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}
974
975
// Emit a call to |stub|, forcing code generation now via GetCode().
void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  call(stub->GetCode(), RelocInfo::CODE_TARGET);
}
980
981
// Like CallStub, but uses TryGetCode() so that a failed stub-code allocation
// is reported to the caller as a Failure object instead of aborting.
Object* MacroAssembler::TryCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Object* result = stub->TryGetCode();
  if (!result->IsFailure()) {
    call(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
  }
  return result;  // Either the code object or a Failure.
}
990
991
// Emit a tail call (jump) to |stub|, forcing code generation now.
void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
}
996
997
// Like TailCallStub, but uses TryGetCode() so that a failed stub-code
// allocation is reported as a Failure object instead of aborting.
Object* MacroAssembler::TryTailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Object* result = stub->TryGetCode();
  if (!result->IsFailure()) {
    jmp(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
  }
  return result;  // Either the code object or a Failure.
}
1006
1007
// Return from a stub, popping |argc| - 1 arguments (the receiver stays with
// the return address handling).  Only valid while generating a stub.
void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}
1012
1013
1014void MacroAssembler::IllegalOperation(int num_arguments) {
1015 if (num_arguments > 0) {
1016 add(Operand(esp), Immediate(num_arguments * kPointerSize));
1017 }
1018 mov(eax, Immediate(Factory::undefined_value()));
1019}
1020
1021
// Convenience overload: resolve the runtime function for |id| and call it.
void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}
1025
1026
// Convenience overload: resolve the runtime function for |id| and try-call
// it, propagating any Failure from stub-code allocation.
Object* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
                                       int num_arguments) {
  return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
}
1031
1032
// Call the runtime function |f| with |num_arguments| arguments already on
// the stack, going through the C entry stub.  Clobbers eax and ebx.
void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));        // eax: argument count.
  mov(ebx, Immediate(ExternalReference(f))); // ebx: C function to call.
  CEntryStub ces(1);
  CallStub(&ces);
}
1051
1052
// Call the external (C) function |ref| with |num_arguments| arguments
// already on the stack, via the C entry stub.  Clobbers eax and ebx.
void MacroAssembler::CallExternalReference(ExternalReference ref,
                                           int num_arguments) {
  mov(eax, Immediate(num_arguments));  // eax: argument count.
  mov(ebx, Immediate(ref));            // ebx: entry point.

  CEntryStub stub(1);
  CallStub(&stub);
}
1061
1062
// Like CallRuntime(Runtime::Function*, int) but uses TryCallStub so a
// Failure from stub-code allocation is returned rather than aborting.
Object* MacroAssembler::TryCallRuntime(Runtime::Function* f,
                                       int num_arguments) {
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    // Since we did not call the stub, there was no allocation failure.
    // Return some non-failure object.
    return Heap::undefined_value();
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));        // eax: argument count.
  mov(ebx, Immediate(ExternalReference(f))); // ebx: C function to call.
  CEntryStub ces(1);
  return TryCallStub(&ces);
}
1081
1082
// Tail-call the external function |ext| with |num_arguments| arguments
// already on the stack.  |result_size| is unused on ia32 (results fit in
// registers); it is kept for signature parity with other architectures.
void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));  // eax: argument count.
  JumpToExternalReference(ext);
}
1093
1094
// Convenience overload: tail-call the runtime function identified by |fid|.
void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid), num_arguments, result_size);
}
1100
1101
// Save the current HandleScope state (extensions, next, limit) on the stack
// and reset the extension count, opening a fresh scope.  The three pushed
// words are smi-tagged or word-aligned so the GC ignores them.
// Restored by PopHandleScope/TryPopHandleScope.  Clobbers |scratch|.
void MacroAssembler::PushHandleScope(Register scratch) {
  // Push the number of extensions, smi-tagged so the gc will ignore it.
  ExternalReference extensions_address =
      ExternalReference::handle_scope_extensions_address();
  mov(scratch, Operand::StaticVariable(extensions_address));
  ASSERT_EQ(0, kSmiTag);
  shl(scratch, kSmiTagSize);
  push(scratch);
  mov(Operand::StaticVariable(extensions_address), Immediate(0));
  // Push next and limit pointers which will be wordsize aligned and
  // hence automatically smi tagged.
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  push(Operand::StaticVariable(next_address));
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address();
  push(Operand::StaticVariable(limit_address));
}
1120
1121
// Close the handle scope opened by PushHandleScope: delete any extensions
// created inside it, then restore limit, next and extension count from the
// stack (popped in reverse push order).  If |gc_allowed| is false, the
// runtime call uses TryCallRuntime and a Failure is propagated to the
// caller.  |saved| (if valid) is preserved across the runtime call;
// |scratch| is clobbered.
Object* MacroAssembler::PopHandleScopeHelper(Register saved,
                                             Register scratch,
                                             bool gc_allowed) {
  Object* result = NULL;
  ExternalReference extensions_address =
      ExternalReference::handle_scope_extensions_address();
  Label write_back;
  mov(scratch, Operand::StaticVariable(extensions_address));
  cmp(Operand(scratch), Immediate(0));
  j(equal, &write_back);  // No extensions: skip the runtime call.
  // Calling a runtime function messes with registers so we save and
  // restore any one we're asked not to change
  if (saved.is_valid()) push(saved);
  if (gc_allowed) {
    CallRuntime(Runtime::kDeleteHandleScopeExtensions, 0);
  } else {
    result = TryCallRuntime(Runtime::kDeleteHandleScopeExtensions, 0);
    if (result->IsFailure()) return result;
  }
  if (saved.is_valid()) pop(saved);

  bind(&write_back);
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address();
  pop(Operand::StaticVariable(limit_address));
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  pop(Operand::StaticVariable(next_address));
  pop(scratch);
  shr(scratch, kSmiTagSize);  // Undo the smi tagging done on push.
  mov(Operand::StaticVariable(extensions_address), scratch);

  return result;
}
1156
1157
// Close the current handle scope; GC is allowed during the cleanup call.
void MacroAssembler::PopHandleScope(Register saved, Register scratch) {
  PopHandleScopeHelper(saved, scratch, true);
}
1161
1162
// Close the current handle scope without allowing GC; returns a Failure
// object if the cleanup runtime call could not be set up.
Object* MacroAssembler::TryPopHandleScope(Register saved, Register scratch) {
  return PopHandleScopeHelper(saved, scratch, false);
}
1166
1167
// Tail-call into the C entry stub with |ext| as the C function to invoke.
// Clobbers ebx (the stub's entry-point register).
void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces(1);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}
1174
1175
// Shared prologue for function invocation: compare expected and actual
// argument counts and, on mismatch, route the call through the arguments
// adaptor trampoline.  On a definite match (or after the adaptor call for
// flag == CALL_FUNCTION) control continues at |invoke|/|done|.  Register
// protocol: eax = actual count, ebx = expected count, edx = code to call.
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    const Operand& code_operand,
                                    Label* done,
                                    InvokeFlag flag) {
  bool definitely_matches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(eax, actual.immediate());
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      ASSERT(expected.reg().is(ebx));
      mov(eax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), Operand(actual.reg()));
      j(equal, &invoke);
      ASSERT(actual.reg().is(eax));
      ASSERT(expected.reg().is(ebx));
    }
  }

  if (!definitely_matches) {
    // Counts may differ at runtime: go through the arguments adaptor.
    Handle<Code> adaptor =
        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
    if (!code_constant.is_null()) {
      // The adaptor expects the raw code entry point in edx.
      mov(edx, Immediate(code_constant));
      add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_operand.is_reg(edx)) {
      mov(edx, code_operand);
    }

    if (flag == CALL_FUNCTION) {
      call(adaptor, RelocInfo::CODE_TARGET);
      jmp(done);  // Skip the direct invoke path below.
    } else {
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
1239
1240
// Invoke code at |code| (an operand) with argument-count adaption, either
// as a call or a tail jump depending on |flag|.
void MacroAssembler::InvokeCode(const Operand& code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag) {
  Label done;
  InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code);
  }
  bind(&done);
}
1255
1256
// Invoke the code object |code| with argument-count adaption, using |rmode|
// for the relocation of the call/jump target.
void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag) {
  Label done;
  Operand dummy(eax);  // Unused: the prologue takes the constant instead.
  InvokePrologue(expected, actual, code, dummy, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code, rmode);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code, rmode);
  }
  bind(&done);
}
1273
1274
// Invoke the JSFunction in edi: load its context into esi, its formal
// parameter count into ebx, and call/jump to its code via edx.
void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(fun.is(edi));  // Calling convention: the function lives in edi.
  mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  SmiUntag(ebx);  // The count is stored as a smi.
  mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset));
  lea(edx, FieldOperand(edx, Code::kHeaderSize));  // Skip the code header.

  ParameterCount expected(ebx);
  InvokeCode(Operand(edx), expected, actual, flag);
}
1289
1290
// Invoke a known, already-compiled JSFunction: materialize it in edi, set
// up its context in esi, and call its cached code directly.
void MacroAssembler::InvokeFunction(JSFunction* function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(function->is_compiled());
  // Get the function and setup the context.
  mov(edi, Immediate(Handle<JSFunction>(function)));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  // Invoke the cached code.
  Handle<Code> code(function->code());
  ParameterCount expected(function->shared()->formal_parameter_count());
  InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag);
}
1304
1305
// Invoke the JavaScript builtin |id| through its entry point in edx.  The
// expected count is faked as 0 because builtins check arity themselves.
void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments match the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinEntry(edx, id);  // Also loads the builtin function into edi.
  InvokeCode(Operand(edx), expected, expected, flag);
}
1317
1318
// Load the code entry point of builtin |id| into |target| and the builtin
// JSFunction into edi (hence |target| must not be edi).  In debug mode,
// verifies that the builtins object and the function agree on the code.
void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(edi));  // edi is clobbered with the builtin function.

  // Load the builtins object into target register.
  mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));

  // Load the JavaScript builtin function from the builtins object.
  mov(edi, FieldOperand(target, JSBuiltinsObject::OffsetOfFunctionWithId(id)));

  // Load the code entry point from the builtins object.
  mov(target, FieldOperand(target, JSBuiltinsObject::OffsetOfCodeWithId(id)));
  if (FLAG_debug_code) {
    // Make sure the code objects in the builtins object and in the
    // builtin function are the same.
    push(target);
    mov(target, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
    mov(target, FieldOperand(target, SharedFunctionInfo::kCodeOffset));
    cmp(target, Operand(esp, 0));
    Assert(equal, "Builtin code object changed");
    pop(target);
  }
  lea(target, FieldOperand(target, Code::kHeaderSize));  // Skip code header.
}
1343
1344
// Load into |dst| the function context |context_chain_length| levels up the
// context chain from the current context (esi).  The final step always
// resolves through FCONTEXT_INDEX because the found context may be an
// intermediate (e.g. with) context rather than a function context.
void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    mov(dst, Operand(esi, Context::SlotOffset(Context::CLOSURE_INDEX)));
    // Load the function context (which is the incoming, outer context).
    mov(dst, FieldOperand(dst, JSFunction::kContextOffset));
    for (int i = 1; i < context_chain_length; i++) {
      mov(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
      mov(dst, FieldOperand(dst, JSFunction::kContextOffset));
    }
    // The context may be an intermediate context, not a function context.
    mov(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  } else {  // Slot is in the current function context.
    // The context may be an intermediate context, not a function context.
    mov(dst, Operand(esi, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  }
}
1362
1363
1364
// Return without popping any arguments.
void MacroAssembler::Ret() {
  ret(0);
}
1368
1369
Leon Clarkee46be812010-01-19 14:06:41 +00001370void MacroAssembler::Drop(int stack_elements) {
1371 if (stack_elements > 0) {
1372 add(Operand(esp), Immediate(stack_elements * kPointerSize));
1373 }
1374}
1375
1376
// Load the handle |value| into |dst|.
void MacroAssembler::Move(Register dst, Handle<Object> value) {
  mov(dst, value);
}
1380
1381
// Store |value| into the stats counter, if native-code counters are on.
void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
  }
}
1387
1388
// Add |value| (> 0) to the stats counter, if native-code counters are on.
// Uses inc for the common value == 1 case (shorter encoding).
void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      inc(operand);
    } else {
      add(operand, Immediate(value));
    }
  }
}
1400
1401
// Subtract |value| (> 0) from the stats counter, if native-code counters
// are on.  Uses dec for the common value == 1 case (shorter encoding).
void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      dec(operand);
    } else {
      sub(operand, Immediate(value));
    }
  }
}
1413
1414
// Conditionally increment the counter when |cc| holds.  The flags register
// is preserved around the update (pushfd/popfd) so the caller's condition
// codes survive.
void MacroAssembler::IncrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();  // Preserve the caller's flags across the counter update.
    IncrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}
1428
1429
// Conditionally decrement the counter when |cc| holds.  The flags register
// is preserved around the update (pushfd/popfd) so the caller's condition
// codes survive.
void MacroAssembler::DecrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();  // Preserve the caller's flags across the counter update.
    DecrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}
1443
1444
Steve Blocka7e24c12009-10-30 11:49:00 +00001445void MacroAssembler::Assert(Condition cc, const char* msg) {
1446 if (FLAG_debug_code) Check(cc, msg);
1447}
1448
1449
// Emit code that aborts with |msg| unless condition |cc| holds.
void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L, taken);
  Abort(msg);
  // will not return here
  bind(&L);
}
1457
1458
// Emit code that traps (int3) if esp is not aligned to the platform's
// activation frame alignment.  No code is emitted if the required alignment
// is a single pointer or less.
void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    Label alignment_as_expected;
    test(esp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}
1472
1473
// Emit code that aborts execution via Runtime::kAbort, passing |msg|.  The
// message pointer is smuggled past the GC as two smis: an aligned pointer
// p0 and the alignment remainder (p1 - p0).  Never returns (ends in int3).
void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  set_allow_stub_calls(true);

  push(eax);
  push(Immediate(p0));
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
  int3();
}
1499
1500
// Jump to |failure| unless |instance_type| describes a sequential ASCII
// string.  |scratch| is clobbered (it may alias |instance_type|).
void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Register scratch,
    Label* failure) {
  if (!scratch.is(instance_type)) {
    mov(scratch, instance_type);
  }
  // Keep only the bits that distinguish string-ness, representation and
  // encoding, then compare against the sequential-ASCII pattern.
  and_(scratch,
       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
  cmp(scratch, kStringTag | kSeqStringTag | kAsciiStringTag);
  j(not_equal, failure);
}
1513
1514
// Jump to |failure| unless both |object1| and |object2| are flat (i.e.
// sequential) ASCII strings.  Both scratch registers are clobbered.  The
// two instance types are interleaved into one register so a single compare
// checks both objects.
void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register object1,
                                                         Register object2,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         Label* failure) {
  // Check that both objects are not smis.
  ASSERT_EQ(0, kSmiTag);
  // AND-ing the objects keeps the tag bit clear only if both are heap
  // objects, so one test covers both.
  mov(scratch1, Operand(object1));
  and_(scratch1, Operand(object2));
  test(scratch1, Immediate(kSmiTagMask));
  j(zero, failure);

  // Load instance type for both strings.
  mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
  mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
  movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ascii strings.
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
  // Interleave bits from both instance types and compare them in one check.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  and_(scratch1, kFlatAsciiStringMask);
  and_(scratch2, kFlatAsciiStringMask);
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3));
  j(not_equal, failure);
}
1545
1546
// Reserve stack space for |num_arguments| words ahead of a C call.  If the
// OS requires frame alignment, esp is aligned down and the original esp is
// saved in the extra slot just above the arguments so CallCFunction can
// restore it.  |scratch| is clobbered.
void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
  int frameAlignment = OS::ActivationFrameAlignment();
  if (frameAlignment != 0) {
    // Make stack end at alignment and make room for num_arguments words
    // and the original value of esp.
    mov(scratch, esp);
    sub(Operand(esp), Immediate((num_arguments + 1) * kPointerSize));
    ASSERT(IsPowerOf2(frameAlignment));
    and_(esp, -frameAlignment);
    // Save the pre-alignment esp for restoration after the call.
    mov(Operand(esp, num_arguments * kPointerSize), scratch);
  } else {
    sub(Operand(esp), Immediate(num_arguments * kPointerSize));
  }
}
1561
1562
// Call the external C function |function|.  Must be preceded by
// PrepareCallCFunction with the same argument count.
void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Trashing eax is ok as it will be the return value.
  mov(Operand(eax), Immediate(function));
  CallCFunction(eax, num_arguments);
}
1569
1570
// Call the C function whose address is in |function| and undo the stack
// setup from PrepareCallCFunction: if frames are aligned, esp is restored
// from the saved slot; otherwise the argument space is simply popped.
void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  // Check stack alignment.
  if (FLAG_debug_code) {
    CheckStackAlignment();
  }

  call(Operand(function));
  if (OS::ActivationFrameAlignment() != 0) {
    // Restore the esp saved by PrepareCallCFunction.
    mov(esp, Operand(esp, num_arguments * kPointerSize));
  } else {
    add(Operand(esp), Immediate(num_arguments * sizeof(int32_t)));
  }
}
1585
1586
// Set up a macro assembler that emits directly over |size| bytes of
// existing code at |address|, for in-place patching.
CodePatcher::CodePatcher(byte* address, int size)
    : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap on order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
1594
1595
// Finish patching: flush the instruction cache over the patched region and
// verify that exactly |size_| bytes were emitted.
CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
1604
1605
1606} } // namespace v8::internal
Leon Clarkef7060e22010-06-03 12:02:55 +01001607
1608#endif // V8_TARGET_ARCH_IA32