blob: d0eeb7734cd08b34b4632dfdde35dd7dc7323836 [file] [log] [blame]
Steve Blocka7e24c12009-10-30 11:49:00 +00001// Copyright 2006-2009 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
29
Leon Clarkef7060e22010-06-03 12:02:55 +010030#if defined(V8_TARGET_ARCH_IA32)
31
Steve Blocka7e24c12009-10-30 11:49:00 +000032#include "bootstrapper.h"
33#include "codegen-inl.h"
34#include "debug.h"
35#include "runtime.h"
36#include "serialize.h"
37
38namespace v8 {
39namespace internal {
40
41// -------------------------------------------------------------------------
42// MacroAssembler implementation.
43
// Construct a macro assembler emitting into 'buffer' of the given 'size'.
// Stub generation is initially off, stub calls are allowed, and the code
// object starts out as undefined (filled in later by the code generator).
MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      code_object_(Heap::undefined_value()) {
}
50
51
// Mark the page region containing 'addr' inside 'object's page as dirty
// for the write barrier. Clobbers 'object' (becomes the page start) and
// 'addr' (becomes the region number); 'scratch' is only used by the
// debug-mode new-space check.
void MacroAssembler::RecordWriteHelper(Register object,
                                       Register addr,
                                       Register scratch) {
  if (FLAG_debug_code) {
    // Check that the object is not in new space.
    Label not_in_new_space;
    InNewSpace(object, scratch, not_equal, &not_in_new_space);
    Abort("new-space object passed to RecordWriteHelper");
    bind(&not_in_new_space);
  }

  // Compute the page start address from the heap object pointer, and reuse
  // the 'object' register for it.
  and_(object, ~Page::kPageAlignmentMask);

  // Compute number of region covering addr. See Page::GetRegionNumberForAddress
  // method for more details.
  and_(addr, Page::kPageAlignmentMask);
  shr(addr, Page::kRegionSizeLog2);

  // Set dirty mark for region: bit-test-and-set with the region number as
  // the bit index into the page's dirty-flags word.
  bts(Operand(object, Page::kDirtyFlagOffset), addr);
}
75
76
// Jump to 'branch' if 'object' is in new space (cc == equal) or is not in
// new space (cc == not_equal). Clobbers 'scratch'; does not modify 'object'.
void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                Label* branch) {
  ASSERT(cc == equal || cc == not_equal);
  if (Serializer::enabled()) {
    // Can't do arithmetic on external references if it might get serialized.
    mov(scratch, Operand(object));
    // The mask isn't really an address. We load it as an external reference in
    // case the size of the new space is different between the snapshot maker
    // and the running system.
    and_(Operand(scratch), Immediate(ExternalReference::new_space_mask()));
    cmp(Operand(scratch), Immediate(ExternalReference::new_space_start()));
    j(cc, branch);
  } else {
    // Outside the serializer the start address is a compile-time constant,
    // so fold the subtract into a single lea and mask with the space size.
    int32_t new_space_start = reinterpret_cast<int32_t>(
        ExternalReference::new_space_start().address());
    lea(scratch, Operand(object, -new_space_start));
    and_(scratch, Heap::NewSpaceMask());
    j(cc, branch);
  }
}
99
100
// Write-barrier for a store of 'value' into the field at 'offset' within
// 'object'. If 'offset' is 0, 'scratch' must already hold the smi index of
// the stored array element (see the array-access branch below). Clobbers
// 'object', 'value' and 'scratch' (always via RecordWriteHelper; also
// explicitly zapped in debug mode), but never esi.
void MacroAssembler::RecordWrite(Register object,
                                 int offset,
                                 Register value,
                                 Register scratch) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are esi.
  ASSERT(!object.is(esi) && !value.is(esi) && !scratch.is(esi));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  // Skip barrier if writing a smi.
  ASSERT_EQ(0, kSmiTag);
  test(value, Immediate(kSmiTagMask));
  j(zero, &done);

  InNewSpace(object, value, equal, &done);

  // The offset is relative to a tagged or untagged HeapObject pointer,
  // so either offset or offset + kHeapObjectTag must be a
  // multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize) ||
         IsAligned(offset + kHeapObjectTag, kPointerSize));

  Register dst = scratch;
  if (offset != 0) {
    lea(dst, Operand(object, offset));
  } else {
    // Array access: calculate the destination address in the same manner as
    // KeyedStoreIC::GenerateGeneric. Multiply a smi by 2 to get an offset
    // into an array of words. Note that 'dst' (== scratch) is used as the
    // index here, so the caller must have placed the smi key in 'scratch'.
    ASSERT_EQ(1, kSmiTagSize);
    ASSERT_EQ(0, kSmiTag);
    lea(dst, Operand(object, dst, times_half_pointer_size,
                     FixedArray::kHeaderSize - kHeapObjectTag));
  }
  RecordWriteHelper(object, dst, value);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    mov(object, Immediate(BitCast<int32_t>(kZapValue)));
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
    mov(scratch, Immediate(BitCast<int32_t>(kZapValue)));
  }
}
151
152
// Write-barrier for a store of 'value' to the precomputed slot 'address'
// inside 'object'. Clobbers all three registers (via RecordWriteHelper;
// also explicitly zapped in debug mode), but never esi.
void MacroAssembler::RecordWrite(Register object,
                                 Register address,
                                 Register value) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are esi.
  ASSERT(!object.is(esi) && !value.is(esi) && !address.is(esi));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  // Skip barrier if writing a smi.
  ASSERT_EQ(0, kSmiTag);
  test(value, Immediate(kSmiTagMask));
  j(zero, &done);

  InNewSpace(object, value, equal, &done);

  RecordWriteHelper(object, address, value);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    mov(object, Immediate(BitCast<int32_t>(kZapValue)));
    mov(address, Immediate(BitCast<int32_t>(kZapValue)));
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
  }
}
184
185
// Jump to 'on_stack_overflow' if esp has grown below the VM's stack limit.
void MacroAssembler::StackLimitCheck(Label* on_stack_overflow) {
  cmp(esp,
      Operand::StaticVariable(ExternalReference::address_of_stack_limit()));
  j(below, on_stack_overflow);
}
191
192
Steve Blocka7e24c12009-10-30 11:49:00 +0000193#ifdef ENABLE_DEBUGGER_SUPPORT
// Store each JS caller-saved register named in 'regs' to its dedicated
// debugger memory slot (Debug_Address::Register(i)). Used by the debugger
// to allow nested break points.
void MacroAssembler::SaveRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of registers to memory location.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(Operand::StaticVariable(reg_addr), reg);
    }
  }
}
207
208
// Reload each JS caller-saved register named in 'regs' from its debugger
// memory slot. Iterates in reverse to mirror SaveRegistersToMemory.
void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of memory location to registers.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(reg, Operand::StaticVariable(reg_addr));
    }
  }
}
222
223
// Push the debugger memory slot of each register in 'regs' onto the stack
// (lowest-indexed register pushed first).
void MacroAssembler::PushRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Push the content of the memory location to the stack.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      push(Operand::StaticVariable(reg_addr));
    }
  }
}
236
237
// Pop stack values back into the debugger memory slots for the registers
// in 'regs'. Reverse iteration order matches PushRegistersFromMemory.
void MacroAssembler::PopRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Pop the content from the stack to the memory location.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      pop(Operand::StaticVariable(reg_addr));
    }
  }
}
250
251
// Copy saved register values from the stack (starting at 'base', advancing
// one word per copied register) into the debugger memory slots for 'regs'.
// Clobbers 'scratch' and advances 'base' past the copied words.
void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
                                                    Register scratch,
                                                    RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of the stack to the memory location and adjust base.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      mov(scratch, Operand(base, 0));
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(Operand::StaticVariable(reg_addr), scratch);
      // lea instead of add keeps the flags register untouched.
      lea(base, Operand(base, kPointerSize));
    }
  }
}
Andrei Popescu402d9372010-02-26 13:31:12 +0000268
// Emit a call into the runtime's Runtime::kDebugBreak entry via the
// C-entry stub with zero arguments (eax = argc, ebx = runtime entry).
void MacroAssembler::DebugBreak() {
  Set(eax, Immediate(0));
  mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak)));
  CEntryStub ces(1);
  call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}
Steve Blocka7e24c12009-10-30 11:49:00 +0000275#endif
276
277void MacroAssembler::Set(Register dst, const Immediate& x) {
278 if (x.is_zero()) {
279 xor_(dst, Operand(dst)); // shorter than mov
280 } else {
281 mov(dst, x);
282 }
283}
284
285
// Store the immediate 'x' to the memory operand 'dst'.
void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}
289
290
// Load 'heap_object's map into 'map' and compare its instance type against
// 'type'. Condition flags are set for the caller; 'map' is left holding
// the object's map for further checks.
void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}
297
298
// Compare the instance-type byte of the given 'map' against 'type',
// setting the condition flags for the caller.
void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       static_cast<int8_t>(type));
}
303
304
// Jump to 'fail' unless 'obj' is a heap object whose map equals 'map'.
// When 'is_heap_object' is true the caller guarantees 'obj' is not a smi
// and the smi check is skipped.
void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              bool is_heap_object) {
  if (!is_heap_object) {
    test(obj, Immediate(kSmiTagMask));
    j(zero, fail);
  }
  cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
  j(not_equal, fail);
}
316
317
// Test whether 'heap_object' is a string. Loads the object's map into
// 'map' and its instance type into 'instance_type' as side effects, and
// returns the condition (zero) that holds when the object is a string.
Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  ASSERT(kNotStringTag != 0);
  test(instance_type, Immediate(kIsNotStringMask));
  return zero;
}
327
328
// Jump to 'fail' unless 'heap_object' is a JS object. Loads the object's
// map into 'map' as a side effect; 'scratch' is clobbered by the
// instance-type range check.
void MacroAssembler::IsObjectJSObjectType(Register heap_object,
                                          Register map,
                                          Register scratch,
                                          Label* fail) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  IsInstanceJSObjectType(map, scratch, fail);
}
336
337
// Jump to 'fail' unless the instance type in 'map' lies in the JS-object
// range [FIRST_JS_OBJECT_TYPE, LAST_JS_OBJECT_TYPE]. Uses the classic
// subtract-then-unsigned-compare trick for a single range check.
// Clobbers 'scratch'.
void MacroAssembler::IsInstanceJSObjectType(Register map,
                                            Register scratch,
                                            Label* fail) {
  movzx_b(scratch, FieldOperand(map, Map::kInstanceTypeOffset));
  sub(Operand(scratch), Immediate(FIRST_JS_OBJECT_TYPE));
  cmp(scratch, LAST_JS_OBJECT_TYPE - FIRST_JS_OBJECT_TYPE);
  j(above, fail);
}
346
347
// Compare the two values on top of the FPU stack, pop both, and leave the
// result in the EFLAGS register for a following conditional jump.
void MacroAssembler::FCmp() {
  if (CpuFeatures::IsSupported(CMOV)) {
    // fucomip compares and pops once; free+incstp pops the second value.
    fucomip();
    ffree(0);
    fincstp();
  } else {
    // Without CMOV support, move the FPU status word into EFLAGS by hand.
    // eax is preserved around fnstsw_ax/sahf by the push/pop pair.
    fucompp();
    push(eax);
    fnstsw_ax();
    sahf();
    pop(eax);
  }
}
361
362
// Debug check: abort execution if 'object' is neither a smi nor a heap
// number. Does not clobber 'object'; only emitted checks run when
// debug code is enabled (Assert is conditional on FLAG_debug_code).
void MacroAssembler::AbortIfNotNumber(Register object) {
  Label ok;
  test(object, Immediate(kSmiTagMask));
  j(zero, &ok);
  cmp(FieldOperand(object, HeapObject::kMapOffset),
      Factory::heap_number_map());
  Assert(equal, "Operand not a number");
  bind(&ok);
}
372
373
// Debug check: abort execution if 'object' is not a smi.
void MacroAssembler::AbortIfNotSmi(Register object) {
  test(object, Immediate(kSmiTagMask));
  Assert(equal, "Operand not a smi");
}
378
379
// Build a standard internal frame of the given 'type': saved ebp, context
// (esi), frame-type marker and the code object. In debug mode, verify the
// code-object slot has been patched away from the undefined placeholder.
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, Operand(esp));
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(CodeObject()));
  if (FLAG_debug_code) {
    cmp(Operand(esp, 0), Immediate(Factory::undefined_value()));
    Check(not_equal, "code object not properly patched");
  }
}
391
392
// Tear down a frame created by EnterFrame. In debug mode, check that the
// frame on the stack really has the expected 'type' marker.
void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (FLAG_debug_code) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, "stack frame types must match");
  }
  leave();
}
401
// First half of exit-frame setup: lay out the fixed part of the frame
// (saved ebp, entry-sp slot, code object) and record ebp/esi in Top.
// NOTE: 'mode' is not used here; it is kept for symmetry with
// EnterExitFrameEpilogue.
void MacroAssembler::EnterExitFramePrologue(ExitFrame::Mode mode) {
  // Setup the frame structure on the stack.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(ebp);
  mov(ebp, Operand(esp));

  // Reserve room for entry stack pointer and push the debug marker.
  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  push(Immediate(CodeObject()));  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  ExternalReference context_address(Top::k_context_address);
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);
}
Steve Blocka7e24c12009-10-30 11:49:00 +0000421
// Second half of exit-frame setup: optionally spill registers for the
// debugger (MODE_DEBUG), reserve 'argc' argument slots, align esp to the
// OS frame alignment, and patch the saved entry-sp slot in the frame.
void MacroAssembler::EnterExitFrameEpilogue(ExitFrame::Mode mode, int argc) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Save the state of all registers to the stack from the memory
  // location. This is needed to allow nested break points.
  if (mode == ExitFrame::MODE_DEBUG) {
    // TODO(1243899): This should be symmetric to
    // CopyRegistersFromStackToMemory() but it isn't! esp is assumed
    // correct here, but computed for the other call. Very error
    // prone! FIX THIS. Actually there are deeper problems with
    // register saving than this asymmetry (see the bug report
    // associated with this issue).
    PushRegistersFromMemory(kJSCallerSaved);
  }
#endif

  // Reserve space for arguments.
  sub(Operand(esp), Immediate(argc * kPointerSize));

  // Get the required frame alignment for the OS.
  static const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    // Round esp down to the alignment boundary.
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}
450
451
// Enter an exit frame for a call into C with eax holding the argument
// count. Leaves argc in edi and a pointer to the first argument in esi
// (both callee-saved), then reserves 2 argument slots.
void MacroAssembler::EnterExitFrame(ExitFrame::Mode mode) {
  EnterExitFramePrologue(mode);

  // Setup argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, Operand(eax));
  lea(esi, Operand(ebp, eax, times_4, offset));

  EnterExitFrameEpilogue(mode, 2);
}
462
463
// Enter an exit frame for an API call: point esi at the caller's stack
// area ('stack_space' words above the caller SP) and reserve 'argc'
// argument slots.
void MacroAssembler::EnterApiExitFrame(ExitFrame::Mode mode,
                                       int stack_space,
                                       int argc) {
  EnterExitFramePrologue(mode);

  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  lea(esi, Operand(ebp, (stack_space * kPointerSize) + offset));

  EnterExitFrameEpilogue(mode, argc);
}
474
475
// Tear down an exit frame: restore debugger register state (MODE_DEBUG),
// drop arguments and receiver, restore the context from Top, re-push the
// return address and clear the saved C entry frame pointer.
void MacroAssembler::LeaveExitFrame(ExitFrame::Mode mode) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Restore the memory copy of the registers by digging them out from
  // the stack. This is needed to allow nested break points.
  if (mode == ExitFrame::MODE_DEBUG) {
    // It's okay to clobber register ebx below because we don't need
    // the function pointer after this.
    const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
    int kOffset = ExitFrameConstants::kCodeOffset - kCallerSavedSize;
    lea(ebx, Operand(ebp, kOffset));
    CopyRegistersFromStackToMemory(ebx, ecx, kJSCallerSaved);
  }
#endif

  // Get the return address from the stack and restore the frame pointer.
  mov(ecx, Operand(ebp, 1 * kPointerSize));
  mov(ebp, Operand(ebp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  // esi was set up by EnterExitFrame to point at the argument area.
  lea(esp, Operand(esi, 1 * kPointerSize));

  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Top::k_context_address);
  mov(esi, Operand::StaticVariable(context_address));
#ifdef DEBUG
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Push the return address to get ready to return.
  push(ecx);

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}
511
512
// Push a stack handler (try-catch, try-finally or JS-entry) on the stack
// and link it into Top's handler chain. The return address must already be
// on top of the stack.
void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  // Adjust this code if not the case.
  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
  // The pc (return address) is already on TOS.
  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      push(Immediate(StackHandler::TRY_CATCH));
    } else {
      push(Immediate(StackHandler::TRY_FINALLY));
    }
    push(ebp);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The frame pointer does not point to a JS frame so we save NULL
    // for ebp. We expect the code throwing an exception to check ebp
    // before dereferencing it to restore the context.
    push(Immediate(StackHandler::ENTRY));
    push(Immediate(0));  // NULL frame pointer.
  }
  // Save the current handler as the next handler.
  push(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
  // Link this handler as the new current one.
  mov(Operand::StaticVariable(ExternalReference(Top::k_handler_address)), esp);
}
538
539
// Unlink the current stack handler: restore the next-handler pointer into
// Top and drop the rest of the handler record from the stack.
void MacroAssembler::PopTryHandler() {
  ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
  pop(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
  add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize));
}
545
546
// Security check for accessing a global proxy: jump to 'miss' unless the
// current lexical context and the global proxy in 'holder_reg' either share
// the same global context or carry the same security token.
// Clobbers 'scratch'; 'holder_reg' is preserved (saved/restored via push/pop).
void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));

  // Load current lexical context from the stack frame.
  mov(scratch, Operand(ebp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (FLAG_debug_code) {
    cmp(Operand(scratch), Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
  }
  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, offset));
  mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    push(scratch);
    // Read the first word and compare to global_context_map.
    mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
    cmp(scratch, Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(scratch);
  }

  // Check if both contexts are the same.
  cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  j(equal, &same_contexts, taken);

  // Compare security tokens, save holder_reg on the stack so we can use it
  // as a temporary register.
  //
  // TODO(119): avoid push(holder_reg)/pop(holder_reg)
  push(holder_reg);
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    cmp(holder_reg, Factory::null_value());
    Check(not_equal, "JSGlobalProxy::context() should not be null.");

    push(holder_reg);
    // Read the first word and compare to global_context_map(),
    mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    cmp(holder_reg, Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);
  }

  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, token_offset));
  cmp(scratch, FieldOperand(holder_reg, token_offset));
  pop(holder_reg);
  j(not_equal, miss, not_taken);

  bind(&same_contexts);
}
613
614
// Load the new-space allocation top into 'result'. If RESULT_CONTAINS_TOP
// is set, 'result' already holds the top and only a debug check is emitted.
// If 'scratch' is a valid register it is left holding the address of the
// allocation-top variable (reused later by UpdateAllocationTopHelper).
void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register result_end,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    ASSERT(scratch.is(no_reg));
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    cmp(result, Operand::StaticVariable(new_space_allocation_top));
    Check(equal, "Unexpected allocation top");
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available.
  if (scratch.is(no_reg)) {
    mov(result, Operand::StaticVariable(new_space_allocation_top));
  } else {
    ASSERT(!scratch.is(result_end));
    mov(Operand(scratch), Immediate(new_space_allocation_top));
    mov(result, Operand(scratch, 0));
  }
}
643
644
// Store 'result_end' as the new new-space allocation top. If 'scratch' is
// valid it must already hold the address of the allocation-top variable
// (set up by LoadAllocationTopHelper). Debug builds verify alignment.
void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch) {
  if (FLAG_debug_code) {
    test(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, "Unaligned allocation in new space");
  }

  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Update new top. Use scratch if available.
  if (scratch.is(no_reg)) {
    mov(Operand::StaticVariable(new_space_allocation_top), result_end);
  } else {
    mov(Operand(scratch, 0), result_end);
  }
}
662
663
// Allocate 'object_size' bytes (a compile-time constant) in new space.
// On success 'result' holds the new object (tagged if TAG_OBJECT is set);
// on exhaustion jumps to 'gc_required'. If 'result_end' is no_reg the
// bump-and-subtract variant is used so only 'result' is needed.
void MacroAssembler::AllocateInNewSpace(int object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // If no result_end register was supplied, bump 'result' itself.
  Register top_reg = result_end.is_valid() ? result_end : result;

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();

  if (top_reg.is(result)) {
    add(Operand(top_reg), Immediate(object_size));
  } else {
    lea(top_reg, Operand(result, object_size));
  }
  cmp(top_reg, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required, not_taken);

  // Update allocation top.
  UpdateAllocationTopHelper(top_reg, scratch);

  // Tag result if requested.
  if (top_reg.is(result)) {
    // 'result' currently holds the new top; subtract back to the object
    // start, folding in the heap-object tag when requested.
    if ((flags & TAG_OBJECT) != 0) {
      sub(Operand(result), Immediate(object_size - kHeapObjectTag));
    } else {
      sub(Operand(result), Immediate(object_size));
    }
  } else if ((flags & TAG_OBJECT) != 0) {
    add(Operand(result), Immediate(kHeapObjectTag));
  }
}
703
704
// Allocate (header_size + element_count << element_size) bytes in new
// space. On success 'result' holds the new object (tagged if TAG_OBJECT)
// and 'result_end' the allocation end; jumps to 'gc_required' on failure.
void MacroAssembler::AllocateInNewSpace(int header_size,
                                        ScaleFactor element_size,
                                        Register element_count,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  lea(result_end, Operand(result, element_count, element_size, header_size));
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}
733
734
// Allocate 'object_size' bytes (a runtime value in a register) in new
// space. On success 'result' holds the new object (tagged if TAG_OBJECT)
// and 'result_end' the allocation end; jumps to 'gc_required' on failure.
// 'object_size' may alias 'result_end'.
void MacroAssembler::AllocateInNewSpace(Register object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  if (!object_size.is(result_end)) {
    mov(result_end, object_size);
  }
  add(result_end, Operand(result));
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required, not_taken);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}
764
765
// Roll back the most recent new-space allocation by resetting the
// allocation top to the (untagged) start of 'object'. Only valid when no
// other allocation happened in between. Clobbers 'object' (untags it).
void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Make sure the object has no tag before resetting top.
  and_(Operand(object), Immediate(~kHeapObjectTagMask));
#ifdef DEBUG
  cmp(object, Operand::StaticVariable(new_space_allocation_top));
  Check(below, "Undo allocation of non allocated memory");
#endif
  mov(Operand::StaticVariable(new_space_allocation_top), object);
}
778
779
// Allocate a HeapNumber in new space, leaving the tagged pointer in
// 'result' with its map set (value field left uninitialized). Jumps to
// 'gc_required' when new space is exhausted. Clobbers both scratches.
void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate heap number in new space.
  AllocateInNewSpace(HeapNumber::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::heap_number_map()));
}
796
797
// Allocate a sequential two-byte string of 'length' characters in new
// space; sets map, smi length and empty hash field. Characters are left
// uninitialized. 'result' receives the tagged string; all three scratch
// registers are clobbered; jumps to 'gc_required' on exhaustion.
void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  ASSERT(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
  and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));

  // Allocate two byte string in new space.
  AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
831
832
// Allocate a sequential ASCII string of |length| characters in new space.
// On success |result| holds the tagged string with map, (smi) length and
// empty hash field initialized; the character data is uninitialized.
// Jumps to |gc_required| on allocation failure. All scratch registers are
// clobbered.
void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, length);
  // One byte per character is assumed by the size computation below.
  ASSERT(kCharSize == 1);
  // Round the byte count up to object alignment.
  add(Operand(scratch1), Immediate(kObjectAlignmentMask));
  and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));

  // Allocate ascii string in new space.
  AllocateInNewSpace(SeqAsciiString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::ascii_string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
866
867
// Allocate a (two-byte) cons string object in new space and leave a tagged
// pointer to it in |result|. Only the map is initialized; length, hash and
// the first/second components are left for the caller to set.
// Jumps to |gc_required| on allocation failure.
void MacroAssembler::AllocateConsString(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::cons_string_map()));
}
884
885
// Allocate an ASCII cons string object in new space and leave a tagged
// pointer to it in |result|. Only the map is initialized; length, hash and
// the first/second components are left for the caller to set.
// Jumps to |gc_required| on allocation failure.
void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  // Allocate cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::cons_ascii_string_map()));
}
902
903
// Branch to |then_target| if |result| is zero while |op| is negative, i.e.
// the (virtual-frame codegen) case where an integer multiplication produced
// a result that should have been negative zero. Falls through otherwise.
void MacroAssembler::NegativeZeroTest(CodeGenerator* cgen,
                                      Register result,
                                      Register op,
                                      JumpTarget* then_target) {
  JumpTarget ok;
  // A non-zero result can never be negative zero.
  test(result, Operand(result));
  ok.Branch(not_zero, taken);
  // result == 0: if the operand is negative the true result is -0.
  test(op, Operand(op));
  then_target->Branch(sign, not_taken);
  ok.Bind();
}
915
916
// Branch to |then_label| if |result| is zero while |op| is negative — the
// case where an integer operation really produced negative zero.
// Falls through otherwise.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  // A non-zero result can never be negative zero.
  test(result, Operand(result));
  j(not_zero, &ok, taken);
  // result == 0: if the operand is negative the true result is -0.
  test(op, Operand(op));
  j(sign, then_label, not_taken);
  bind(&ok);
}
927
928
// Branch to |then_label| if |result| is zero while either |op1| or |op2| is
// negative (sign bit of op1|op2 set) — the negative-zero case for a two
// operand operation. |scratch| is clobbered. Falls through otherwise.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  // A non-zero result can never be negative zero.
  test(result, Operand(result));
  j(not_zero, &ok, taken);
  // result == 0: the sign bit of op1|op2 is set iff either operand is
  // negative, in which case the true result is -0.
  mov(scratch, Operand(op1));
  or_(scratch, Operand(op2));
  j(sign, then_label, not_taken);
  bind(&ok);
}
942
943
// Load the prototype that instances created by |function| would get into
// |result|. Handles both the direct-prototype case and the case where the
// function has an initial map (prototype is read from the map). Functions
// flagged as having a non-instance prototype read it from the map's
// constructor field instead. Jumps to |miss| if |function| is a smi, not a
// function, or its prototype slot holds the hole. |scratch| is clobbered.
void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss) {
  // Check that the receiver isn't a smi.
  test(function, Immediate(kSmiTagMask));
  j(zero, miss, not_taken);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss, not_taken);

  // Make sure that the function has an instance prototype.
  Label non_instance;
  movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
  test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance, not_taken);

  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(Operand(result), Immediate(Factory::the_hole_value()));
  j(equal, miss, not_taken);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  mov(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}
989
990
// Emit a call to |stub|, compiling it first if necessary (GetCode may
// allocate and can therefore GC).
void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  call(stub->GetCode(), RelocInfo::CODE_TARGET);
}
995
996
// Non-allocating variant of CallStub: returns the Failure object instead of
// calling if the stub code could not be obtained without a GC. On success
// the call is emitted and the stub's code object is returned.
Object* MacroAssembler::TryCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Object* result = stub->TryGetCode();
  if (!result->IsFailure()) {
    call(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
  }
  return result;
}
1005
1006
// Emit a tail call (jump) to |stub|, compiling it first if necessary.
void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
}
1011
1012
// Non-allocating variant of TailCallStub: returns the Failure object instead
// of jumping if the stub code could not be obtained without a GC.
Object* MacroAssembler::TryTailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Object* result = stub->TryGetCode();
  if (!result->IsFailure()) {
    jmp(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
  }
  return result;
}
1021
1022
// Return from a stub taking |argc| stack arguments. The receiver is popped
// by the ret itself, hence argc - 1 extra pointer-sized slots.
void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}
1027
1028
// Handle a runtime call with the wrong number of arguments: drop the
// arguments from the stack and produce undefined in eax as the result.
void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    add(Operand(esp), Immediate(num_arguments * kPointerSize));
  }
  mov(eax, Immediate(Factory::undefined_value()));
}
1035
1036
// Convenience overload: call the runtime function identified by |id|.
void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}
1040
1041
// Convenience overload: non-allocating call to the runtime function
// identified by |id| (see TryCallRuntime below).
Object* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
                                       int num_arguments) {
  return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
}
1046
1047
// Call runtime function |f| with |num_arguments| arguments already on the
// stack. Sets up eax (argument count) and ebx (runtime entry) as expected by
// CEntryStub. If the argument count does not match the function's fixed
// arity, emits the IllegalOperation sequence instead of calling.
void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f)));
  CEntryStub ces(1);
  CallStub(&ces);
}
1066
1067
// Non-allocating variant of CallRuntime: uses TryCallStub so a failure to
// obtain the CEntry stub code is reported to the caller instead of forcing
// a GC. Returns a non-failure object when the call was emitted.
Object* MacroAssembler::TryCallRuntime(Runtime::Function* f,
                                       int num_arguments) {
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    // Since we did not call the stub, there was no allocation failure.
    // Return some non-failure object.
    return Heap::undefined_value();
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f)));
  CEntryStub ces(1);
  return TryCallStub(&ces);
}
1086
1087
// Call an arbitrary external (C++) runtime entry through CEntryStub with
// |num_arguments| arguments already on the stack.
void MacroAssembler::CallExternalReference(ExternalReference ref,
                                           int num_arguments) {
  mov(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ref));

  CEntryStub stub(1);
  CallStub(&stub);
}
1096
1097
// Tail call an external runtime entry with |num_arguments| arguments already
// on the stack. |result_size| is unused on ia32 (results fit in registers).
void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  JumpToExternalReference(ext);
}
1108
1109
// Convenience overload: tail call the runtime function identified by |fid|.
void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid), num_arguments, result_size);
}
1115
1116
// Save the current HandleScope state (extensions count, next and limit
// pointers) on the stack and reset the extensions count, opening a fresh
// scope. Must be balanced by PopHandleScope/TryPopHandleScope.
// |scratch| is clobbered.
void MacroAssembler::PushHandleScope(Register scratch) {
  // Push the number of extensions, smi-tagged so the gc will ignore it.
  ExternalReference extensions_address =
      ExternalReference::handle_scope_extensions_address();
  mov(scratch, Operand::StaticVariable(extensions_address));
  SmiTag(scratch);
  push(scratch);
  mov(Operand::StaticVariable(extensions_address), Immediate(0));
  // Push next and limit pointers which will be wordsize aligned and
  // hence automatically smi tagged.
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  push(Operand::StaticVariable(next_address));
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address();
  push(Operand::StaticVariable(limit_address));
}
1134
1135
// Restore the HandleScope state saved by PushHandleScope. If the scope
// acquired extensions they are deleted via the runtime, preserving |saved|
// across that call. With |gc_allowed| false the runtime is entered through
// TryCallRuntime and a Failure is returned to the caller instead of
// triggering a GC; otherwise returns NULL. |scratch| is clobbered.
Object* MacroAssembler::PopHandleScopeHelper(Register saved,
                                             Register scratch,
                                             bool gc_allowed) {
  Object* result = NULL;
  ExternalReference extensions_address =
      ExternalReference::handle_scope_extensions_address();
  Label write_back;
  mov(scratch, Operand::StaticVariable(extensions_address));
  cmp(Operand(scratch), Immediate(0));
  j(equal, &write_back);
  // Extensions were allocated in this scope: delete them, keeping |saved|
  // alive across the runtime call.
  push(saved);
  if (gc_allowed) {
    CallRuntime(Runtime::kDeleteHandleScopeExtensions, 0);
  } else {
    result = TryCallRuntime(Runtime::kDeleteHandleScopeExtensions, 0);
    if (result->IsFailure()) return result;
  }
  pop(saved);

  bind(&write_back);
  // Pop in reverse order of PushHandleScope: limit, next, then the
  // smi-tagged extensions count.
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address();
  pop(Operand::StaticVariable(limit_address));
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  pop(Operand::StaticVariable(next_address));
  pop(scratch);
  SmiUntag(scratch);
  mov(Operand::StaticVariable(extensions_address), scratch);

  return result;
}
1168
1169
// Restore the previous HandleScope; GC is allowed during extension cleanup.
void MacroAssembler::PopHandleScope(Register saved, Register scratch) {
  PopHandleScopeHelper(saved, scratch, true);
}
1173
1174
// Restore the previous HandleScope without allowing GC; returns a Failure
// object if extension cleanup could not run.
Object* MacroAssembler::TryPopHandleScope(Register saved, Register scratch) {
  return PopHandleScopeHelper(saved, scratch, false);
}
1178
1179
// Tail call into the C entry stub with |ext| as the runtime entry point
// (passed in ebx, as CEntryStub expects).
void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces(1);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}
1186
1187
// Emit the argument-count check that precedes a function invocation. When
// the expected and actual counts may differ, eax is loaded with the actual
// count, ebx with the expected count, edx with the code to invoke, and the
// ArgumentsAdaptorTrampoline is called (or jumped to, for JUMP_FUNCTION).
// When the counts provably match, falls through without emitting the check.
// After a CALL through the adaptor, control jumps to |done| (the invocation
// has completed); otherwise execution continues at the fall-through point.
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    const Operand& code_operand,
                                    Label* done,
                                    InvokeFlag flag) {
  bool definitely_matches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      // Both counts known and equal: no adaption needed.
      definitely_matches = true;
    } else {
      mov(eax, actual.immediate());
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      ASSERT(expected.reg().is(ebx));
      mov(eax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), Operand(actual.reg()));
      j(equal, &invoke);
      ASSERT(actual.reg().is(eax));
      ASSERT(expected.reg().is(ebx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
    // The adaptor expects the code entry in edx.
    if (!code_constant.is_null()) {
      mov(edx, Immediate(code_constant));
      add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_operand.is_reg(edx)) {
      mov(edx, code_operand);
    }

    if (flag == CALL_FUNCTION) {
      call(adaptor, RelocInfo::CODE_TARGET);
      jmp(done);
    } else {
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
1251
1252
// Invoke code held in an operand (register or memory), calling or jumping
// depending on |flag|, after emitting the argument-count prologue.
void MacroAssembler::InvokeCode(const Operand& code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag) {
  Label done;
  InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code);
  }
  bind(&done);
}
1267
1268
// Invoke a code object given as a handle, with relocation mode |rmode|,
// after emitting the argument-count prologue.
void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag) {
  Label done;
  // The operand is unused by the prologue when a code constant is given.
  Operand dummy(eax);
  InvokePrologue(expected, actual, code, dummy, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code, rmode);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code, rmode);
  }
  bind(&done);
}
1285
1286
// Invoke the JSFunction in edi: loads its context into esi, its formal
// parameter count (untagged) into ebx, and calls/jumps to its code entry.
// Clobbers ebx and edx.
void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(fun.is(edi));
  mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  // The parameter count is stored as a smi.
  SmiUntag(ebx);
  mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset));
  // Skip the code object header to get the instruction start.
  lea(edx, FieldOperand(edx, Code::kHeaderSize));

  ParameterCount expected(ebx);
  InvokeCode(Operand(edx), expected, actual, flag);
}
1301
1302
// Invoke a statically-known, already compiled JSFunction: materializes the
// function in edi, loads its context into esi, and invokes its cached code
// with the statically-known formal parameter count.
void MacroAssembler::InvokeFunction(JSFunction* function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(function->is_compiled());
  // Get the function and setup the context.
  mov(edi, Immediate(Handle<JSFunction>(function)));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  // Invoke the cached code.
  Handle<Code> code(function->code());
  ParameterCount expected(function->shared()->formal_parameter_count());
  InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag);
}
1316
1317
// Invoke the JavaScript builtin |id|. The builtin's entry point is loaded
// into edx; argument-count checking is skipped by passing matching (fake)
// parameter counts.
void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments match the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinEntry(edx, id);
  InvokeCode(Operand(edx), expected, expected, flag);
}
1329
1330
// Load the instruction-start address of JavaScript builtin |id| into
// |target|. Also loads the builtin's JSFunction into edi (callers rely on
// this), so |target| must not be edi. In debug builds, verifies that the
// code object cached in the builtins object matches the one in the builtin
// function's SharedFunctionInfo.
void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(edi));

  // Load the builtins object into target register.
  mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));

  // Load the JavaScript builtin function from the builtins object.
  mov(edi, FieldOperand(target, JSBuiltinsObject::OffsetOfFunctionWithId(id)));

  // Load the code entry point from the builtins object.
  mov(target, FieldOperand(target, JSBuiltinsObject::OffsetOfCodeWithId(id)));
  if (FLAG_debug_code) {
    // Make sure the code objects in the builtins object and in the
    // builtin function are the same.
    push(target);
    mov(target, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
    mov(target, FieldOperand(target, SharedFunctionInfo::kCodeOffset));
    cmp(target, Operand(esp, 0));
    Assert(equal, "Builtin code object changed");
    pop(target);
  }
  // Skip the code object header to get the instruction start.
  lea(target, FieldOperand(target, Code::kHeaderSize));
}
1355
1356
// Load into |dst| the function context that is |context_chain_length| hops
// up the context chain from the current context (esi). The final FCONTEXT
// load normalizes an intermediate context to its function context.
void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    mov(dst, Operand(esi, Context::SlotOffset(Context::CLOSURE_INDEX)));
    // Load the function context (which is the incoming, outer context).
    mov(dst, FieldOperand(dst, JSFunction::kContextOffset));
    for (int i = 1; i < context_chain_length; i++) {
      mov(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
      mov(dst, FieldOperand(dst, JSFunction::kContextOffset));
    }
    // The context may be an intermediate context, not a function context.
    mov(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  } else {  // Slot is in the current function context.
    // The context may be an intermediate context, not a function context.
    mov(dst, Operand(esi, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  }
}
1374
1375
1376
// Emit a plain return (no stack arguments popped).
void MacroAssembler::Ret() {
  ret(0);
}
1380
1381
Leon Clarkee46be812010-01-19 14:06:41 +00001382void MacroAssembler::Drop(int stack_elements) {
1383 if (stack_elements > 0) {
1384 add(Operand(esp), Immediate(stack_elements * kPointerSize));
1385 }
1386}
1387
1388
// Load the handle |value| into |dst|.
void MacroAssembler::Move(Register dst, Handle<Object> value) {
  mov(dst, value);
}
1392
1393
// Store |value| into a stats counter, but only when native-code counters
// are compiled in and this counter is enabled.
void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
  }
}
1399
1400
// Add |value| (> 0) to a stats counter when counters are enabled; uses inc
// for the common value == 1 case.
void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      inc(operand);
    } else {
      add(operand, Immediate(value));
    }
  }
}
1412
1413
// Subtract |value| (> 0) from a stats counter when counters are enabled;
// uses dec for the common value == 1 case.
void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      dec(operand);
    } else {
      sub(operand, Immediate(value));
    }
  }
}
1425
1426
// Conditionally add |value| to a stats counter when condition |cc| holds.
// The processor flags are saved/restored (pushfd/popfd) around the update
// so the caller's condition codes survive.
void MacroAssembler::IncrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    IncrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}
1440
1441
// Conditionally subtract |value| from a stats counter when condition |cc|
// holds, preserving the processor flags across the update.
void MacroAssembler::DecrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    DecrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}
1455
1456
// Debug-only check: in debug-code builds, abort with |msg| unless |cc|
// holds. Emits nothing in release builds.
void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code) Check(cc, msg);
}
1460
1461
// Unconditional check: abort with |msg| unless condition |cc| holds.
void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L, taken);
  Abort(msg);
  // will not return here
  bind(&L);
}
1469
1470
// Emit a check that esp satisfies the OS activation-frame alignment; traps
// with int3 on misalignment. No code is emitted when the required alignment
// is no stricter than the natural pointer alignment.
void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    Label alignment_as_expected;
    test(esp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}
1484
1485
// Emit an unconditional abort: passes |msg| to Runtime::kAbort encoded as a
// smi-aligned pointer plus a smi alignment delta (so the GC never mistakes
// either for a heap pointer), then traps. Never returns.
void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  set_allow_stub_calls(true);

  push(eax);
  push(Immediate(p0));
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
  int3();
}
1511
1512
// Given an instance type in |instance_type|, jump to |failure| unless it
// denotes a sequential ASCII string. |scratch| is clobbered (may alias
// |instance_type|, which is then clobbered too).
void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Register scratch,
    Label* failure) {
  if (!scratch.is(instance_type)) {
    mov(scratch, instance_type);
  }
  // Keep only the string-ness, representation and encoding bits, then
  // compare against the sequential-ASCII pattern.
  and_(scratch,
       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
  cmp(scratch, kStringTag | kSeqStringTag | kAsciiStringTag);
  j(not_equal, failure);
}
1525
1526
// Jump to |failure| unless both |object1| and |object2| are flat (sequential)
// ASCII strings. Both scratch registers are clobbered. The two instance-type
// checks are interleaved into a single compare.
void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register object1,
                                                         Register object2,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         Label* failure) {
  // Check that both objects are not smis.
  ASSERT_EQ(0, kSmiTag);
  // The AND of two heap-object pointers still has the (non-zero) tag bit
  // set; it is zero only if at least one operand is a smi.
  mov(scratch1, Operand(object1));
  and_(scratch1, Operand(object2));
  test(scratch1, Immediate(kSmiTagMask));
  j(zero, failure);

  // Load instance type for both strings.
  mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
  mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
  movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ascii strings.
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
  // Interleave bits from both instance types and compare them in one check.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  and_(scratch1, kFlatAsciiStringMask);
  and_(scratch2, kFlatAsciiStringMask);
  // scratch1 = scratch1 | (scratch2 << 3), bit patterns do not overlap.
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3));
  j(not_equal, failure);
}
1557
1558
Steve Block6ded16b2010-05-10 14:33:55 +01001559void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
1560 int frameAlignment = OS::ActivationFrameAlignment();
1561 if (frameAlignment != 0) {
1562 // Make stack end at alignment and make room for num_arguments words
1563 // and the original value of esp.
1564 mov(scratch, esp);
1565 sub(Operand(esp), Immediate((num_arguments + 1) * kPointerSize));
1566 ASSERT(IsPowerOf2(frameAlignment));
1567 and_(esp, -frameAlignment);
1568 mov(Operand(esp, num_arguments * kPointerSize), scratch);
1569 } else {
1570 sub(Operand(esp), Immediate(num_arguments * kPointerSize));
1571 }
1572}
1573
1574
// Call the C function at external reference |function| with |num_arguments|
// words already placed on the stack by PrepareCallCFunction.
void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Trashing eax is ok as it will be the return value.
  mov(Operand(eax), Immediate(function));
  CallCFunction(eax, num_arguments);
}
1581
1582
// Call the C function whose address is in |function|, then unwind the
// argument area set up by PrepareCallCFunction. When frame alignment was
// applied, the saved original esp is reloaded from above the arguments;
// otherwise esp is simply bumped past them.
void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  // Check stack alignment.
  if (FLAG_debug_code) {
    CheckStackAlignment();
  }

  call(Operand(function));
  if (OS::ActivationFrameAlignment() != 0) {
    // Restore the esp value saved by PrepareCallCFunction.
    mov(esp, Operand(esp, num_arguments * kPointerSize));
  } else {
    add(Operand(esp), Immediate(num_arguments * sizeof(int32_t)));
  }
}
1597
1598
// RAII helper for patching already-generated code in place: points a fresh
// macro assembler at |address| with room for |size| bytes of instructions.
CodePatcher::CodePatcher(byte* address, int size)
    : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap on order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
1606
1607
// Flush the instruction cache over the patched region and verify that
// exactly |size_| bytes of code were emitted.
CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
1616
1617
1618} } // namespace v8::internal
Leon Clarkef7060e22010-06-03 12:02:55 +01001619
1620#endif // V8_TARGET_ARCH_IA32