blob: b83f9bc75b94deba5c04b09f45a9b50307eb4fda [file] [log] [blame]
Steve Blocka7e24c12009-10-30 11:49:00 +00001// Copyright 2006-2009 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
29
Leon Clarkef7060e22010-06-03 12:02:55 +010030#if defined(V8_TARGET_ARCH_IA32)
31
Steve Blocka7e24c12009-10-30 11:49:00 +000032#include "bootstrapper.h"
33#include "codegen-inl.h"
34#include "debug.h"
35#include "runtime.h"
36#include "serialize.h"
37
38namespace v8 {
39namespace internal {
40
41// -------------------------------------------------------------------------
42// MacroAssembler implementation.
43
// Construct a macro assembler emitting into the given buffer of the given
// size.  Stub bookkeeping starts permissive: not generating a stub, and
// stub calls allowed.
MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      // Starts as undefined; expected to be patched to the real code object
      // later (EnterFrame checks for the undefined placeholder).
      code_object_(Heap::undefined_value()) {
}
50
51
// Set the dirty mark for the page region covering |addr|.  |object| must
// point into the same page as |addr|; both |object| and |addr| are
// clobbered (reused for the page start and the region number).
void MacroAssembler::RecordWriteHelper(Register object,
                                       Register addr,
                                       Register scratch) {
  if (FLAG_debug_code) {
    // Check that the object is not in new space (new-space stores need no
    // write barrier and must not reach this helper).
    Label not_in_new_space;
    InNewSpace(object, scratch, not_equal, &not_in_new_space);
    Abort("new-space object passed to RecordWriteHelper");
    bind(&not_in_new_space);
  }

  // Compute the page start address from the heap object pointer, and reuse
  // the 'object' register for it.
  and_(object, ~Page::kPageAlignmentMask);

  // Compute the number of the region covering addr.  See
  // Page::GetRegionNumberForAddress for details.
  and_(addr, Page::kPageAlignmentMask);
  shr(addr, Page::kRegionSizeLog2);

  // Set the dirty-mark bit for the region.
  bts(Operand(object, Page::kDirtyFlagOffset), addr);
}
75
76
// Branch to |branch| depending on whether |object| lies in new space:
// cc == equal branches when it does, cc == not_equal when it does not.
// |scratch| is clobbered.
void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                Label* branch) {
  ASSERT(cc == equal || cc == not_equal);
  if (Serializer::enabled()) {
    // Can't do arithmetic on external references if it might get serialized.
    mov(scratch, Operand(object));
    // The mask isn't really an address.  We load it as an external
    // reference in case the size of the new space is different between the
    // snapshot maker and the running system.
    and_(Operand(scratch), Immediate(ExternalReference::new_space_mask()));
    cmp(Operand(scratch), Immediate(ExternalReference::new_space_start()));
    j(cc, branch);
  } else {
    // Outside the serializer the new-space start is a plain constant, so
    // the membership test is a single lea + and against the raw value.
    int32_t new_space_start = reinterpret_cast<int32_t>(
        ExternalReference::new_space_start().address());
    lea(scratch, Operand(object, -new_space_start));
    and_(scratch, Heap::NewSpaceMask());
    j(cc, branch);
  }
}
99
100
// For the page containing |object| mark the region covering
// [object+offset] dirty.  |object| is the object being stored into,
// |value| is the object being stored.  If |offset| is zero, then the
// |scratch| register contains the array index into the elements array
// represented as a Smi.  All registers are clobbered by the operation.
void MacroAssembler::RecordWrite(Register object, int offset,
                                 Register value, Register scratch) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are esi.
  ASSERT(!object.is(esi) && !value.is(esi) && !scratch.is(esi));

  // First, check if a write barrier is even needed.  The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  // Skip barrier if writing a smi.
  ASSERT_EQ(0, kSmiTag);
  test(value, Immediate(kSmiTagMask));
  j(zero, &done);

  // Skip the barrier as well if the object itself is in new space.
  InNewSpace(object, value, equal, &done);

  // The offset is relative to a tagged or untagged HeapObject pointer,
  // so either offset or offset + kHeapObjectTag must be a
  // multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize) ||
         IsAligned(offset + kHeapObjectTag, kPointerSize));

  Register dst = scratch;
  if (offset != 0) {
    lea(dst, Operand(object, offset));
  } else {
    // Array access: calculate the destination address in the same manner
    // as KeyedStoreIC::GenerateGeneric.  Multiply a smi by 2 to get an
    // offset into an array of words.
    ASSERT_EQ(1, kSmiTagSize);
    ASSERT_EQ(0, kSmiTag);
    lea(dst, Operand(object, dst, times_half_pointer_size,
                     FixedArray::kHeaderSize - kHeapObjectTag));
  }
  RecordWriteHelper(object, dst, value);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    mov(object, Immediate(BitCast<int32_t>(kZapValue)));
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
    mov(scratch, Immediate(BitCast<int32_t>(kZapValue)));
  }
}
154
155
// Jump to |on_stack_overflow| if esp has grown below the stack limit.
void MacroAssembler::StackLimitCheck(Label* on_stack_overflow) {
  cmp(esp,
      Operand::StaticVariable(ExternalReference::address_of_stack_limit()));
  j(below, on_stack_overflow);
}
161
162
#ifdef ENABLE_DEBUGGER_SUPPORT
// Copy the JS caller-saved registers selected by |regs| into their
// dedicated debugger memory locations.
void MacroAssembler::SaveRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of registers to memory location.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(Operand::StaticVariable(reg_addr), reg);
    }
  }
}


// Inverse of SaveRegistersToMemory: reload the selected registers from
// their debugger memory locations.  Iterates in reverse order.
void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of memory location to registers.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(reg, Operand::StaticVariable(reg_addr));
    }
  }
}


// Push the debugger memory copies of the selected registers onto the
// stack (ascending register order, so pops match PopRegistersToMemory).
void MacroAssembler::PushRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Push the content of the memory location to the stack.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      push(Operand::StaticVariable(reg_addr));
    }
  }
}


// Pop stack values back into the debugger memory copies of the selected
// registers, in the reverse order of PushRegistersFromMemory.
void MacroAssembler::PopRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Pop the content from the stack to the memory location.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      pop(Operand::StaticVariable(reg_addr));
    }
  }
}


// Copy register values previously spilled on the stack (starting at
// |base|) into the debugger memory locations, advancing |base| one slot
// per copied register.  |scratch| is clobbered.
void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
                                                    Register scratch,
                                                    RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of the stack to the memory location and adjust base.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      mov(scratch, Operand(base, 0));
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(Operand::StaticVariable(reg_addr), scratch);
      lea(base, Operand(base, kPointerSize));
    }
  }
}

// Emit a call into the runtime's debug-break entry with zero arguments.
void MacroAssembler::DebugBreak() {
  Set(eax, Immediate(0));  // No arguments.
  mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak)));
  CEntryStub ces(1);
  call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}
#endif
246
247void MacroAssembler::Set(Register dst, const Immediate& x) {
248 if (x.is_zero()) {
249 xor_(dst, Operand(dst)); // shorter than mov
250 } else {
251 mov(dst, x);
252 }
253}
254
255
// Store the immediate |x| into the memory operand |dst|.
void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}
259
260
// Load the map of |heap_object| into |map| and compare its instance type
// against |type|, setting the flags for a following conditional jump.
void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}
267
268
// Byte-compare the instance type stored in |map| against |type|.
void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       static_cast<int8_t>(type));
}
273
274
// Jump to |fail| if |obj| does not have the given |map|.  If
// |is_heap_object| is false, a smi check is performed first and smis also
// jump to |fail| (smis have no map).
void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              bool is_heap_object) {
  if (!is_heap_object) {
    test(obj, Immediate(kSmiTagMask));
    j(zero, fail);
  }
  cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
  j(not_equal, fail);
}
286
287
// Load the map and instance type of |heap_object| (into |map| and
// |instance_type|) and test whether it is a string.  Returns the condition
// that holds when it is a string (the zero flag after the test).
Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  // The string test relies on the not-string tag being non-zero.
  ASSERT(kNotStringTag != 0);
  test(instance_type, Immediate(kIsNotStringMask));
  return zero;
}
297
298
// Jump to |fail| unless |heap_object| is a JS object.  Leaves the object's
// map in |map|; |scratch| is clobbered.
void MacroAssembler::IsObjectJSObjectType(Register heap_object,
                                          Register map,
                                          Register scratch,
                                          Label* fail) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  IsInstanceJSObjectType(map, scratch, fail);
}
306
307
// Jump to |fail| unless the instance type in |map| lies in
// [FIRST_JS_OBJECT_TYPE, LAST_JS_OBJECT_TYPE].  Implemented as a single
// unsigned range check after biasing by the lower bound.
void MacroAssembler::IsInstanceJSObjectType(Register map,
                                            Register scratch,
                                            Label* fail) {
  movzx_b(scratch, FieldOperand(map, Map::kInstanceTypeOffset));
  sub(Operand(scratch), Immediate(FIRST_JS_OBJECT_TYPE));
  cmp(scratch, LAST_JS_OBJECT_TYPE - FIRST_JS_OBJECT_TYPE);
  j(above, fail);  // Unsigned "above" also catches types below the range.
}
316
317
// Compare the two values on top of the FPU stack and pop both, leaving the
// comparison result in the CPU flags.  Uses fucomip when available (CMOV
// implies its presence); otherwise transfers the FPU status through eax
// with fnstsw/sahf.
void MacroAssembler::FCmp() {
  if (CpuFeatures::IsSupported(CMOV)) {
    fucomip();   // Compare st(0), st(1); sets EFLAGS; pops one value.
    ffree(0);    // Free and pop the remaining value.
    fincstp();
  } else {
    fucompp();   // Compare and pop both; result in FPU status word.
    push(eax);   // Preserve eax across the status transfer.
    fnstsw_ax();
    sahf();      // Copy FPU condition bits into EFLAGS.
    pop(eax);
  }
}
331
332
// Debug-mode assertion: abort if |object| is neither a smi nor a heap
// number.
void MacroAssembler::AbortIfNotNumber(Register object) {
  Label ok;
  test(object, Immediate(kSmiTagMask));
  j(zero, &ok);  // Smis are numbers.
  cmp(FieldOperand(object, HeapObject::kMapOffset),
      Factory::heap_number_map());
  Assert(equal, "Operand not a number");
  bind(&ok);
}
342
343
// Debug-mode assertion: abort if |object| is not a smi.
void MacroAssembler::AbortIfNotSmi(Register object) {
  test(object, Immediate(kSmiTagMask));
  Assert(equal, "Operand not a smi");
}
348
349
// Build a standard internal frame: saved ebp, context (esi), the frame
// type marker as a smi, and this assembler's code object.
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, Operand(esp));
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(CodeObject()));
  if (FLAG_debug_code) {
    // The code object starts out as undefined and must have been patched
    // to the real code object by the time this frame is entered.
    cmp(Operand(esp, 0), Immediate(Factory::undefined_value()));
    Check(not_equal, "code object not properly patched");
  }
}
361
362
// Tear down a frame built by EnterFrame.  In debug mode, verify the frame
// being left carries the expected type marker.
void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (FLAG_debug_code) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, "stack frame types must match");
  }
  leave();
}
371
// Set up the fixed part of an exit frame: saved ebp, a slot for the entry
// stack pointer (patched later), the code object, and record ebp/esi in
// the c_entry_fp and context top-of-stack variables.  |mode| is unused in
// the prologue itself.
void MacroAssembler::EnterExitFramePrologue(ExitFrame::Mode mode) {
  // Setup the frame structure on the stack.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(ebp);
  mov(ebp, Operand(esp));

  // Reserve room for entry stack pointer and push the debug marker.
  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  push(Immediate(CodeObject()));  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  ExternalReference context_address(Top::k_context_address);
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);
}
Steve Blocka7e24c12009-10-30 11:49:00 +0000391
// Finish entering an exit frame: optionally spill registers for the
// debugger, reserve |argc| argument slots, align esp to the OS frame
// alignment, and patch the saved entry sp slot.
void MacroAssembler::EnterExitFrameEpilogue(ExitFrame::Mode mode, int argc) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Save the state of all registers to the stack from the memory
  // location.  This is needed to allow nested break points.
  if (mode == ExitFrame::MODE_DEBUG) {
    // TODO(1243899): This should be symmetric to
    // CopyRegistersFromStackToMemory() but it isn't!  esp is assumed
    // correct here, but computed for the other call.  Very error
    // prone!  FIX THIS.  Actually there are deeper problems with
    // register saving than this asymmetry (see the bug report
    // associated with this issue).
    PushRegistersFromMemory(kJSCallerSaved);
  }
#endif

  // Reserve space for arguments.
  sub(Operand(esp), Immediate(argc * kPointerSize));

  // Get the required frame alignment for the OS.
  static const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    and_(esp, -kFrameAlignment);  // Round esp down to the alignment.
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}
420
421
// Enter an exit frame for a call to C code.  eax holds the argument count
// on entry; afterwards edi holds argc and esi points at the first caller
// stack argument.  Two slots are reserved for the callee's arguments.
void MacroAssembler::EnterExitFrame(ExitFrame::Mode mode) {
  EnterExitFramePrologue(mode);

  // Setup argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, Operand(eax));
  lea(esi, Operand(ebp, eax, times_4, offset));

  EnterExitFrameEpilogue(mode, 2);
}
432
433
// Enter an exit frame for an API call.  esi is pointed |stack_space| slots
// past the caller SP, and |argc| argument slots are reserved.
void MacroAssembler::EnterApiExitFrame(ExitFrame::Mode mode,
                                       int stack_space,
                                       int argc) {
  EnterExitFramePrologue(mode);

  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  lea(esi, Operand(ebp, (stack_space * kPointerSize) + offset));

  EnterExitFrameEpilogue(mode, argc);
}
444
445
// Leave an exit frame: restore debugger register copies (debug mode), pop
// the arguments and receiver, restore the context register, and clear the
// c_entry_fp top-of-stack variable.
void MacroAssembler::LeaveExitFrame(ExitFrame::Mode mode) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Restore the memory copy of the registers by digging them out from
  // the stack.  This is needed to allow nested break points.
  if (mode == ExitFrame::MODE_DEBUG) {
    // It's okay to clobber register ebx below because we don't need
    // the function pointer after this.
    const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
    int kOffset = ExitFrameConstants::kCodeOffset - kCallerSavedSize;
    lea(ebx, Operand(ebp, kOffset));
    CopyRegistersFromStackToMemory(ebx, ecx, kJSCallerSaved);
  }
#endif

  // Get the return address from the stack and restore the frame pointer.
  mov(ecx, Operand(ebp, 1 * kPointerSize));
  mov(ebp, Operand(ebp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  lea(esp, Operand(esi, 1 * kPointerSize));

  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Top::k_context_address);
  mov(esi, Operand::StaticVariable(context_address));
#ifdef DEBUG
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Push the return address to get ready to return.
  push(ecx);

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}
481
482
// Push a new stack handler (type marker, frame pointer, link to the
// previous handler) and install it as the current handler in Top.  The
// return address is assumed to already be on top of the stack.
void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  // Adjust this code if not the case.
  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
  // The pc (return address) is already on TOS.
  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      push(Immediate(StackHandler::TRY_CATCH));
    } else {
      push(Immediate(StackHandler::TRY_FINALLY));
    }
    push(ebp);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The frame pointer does not point to a JS frame so we save NULL
    // for ebp.  We expect the code throwing an exception to check ebp
    // before dereferencing it to restore the context.
    push(Immediate(StackHandler::ENTRY));
    push(Immediate(0));  // NULL frame pointer.
  }
  // Save the current handler as the next handler.
  push(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
  // Link this handler as the new current one.
  mov(Operand::StaticVariable(ExternalReference(Top::k_handler_address)), esp);
}
508
509
// Unlink the topmost stack handler: restore the next-handler pointer into
// Top and drop the rest of the handler frame from the stack.
void MacroAssembler::PopTryHandler() {
  ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
  pop(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
  add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize));
}
515
516
// Walk the prototype chain from |object| to |holder|, emitting a map check
// for every object on the way and jumping to |miss| on any mismatch or
// failed global-proxy security check.  When the chain depth equals
// |save_at_depth| the current object is stored to [esp + kPointerSize].
// Returns the register that holds the holder when the checks pass.
Register MacroAssembler::CheckMaps(JSObject* object, Register object_reg,
                                   JSObject* holder, Register holder_reg,
                                   Register scratch,
                                   int save_at_depth,
                                   Label* miss) {
  // Make sure there's no overlap between scratch and the other
  // registers.
  ASSERT(!scratch.is(object_reg) && !scratch.is(holder_reg));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

  if (save_at_depth == depth) {
    mov(Operand(esp, kPointerSize), object_reg);
  }

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  while (object != holder) {
    depth++;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

    JSObject* prototype = JSObject::cast(object->GetPrototype());
    if (Heap::InNewSpace(prototype)) {
      // Get the map of the current object.
      mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
      cmp(Operand(scratch), Immediate(Handle<Map>(object->map())));
      // Branch on the result of the map check.
      j(not_equal, miss, not_taken);
      // Check access rights to the global object.  This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (object->IsJSGlobalProxy()) {
        CheckAccessGlobalProxy(reg, scratch, miss);

        // Restore scratch register to be the map of the object.
        // We load the prototype from the map in the scratch register.
        mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
      }
      // The prototype is in new space; we cannot store a reference
      // to it in the code.  Load it from the map.
      reg = holder_reg;  // from now the object is in holder_reg
      mov(reg, FieldOperand(scratch, Map::kPrototypeOffset));
    } else {
      // Check the map of the current object.
      cmp(FieldOperand(reg, HeapObject::kMapOffset),
          Immediate(Handle<Map>(object->map())));
      // Branch on the result of the map check.
      j(not_equal, miss, not_taken);
      // Check access rights to the global object.  This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (object->IsJSGlobalProxy()) {
        CheckAccessGlobalProxy(reg, scratch, miss);
      }
      // The prototype is in old space; load it directly.
      reg = holder_reg;  // from now the object is in holder_reg
      mov(reg, Handle<JSObject>(prototype));
    }

    if (save_at_depth == depth) {
      mov(Operand(esp, kPointerSize), reg);
    }

    // Go to the next object in the prototype chain.
    object = prototype;
  }

  // Check the holder map.
  cmp(FieldOperand(reg, HeapObject::kMapOffset),
      Immediate(Handle<Map>(holder->map())));
  j(not_equal, miss, not_taken);

  // Log the check depth.
  LOG(IntEvent("check-maps-depth", depth + 1));

  // Perform security check for access to the global object and return
  // the holder register.
  ASSERT(object == holder);
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
  if (object->IsJSGlobalProxy()) {
    CheckAccessGlobalProxy(reg, scratch, miss);
  }
  return reg;
}
606
607
// Verify that the JSGlobalProxy in |holder_reg| may be accessed from the
// current lexical context, either because both share the same global
// context or because their security tokens match; jump to |miss| on
// failure.  |scratch| is clobbered.
void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));

  // Load current lexical context from the stack frame.
  mov(scratch, Operand(ebp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (FLAG_debug_code) {
    cmp(Operand(scratch), Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
  }
  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, offset));
  mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    push(scratch);
    // Read the first word and compare to global_context_map.
    mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
    cmp(scratch, Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(scratch);
  }

  // Check if both contexts are the same.
  cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  j(equal, &same_contexts, taken);

  // Compare security tokens, save holder_reg on the stack so we can use it
  // as a temporary register.
  //
  // TODO(119): avoid push(holder_reg)/pop(holder_reg)
  push(holder_reg);
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    cmp(holder_reg, Factory::null_value());
    Check(not_equal, "JSGlobalProxy::context() should not be null.");

    push(holder_reg);
    // Read the first word and compare to global_context_map().
    mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    cmp(holder_reg, Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);
  }

  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, token_offset));
  cmp(scratch, FieldOperand(holder_reg, token_offset));
  pop(holder_reg);
  j(not_equal, miss, not_taken);

  bind(&same_contexts);
}
674
675
// Load the new-space allocation top into |result|.  When the caller
// already has the top (RESULT_CONTAINS_TOP flag) this only verifies it in
// debug builds.  When |scratch| is provided it is left holding the address
// of the allocation top variable for a later UpdateAllocationTopHelper.
void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register result_end,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    ASSERT(scratch.is(no_reg));
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    cmp(result, Operand::StaticVariable(new_space_allocation_top));
    Check(equal, "Unexpected allocation top");
#endif
    return;
  }

  // Move address of new object to result.  Use scratch register if
  // available.
  if (scratch.is(no_reg)) {
    mov(result, Operand::StaticVariable(new_space_allocation_top));
  } else {
    ASSERT(!scratch.is(result_end));
    mov(Operand(scratch), Immediate(new_space_allocation_top));
    mov(result, Operand(scratch, 0));
  }
}
704
705
// Write |result_end| back as the new-space allocation top.  If |scratch|
// is a valid register it is expected to hold the address of the top
// variable (set up by LoadAllocationTopHelper).
void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch) {
  if (FLAG_debug_code) {
    // New space objects must be pointer-size aligned.
    test(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, "Unaligned allocation in new space");
  }

  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Update new top.  Use scratch if available.
  if (scratch.is(no_reg)) {
    mov(Operand::StaticVariable(new_space_allocation_top), result_end);
  } else {
    mov(Operand(scratch, 0), result_end);
  }
}
723
724
// Allocate |object_size| bytes in new space.  On success |result| holds
// the new object (tagged when TAG_OBJECT is set) and |result_end| its end
// address; on exhaustion control jumps to |gc_required| without updating
// the allocation top.
void MacroAssembler::AllocateInNewSpace(int object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  lea(result_end, Operand(result, object_size));
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required, not_taken);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}
751
752
// Allocate header_size + element_count (scaled by element_size) bytes in
// new space.  Register conventions match the fixed-size overload above.
void MacroAssembler::AllocateInNewSpace(int header_size,
                                        ScaleFactor element_size,
                                        Register element_count,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  lea(result_end, Operand(result, element_count, element_size, header_size));
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}
781
782
// Allocate a dynamically-sized block of |object_size| (register) bytes in
// new space.  Register conventions match the fixed-size overload above.
void MacroAssembler::AllocateInNewSpace(Register object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  if (!object_size.is(result_end)) {
    mov(result_end, object_size);
  }
  add(result_end, Operand(result));
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required, not_taken);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}
812
813
// Reverts the most recent new-space allocation by resetting the allocation
// top to the start of |object|. Only valid when no allocation has happened
// since |object| was allocated (it must be the newest object in new space).
void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Make sure the object has no tag before resetting top.
  and_(Operand(object), Immediate(~kHeapObjectTagMask));
#ifdef DEBUG
  // The object must lie below the current top, otherwise it was not the
  // last allocation.
  cmp(object, Operand::StaticVariable(new_space_allocation_top));
  Check(below, "Undo allocation of non allocated memory");
#endif
  mov(Operand::StaticVariable(new_space_allocation_top), object);
}
826
827
// Allocates a HeapNumber object in new space; result receives the tagged
// pointer. The value field is left uninitialized. Jumps to gc_required on
// allocation failure.
void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate heap number in new space.
  AllocateInNewSpace(HeapNumber::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::heap_number_map()));
}
844
845
// Allocates a sequential two-byte string of |length| characters in new
// space. result receives the tagged string; the character payload is left
// uninitialized. Jumps to gc_required on allocation failure.
void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  ASSERT(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  // (base = index = length with times_1 scale gives length + length.)
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
  and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));

  // Allocate two byte string in new space.
  AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::string_map()));
  // Length is stored as a smi.
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
879
880
// Allocates a sequential ASCII string of |length| characters in new space.
// result receives the tagged string; the character payload is left
// uninitialized. Jumps to gc_required on allocation failure.
void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, length);
  ASSERT(kCharSize == 1);
  // Round the byte count up to the object alignment boundary.
  add(Operand(scratch1), Immediate(kObjectAlignmentMask));
  and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));

  // Allocate ascii string in new space.
  AllocateInNewSpace(SeqAsciiString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::ascii_string_map()));
  // Length is stored as a smi.
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
914
915
// Allocates a (two-byte) cons string object in new space; result receives
// the tagged pointer. Jumps to gc_required on allocation failure.
void MacroAssembler::AllocateConsString(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::cons_string_map()));
}
932
933
// Allocates an ASCII cons string object in new space; result receives the
// tagged pointer. Jumps to gc_required on allocation failure.
void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  // Allocate cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::cons_ascii_string_map()));
}
950
951
// Emits a check for a -0.0 result of an integer operation: if |result| is
// zero and |op| is negative the product must have been -0, so branch to
// then_target. Virtual-frame (JumpTarget) variant.
void MacroAssembler::NegativeZeroTest(CodeGenerator* cgen,
                                      Register result,
                                      Register op,
                                      JumpTarget* then_target) {
  JumpTarget ok;
  test(result, Operand(result));
  ok.Branch(not_zero, taken);       // Non-zero result cannot be -0.
  test(op, Operand(op));
  then_target->Branch(sign, not_taken);  // Zero result, negative operand.
  ok.Bind();
}
963
964
// Emits a check for a -0.0 result of an integer operation: if |result| is
// zero and |op| is negative, branch to then_label. Label-based variant.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);          // Non-zero result cannot be -0.
  test(op, Operand(op));
  j(sign, then_label, not_taken);   // Zero result, negative operand.
  bind(&ok);
}
975
976
// Emits a check for a -0.0 result of a two-operand integer operation: if
// |result| is zero and either op1 or op2 is negative, branch to then_label.
// Clobbers |scratch|.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);          // Non-zero result cannot be -0.
  // The sign bit of (op1 | op2) is set if either operand is negative.
  mov(scratch, Operand(op1));
  or_(scratch, Operand(op2));
  j(sign, then_label, not_taken);
  bind(&ok);
}
990
991
// Loads the prototype (or initial map's prototype) of |function| into
// |result|. Jumps to |miss| when function is a smi, not a JSFunction, or
// its prototype slot holds the hole. Clobbers |scratch|.
void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss) {
  // Check that the receiver isn't a smi.
  test(function, Immediate(kSmiTagMask));
  j(zero, miss, not_taken);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss, not_taken);

  // Make sure that the function has an instance prototype.
  Label non_instance;
  movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
  test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance, not_taken);

  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(Operand(result), Immediate(Factory::the_hole_value()));
  j(equal, miss, not_taken);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  mov(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}
1037
1038
// Emits a call to the code of |stub|. Requires that the stub's code has
// already been generated (GetCode may allocate).
void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  call(stub->GetCode(), RelocInfo::CODE_TARGET);
}
1043
1044
// Non-allocating variant of CallStub: returns the Failure object instead of
// calling when the stub's code could not be obtained without a GC.
Object* MacroAssembler::TryCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Object* result = stub->TryGetCode();
  if (!result->IsFailure()) {
    call(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
  }
  return result;
}
1053
1054
// Emits a tail call (jump) to the code of |stub|.
void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
}
1059
1060
// Non-allocating variant of TailCallStub: returns the Failure object instead
// of jumping when the stub's code could not be obtained without a GC.
Object* MacroAssembler::TryTailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Object* result = stub->TryGetCode();
  if (!result->IsFailure()) {
    jmp(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
  }
  return result;
}
1069
1070
// Emits the return from a stub taking |argc| arguments: pops argc - 1
// arguments (the receiver/first argument is popped by ret itself).
void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}
1075
1076
// Emits code for an illegal runtime-call: drops |num_arguments| stack slots
// and places undefined in eax as the result.
void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    add(Operand(esp), Immediate(num_arguments * kPointerSize));
  }
  mov(eax, Immediate(Factory::undefined_value()));
}
1083
1084
// Convenience overload: resolves the function id and delegates.
void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}
1088
1089
// Convenience overload: resolves the function id and delegates.
Object* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
                                       int num_arguments) {
  return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
}
1094
1095
// Emits a call to the runtime function |f| with |num_arguments| arguments
// already on the stack. eax carries the argument count, ebx the function's
// entry; the actual transfer goes through the CEntry stub.
void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f)));
  CEntryStub ces(1);
  CallStub(&ces);
}
1114
1115
// Emits a call to an arbitrary external (C++) reference via the CEntry
// stub, with |num_arguments| arguments already on the stack.
void MacroAssembler::CallExternalReference(ExternalReference ref,
                                           int num_arguments) {
  mov(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ref));

  CEntryStub stub(1);
  CallStub(&stub);
}
1124
1125
// Non-allocating variant of CallRuntime: propagates the Failure object
// from TryCallStub instead of allocating stub code during a GC-unsafe phase.
Object* MacroAssembler::TryCallRuntime(Runtime::Function* f,
                                       int num_arguments) {
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    // Since we did not call the stub, there was no allocation failure.
    // Return some non-failure object.
    return Heap::undefined_value();
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f)));
  CEntryStub ces(1);
  return TryCallStub(&ces);
}
1144
1145
// Emits a tail call to an external reference with |num_arguments| arguments
// on the stack. |result_size| is unused on ia32 (results fit in registers).
void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  JumpToExternalReference(ext);
}
1156
1157
// Convenience wrapper: tail calls the runtime function identified by |fid|.
void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid), num_arguments, result_size);
}
1163
1164
// Emits code that saves the current handle-scope state (extensions, next,
// limit) on the stack and resets the extension count. Clobbers |scratch|.
// Paired with PopHandleScope/TryPopHandleScope.
void MacroAssembler::PushHandleScope(Register scratch) {
  // Push the number of extensions, smi-tagged so the gc will ignore it.
  ExternalReference extensions_address =
      ExternalReference::handle_scope_extensions_address();
  mov(scratch, Operand::StaticVariable(extensions_address));
  ASSERT_EQ(0, kSmiTag);
  shl(scratch, kSmiTagSize);
  push(scratch);
  mov(Operand::StaticVariable(extensions_address), Immediate(0));
  // Push next and limit pointers which will be wordsize aligned and
  // hence automatically smi tagged.
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  push(Operand::StaticVariable(next_address));
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address();
  push(Operand::StaticVariable(limit_address));
}
1183
1184
// Emits code that restores the handle-scope state pushed by
// PushHandleScope, deleting any extensions created in between. When
// |gc_allowed| is false the runtime call is attempted without allocation
// and a Failure object may be returned. |saved| (if valid) is preserved
// across the runtime call; |scratch| is clobbered.
Object* MacroAssembler::PopHandleScopeHelper(Register saved,
                                             Register scratch,
                                             bool gc_allowed) {
  Object* result = NULL;
  ExternalReference extensions_address =
      ExternalReference::handle_scope_extensions_address();
  Label write_back;
  mov(scratch, Operand::StaticVariable(extensions_address));
  cmp(Operand(scratch), Immediate(0));
  j(equal, &write_back);  // No extensions: skip the runtime call.
  // Calling a runtime function messes with registers so we save and
  // restore any one we're asked not to change
  if (saved.is_valid()) push(saved);
  if (gc_allowed) {
    CallRuntime(Runtime::kDeleteHandleScopeExtensions, 0);
  } else {
    result = TryCallRuntime(Runtime::kDeleteHandleScopeExtensions, 0);
    if (result->IsFailure()) return result;
  }
  if (saved.is_valid()) pop(saved);

  // Restore limit, next and extensions in reverse push order.
  bind(&write_back);
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address();
  pop(Operand::StaticVariable(limit_address));
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  pop(Operand::StaticVariable(next_address));
  pop(scratch);
  shr(scratch, kSmiTagSize);  // Undo the smi tagging from PushHandleScope.
  mov(Operand::StaticVariable(extensions_address), scratch);

  return result;
}
1219
1220
// GC-allowed variant; cannot fail, so the result is discarded.
void MacroAssembler::PopHandleScope(Register saved, Register scratch) {
  PopHandleScopeHelper(saved, scratch, true);
}
1224
1225
// Non-allocating variant; may return a Failure object.
Object* MacroAssembler::TryPopHandleScope(Register saved, Register scratch) {
  return PopHandleScopeHelper(saved, scratch, false);
}
1229
1230
// Emits a tail call into C++ code at |ext| via the CEntry runtime stub.
// ebx carries the entry point, matching the CEntry calling convention.
void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces(1);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}
1237
1238
// Emits the argument-count check shared by all invoke flavors. When the
// expected and actual counts provably match, falls straight through;
// otherwise emits a call/jump to the arguments adaptor trampoline (eax =
// actual count, ebx = expected count, edx = code to invoke). On a CALL,
// control continues at |done| after the adaptor returns.
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    const Operand& code_operand,
                                    Label* done,
                                    InvokeFlag flag) {
  bool definitely_matches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(eax, actual.immediate());
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      ASSERT(expected.reg().is(ebx));
      mov(eax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), Operand(actual.reg()));
      j(equal, &invoke);
      ASSERT(actual.reg().is(eax));
      ASSERT(expected.reg().is(ebx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
    // The adaptor expects the target code in edx.
    if (!code_constant.is_null()) {
      mov(edx, Immediate(code_constant));
      add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_operand.is_reg(edx)) {
      mov(edx, code_operand);
    }

    if (flag == CALL_FUNCTION) {
      call(adaptor, RelocInfo::CODE_TARGET);
      jmp(done);
    } else {
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
1302
1303
// Invokes code at |code| (an Operand) after the argument-count prologue,
// either as a call or a tail jump depending on |flag|.
void MacroAssembler::InvokeCode(const Operand& code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag) {
  Label done;
  InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code);
  }
  bind(&done);
}
1318
1319
// Invokes a constant code object after the argument-count prologue,
// either as a call or a tail jump depending on |flag|.
void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag) {
  Label done;
  Operand dummy(eax);  // Unused: the prologue takes the constant path.
  InvokePrologue(expected, actual, code, dummy, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code, rmode);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code, rmode);
  }
  bind(&done);
}
1336
1337
// Invokes the JSFunction in edi: loads its context into esi, its expected
// argument count into ebx and its code entry into edx, then delegates to
// InvokeCode.
void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(fun.is(edi));
  mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  SmiUntag(ebx);  // The formal parameter count is stored as a smi.
  mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset));
  lea(edx, FieldOperand(edx, Code::kHeaderSize));

  ParameterCount expected(ebx);
  InvokeCode(Operand(edx), expected, actual, flag);
}
1352
1353
// Invokes a known, already-compiled JSFunction: materializes it in edi,
// loads its context into esi and calls its cached code directly.
void MacroAssembler::InvokeFunction(JSFunction* function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(function->is_compiled());
  // Get the function and setup the context.
  mov(edi, Immediate(Handle<JSFunction>(function)));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  // Invoke the cached code.
  Handle<Code> code(function->code());
  ParameterCount expected(function->shared()->formal_parameter_count());
  InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag);
}
1367
1368
// Invokes the JavaScript builtin |id| through its code entry in edx.
void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments match the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinEntry(edx, id);
  InvokeCode(Operand(edx), expected, expected, flag);
}
1380
1381
// Loads the code entry point of builtin |id| into |target| and the builtin
// JSFunction into edi (hence target must not be edi).
void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(edi));

  // Load the builtins object into target register.
  mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));

  // Load the JavaScript builtin function from the builtins object.
  mov(edi, FieldOperand(target, JSBuiltinsObject::OffsetOfFunctionWithId(id)));

  // Load the code entry point from the builtins object.
  mov(target, FieldOperand(target, JSBuiltinsObject::OffsetOfCodeWithId(id)));
  if (FLAG_debug_code) {
    // Make sure the code objects in the builtins object and in the
    // builtin function are the same.
    push(target);
    mov(target, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
    mov(target, FieldOperand(target, SharedFunctionInfo::kCodeOffset));
    cmp(target, Operand(esp, 0));
    Assert(equal, "Builtin code object changed");
    pop(target);
  }
  // Skip the Code header to get the entry point.
  lea(target, FieldOperand(target, Code::kHeaderSize));
}
1406
1407
// Loads into |dst| the function context |context_chain_length| levels up
// the static chain from the current context in esi.
void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    mov(dst, Operand(esi, Context::SlotOffset(Context::CLOSURE_INDEX)));
    // Load the function context (which is the incoming, outer context).
    mov(dst, FieldOperand(dst, JSFunction::kContextOffset));
    for (int i = 1; i < context_chain_length; i++) {
      mov(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
      mov(dst, FieldOperand(dst, JSFunction::kContextOffset));
    }
    // The context may be an intermediate context, not a function context.
    mov(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  } else {  // Slot is in the current function context.
    // The context may be an intermediate context, not a function context.
    mov(dst, Operand(esi, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  }
}
1425
1426
1427
// Emits a plain return (no stack arguments popped).
void MacroAssembler::Ret() {
  ret(0);
}
1431
1432
Leon Clarkee46be812010-01-19 14:06:41 +00001433void MacroAssembler::Drop(int stack_elements) {
1434 if (stack_elements > 0) {
1435 add(Operand(esp), Immediate(stack_elements * kPointerSize));
1436 }
1437}
1438
1439
// Loads the handle |value| into |dst|.
void MacroAssembler::Move(Register dst, Handle<Object> value) {
  mov(dst, value);
}
1443
1444
Steve Blocka7e24c12009-10-30 11:49:00 +00001445void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
1446 if (FLAG_native_code_counters && counter->Enabled()) {
1447 mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
1448 }
1449}
1450
1451
1452void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
1453 ASSERT(value > 0);
1454 if (FLAG_native_code_counters && counter->Enabled()) {
1455 Operand operand = Operand::StaticVariable(ExternalReference(counter));
1456 if (value == 1) {
1457 inc(operand);
1458 } else {
1459 add(operand, Immediate(value));
1460 }
1461 }
1462}
1463
1464
1465void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
1466 ASSERT(value > 0);
1467 if (FLAG_native_code_counters && counter->Enabled()) {
1468 Operand operand = Operand::StaticVariable(ExternalReference(counter));
1469 if (value == 1) {
1470 dec(operand);
1471 } else {
1472 sub(operand, Immediate(value));
1473 }
1474 }
1475}
1476
1477
// Emits a conditional counter increment: bumps |counter| by |value| only
// when condition |cc| holds at run time. Flags are preserved across the
// update via pushfd/popfd.
void MacroAssembler::IncrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();  // Save flags: inc/add below would clobber them.
    IncrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}
1491
1492
// Emits a conditional counter decrement: lowers |counter| by |value| only
// when condition |cc| holds at run time. Flags are preserved across the
// update via pushfd/popfd.
void MacroAssembler::DecrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();  // Save flags: dec/sub below would clobber them.
    DecrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}
1506
1507
// Debug-build-only runtime check: emits a Check only when
// FLAG_debug_code is set.
void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code) Check(cc, msg);
}
1511
1512
// Emits a runtime check: aborts with |msg| if condition |cc| does not hold.
void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L, taken);
  Abort(msg);
  // will not return here
  bind(&L);
}
1520
1521
// Emits a check that esp satisfies the OS activation-frame alignment;
// traps with int3 when it does not. No code is emitted when the required
// alignment is a single pointer (always satisfied).
void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    Label alignment_as_expected;
    test(esp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}
1535
1536
// Emits an unconditional abort: passes |msg| (encoded as two smis, see
// below) to Runtime::kAbort and then traps. Never returns to emitted code.
void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  set_allow_stub_calls(true);

  push(eax);
  push(Immediate(p0));
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
  int3();
}
1562
1563
// Given a string instance type, jumps to |failure| unless it denotes a
// sequential ASCII string. Clobbers |scratch| (may alias instance_type).
void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Register scratch,
    Label* failure) {
  if (!scratch.is(instance_type)) {
    mov(scratch, instance_type);
  }
  // Mask down to the string-ness, representation and encoding bits and
  // compare against the sequential-ASCII pattern in one test.
  and_(scratch,
       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
  cmp(scratch, kStringTag | kSeqStringTag | kAsciiStringTag);
  j(not_equal, failure);
}
1576
1577
// Jumps to |failure| unless both object1 and object2 are sequential ASCII
// strings (also fails when either is a smi). Clobbers both scratches.
void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register object1,
                                                         Register object2,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         Label* failure) {
  // Check that both objects are not smis.
  // ANDing the pointers leaves the smi tag bit clear only if at least one
  // of them is a smi.
  ASSERT_EQ(0, kSmiTag);
  mov(scratch1, Operand(object1));
  and_(scratch1, Operand(object2));
  test(scratch1, Immediate(kSmiTagMask));
  j(zero, failure);

  // Load instance type for both strings.
  mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
  mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
  movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ascii strings.
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
  // Interleave bits from both instance types and compare them in one check.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  and_(scratch1, kFlatAsciiStringMask);
  and_(scratch2, kFlatAsciiStringMask);
  // scratch1 = scratch1 | (scratch2 << 3), then one combined compare.
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3));
  j(not_equal, failure);
}
1608
1609
Steve Block6ded16b2010-05-10 14:33:55 +01001610void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
1611 int frameAlignment = OS::ActivationFrameAlignment();
1612 if (frameAlignment != 0) {
1613 // Make stack end at alignment and make room for num_arguments words
1614 // and the original value of esp.
1615 mov(scratch, esp);
1616 sub(Operand(esp), Immediate((num_arguments + 1) * kPointerSize));
1617 ASSERT(IsPowerOf2(frameAlignment));
1618 and_(esp, -frameAlignment);
1619 mov(Operand(esp, num_arguments * kPointerSize), scratch);
1620 } else {
1621 sub(Operand(esp), Immediate(num_arguments * kPointerSize));
1622 }
1623}
1624
1625
// Calls a C function at external reference |function|. The stack must have
// been prepared with PrepareCallCFunction.
void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Trashing eax is ok as it will be the return value.
  mov(Operand(eax), Immediate(function));
  CallCFunction(eax, num_arguments);
}
1632
1633
1634void MacroAssembler::CallCFunction(Register function,
1635 int num_arguments) {
1636 // Check stack alignment.
1637 if (FLAG_debug_code) {
1638 CheckStackAlignment();
1639 }
1640
1641 call(Operand(function));
1642 if (OS::ActivationFrameAlignment() != 0) {
1643 mov(esp, Operand(esp, num_arguments * kPointerSize));
1644 } else {
1645 add(Operand(esp), Immediate(num_arguments * sizeof(int32_t)));
1646 }
1647}
1648
1649
// Creates a macro assembler that patches |size| bytes of existing code
// in place at |address|.
CodePatcher::CodePatcher(byte* address, int size)
    : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap on order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
1657
1658
// Flushes the instruction cache over the patched region and verifies that
// exactly |size_| bytes were emitted.
CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
1667
1668
1669} } // namespace v8::internal
Leon Clarkef7060e22010-06-03 12:02:55 +01001670
1671#endif // V8_TARGET_ARCH_IA32