blob: 7c339065274b18b3d0eb932e98c934560e1d332f [file] [log] [blame]
Ben Murdochb0fe1622011-05-05 13:52:32 +01001// Copyright 2010 the V8 project authors. All rights reserved.
Steve Blocka7e24c12009-10-30 11:49:00 +00002// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
29
Leon Clarkef7060e22010-06-03 12:02:55 +010030#if defined(V8_TARGET_ARCH_IA32)
31
Steve Blocka7e24c12009-10-30 11:49:00 +000032#include "bootstrapper.h"
33#include "codegen-inl.h"
34#include "debug.h"
35#include "runtime.h"
36#include "serialize.h"
37
38namespace v8 {
39namespace internal {
40
41// -------------------------------------------------------------------------
42// MacroAssembler implementation.
43
44MacroAssembler::MacroAssembler(void* buffer, int size)
45 : Assembler(buffer, size),
Steve Blocka7e24c12009-10-30 11:49:00 +000046 generating_stub_(false),
47 allow_stub_calls_(true),
48 code_object_(Heap::undefined_value()) {
49}
50
51
Steve Block6ded16b2010-05-10 14:33:55 +010052void MacroAssembler::RecordWriteHelper(Register object,
53 Register addr,
54 Register scratch) {
55 if (FLAG_debug_code) {
56 // Check that the object is not in new space.
57 Label not_in_new_space;
58 InNewSpace(object, scratch, not_equal, &not_in_new_space);
59 Abort("new-space object passed to RecordWriteHelper");
60 bind(&not_in_new_space);
61 }
62
Steve Blocka7e24c12009-10-30 11:49:00 +000063 // Compute the page start address from the heap object pointer, and reuse
64 // the 'object' register for it.
Steve Block6ded16b2010-05-10 14:33:55 +010065 and_(object, ~Page::kPageAlignmentMask);
Steve Blocka7e24c12009-10-30 11:49:00 +000066
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010067 // Compute number of region covering addr. See Page::GetRegionNumberForAddress
68 // method for more details.
69 and_(addr, Page::kPageAlignmentMask);
70 shr(addr, Page::kRegionSizeLog2);
Steve Blocka7e24c12009-10-30 11:49:00 +000071
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010072 // Set dirty mark for region.
73 bts(Operand(object, Page::kDirtyFlagOffset), addr);
Steve Blocka7e24c12009-10-30 11:49:00 +000074}
75
76
Kristian Monsen50ef84f2010-07-29 15:18:00 +010077void MacroAssembler::RecordWrite(Register object,
78 int offset,
79 Register value,
80 Register scratch) {
Leon Clarke4515c472010-02-03 11:58:03 +000081 // The compiled code assumes that record write doesn't change the
82 // context register, so we check that none of the clobbered
83 // registers are esi.
84 ASSERT(!object.is(esi) && !value.is(esi) && !scratch.is(esi));
85
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010086 // First, check if a write barrier is even needed. The tests below
87 // catch stores of Smis and stores into young gen.
Ben Murdochb0fe1622011-05-05 13:52:32 +010088 NearLabel done;
Steve Blocka7e24c12009-10-30 11:49:00 +000089
90 // Skip barrier if writing a smi.
91 ASSERT_EQ(0, kSmiTag);
92 test(value, Immediate(kSmiTagMask));
93 j(zero, &done);
94
Steve Block6ded16b2010-05-10 14:33:55 +010095 InNewSpace(object, value, equal, &done);
Steve Blocka7e24c12009-10-30 11:49:00 +000096
Steve Block6ded16b2010-05-10 14:33:55 +010097 // The offset is relative to a tagged or untagged HeapObject pointer,
98 // so either offset or offset + kHeapObjectTag must be a
99 // multiple of kPointerSize.
100 ASSERT(IsAligned(offset, kPointerSize) ||
101 IsAligned(offset + kHeapObjectTag, kPointerSize));
102
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100103 Register dst = scratch;
104 if (offset != 0) {
105 lea(dst, Operand(object, offset));
Steve Blocka7e24c12009-10-30 11:49:00 +0000106 } else {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100107 // Array access: calculate the destination address in the same manner as
108 // KeyedStoreIC::GenerateGeneric. Multiply a smi by 2 to get an offset
109 // into an array of words.
110 ASSERT_EQ(1, kSmiTagSize);
111 ASSERT_EQ(0, kSmiTag);
112 lea(dst, Operand(object, dst, times_half_pointer_size,
113 FixedArray::kHeaderSize - kHeapObjectTag));
Steve Blocka7e24c12009-10-30 11:49:00 +0000114 }
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100115 RecordWriteHelper(object, dst, value);
Steve Blocka7e24c12009-10-30 11:49:00 +0000116
117 bind(&done);
Leon Clarke4515c472010-02-03 11:58:03 +0000118
119 // Clobber all input registers when running with the debug-code flag
120 // turned on to provoke errors.
121 if (FLAG_debug_code) {
Steve Block6ded16b2010-05-10 14:33:55 +0100122 mov(object, Immediate(BitCast<int32_t>(kZapValue)));
123 mov(value, Immediate(BitCast<int32_t>(kZapValue)));
124 mov(scratch, Immediate(BitCast<int32_t>(kZapValue)));
Leon Clarke4515c472010-02-03 11:58:03 +0000125 }
Steve Blocka7e24c12009-10-30 11:49:00 +0000126}
127
128
Steve Block8defd9f2010-07-08 12:39:36 +0100129void MacroAssembler::RecordWrite(Register object,
130 Register address,
131 Register value) {
132 // The compiled code assumes that record write doesn't change the
133 // context register, so we check that none of the clobbered
134 // registers are esi.
135 ASSERT(!object.is(esi) && !value.is(esi) && !address.is(esi));
136
137 // First, check if a write barrier is even needed. The tests below
138 // catch stores of Smis and stores into young gen.
139 Label done;
140
141 // Skip barrier if writing a smi.
142 ASSERT_EQ(0, kSmiTag);
143 test(value, Immediate(kSmiTagMask));
144 j(zero, &done);
145
146 InNewSpace(object, value, equal, &done);
147
148 RecordWriteHelper(object, address, value);
149
150 bind(&done);
151
152 // Clobber all input registers when running with the debug-code flag
153 // turned on to provoke errors.
154 if (FLAG_debug_code) {
155 mov(object, Immediate(BitCast<int32_t>(kZapValue)));
156 mov(address, Immediate(BitCast<int32_t>(kZapValue)));
157 mov(value, Immediate(BitCast<int32_t>(kZapValue)));
158 }
159}
160
161
Steve Blocka7e24c12009-10-30 11:49:00 +0000162#ifdef ENABLE_DEBUGGER_SUPPORT
Andrei Popescu402d9372010-02-26 13:31:12 +0000163void MacroAssembler::DebugBreak() {
164 Set(eax, Immediate(0));
165 mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak)));
166 CEntryStub ces(1);
167 call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
168}
Steve Blocka7e24c12009-10-30 11:49:00 +0000169#endif
170
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100171
Steve Blocka7e24c12009-10-30 11:49:00 +0000172void MacroAssembler::Set(Register dst, const Immediate& x) {
173 if (x.is_zero()) {
174 xor_(dst, Operand(dst)); // shorter than mov
175 } else {
176 mov(dst, x);
177 }
178}
179
180
181void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
182 mov(dst, x);
183}
184
185
186void MacroAssembler::CmpObjectType(Register heap_object,
187 InstanceType type,
188 Register map) {
189 mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
190 CmpInstanceType(map, type);
191}
192
193
194void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
195 cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
196 static_cast<int8_t>(type));
197}
198
199
Andrei Popescu31002712010-02-23 13:46:05 +0000200void MacroAssembler::CheckMap(Register obj,
201 Handle<Map> map,
202 Label* fail,
203 bool is_heap_object) {
204 if (!is_heap_object) {
205 test(obj, Immediate(kSmiTagMask));
206 j(zero, fail);
207 }
208 cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
209 j(not_equal, fail);
210}
211
212
Leon Clarkee46be812010-01-19 14:06:41 +0000213Condition MacroAssembler::IsObjectStringType(Register heap_object,
214 Register map,
215 Register instance_type) {
216 mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
217 movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
218 ASSERT(kNotStringTag != 0);
219 test(instance_type, Immediate(kIsNotStringMask));
220 return zero;
221}
222
223
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100224void MacroAssembler::IsObjectJSObjectType(Register heap_object,
225 Register map,
226 Register scratch,
227 Label* fail) {
228 mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
229 IsInstanceJSObjectType(map, scratch, fail);
230}
231
232
233void MacroAssembler::IsInstanceJSObjectType(Register map,
234 Register scratch,
235 Label* fail) {
236 movzx_b(scratch, FieldOperand(map, Map::kInstanceTypeOffset));
237 sub(Operand(scratch), Immediate(FIRST_JS_OBJECT_TYPE));
238 cmp(scratch, LAST_JS_OBJECT_TYPE - FIRST_JS_OBJECT_TYPE);
239 j(above, fail);
240}
241
242
Steve Blocka7e24c12009-10-30 11:49:00 +0000243void MacroAssembler::FCmp() {
Steve Blockd0582a62009-12-15 09:54:21 +0000244 if (CpuFeatures::IsSupported(CMOV)) {
Steve Block3ce2e202009-11-05 08:53:23 +0000245 fucomip();
246 ffree(0);
247 fincstp();
248 } else {
249 fucompp();
250 push(eax);
251 fnstsw_ax();
252 sahf();
253 pop(eax);
254 }
Steve Blocka7e24c12009-10-30 11:49:00 +0000255}
256
257
Steve Block6ded16b2010-05-10 14:33:55 +0100258void MacroAssembler::AbortIfNotNumber(Register object) {
Andrei Popescu402d9372010-02-26 13:31:12 +0000259 Label ok;
260 test(object, Immediate(kSmiTagMask));
261 j(zero, &ok);
262 cmp(FieldOperand(object, HeapObject::kMapOffset),
263 Factory::heap_number_map());
Steve Block6ded16b2010-05-10 14:33:55 +0100264 Assert(equal, "Operand not a number");
Andrei Popescu402d9372010-02-26 13:31:12 +0000265 bind(&ok);
266}
267
268
Steve Block6ded16b2010-05-10 14:33:55 +0100269void MacroAssembler::AbortIfNotSmi(Register object) {
270 test(object, Immediate(kSmiTagMask));
Iain Merrick75681382010-08-19 15:07:18 +0100271 Assert(equal, "Operand is not a smi");
272}
273
274
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100275void MacroAssembler::AbortIfNotString(Register object) {
276 test(object, Immediate(kSmiTagMask));
277 Assert(not_equal, "Operand is not a string");
278 push(object);
279 mov(object, FieldOperand(object, HeapObject::kMapOffset));
280 CmpInstanceType(object, FIRST_NONSTRING_TYPE);
281 pop(object);
282 Assert(below, "Operand is not a string");
283}
284
285
Iain Merrick75681382010-08-19 15:07:18 +0100286void MacroAssembler::AbortIfSmi(Register object) {
287 test(object, Immediate(kSmiTagMask));
288 Assert(not_equal, "Operand is a smi");
Steve Block6ded16b2010-05-10 14:33:55 +0100289}
290
291
Steve Blocka7e24c12009-10-30 11:49:00 +0000292void MacroAssembler::EnterFrame(StackFrame::Type type) {
293 push(ebp);
294 mov(ebp, Operand(esp));
295 push(esi);
296 push(Immediate(Smi::FromInt(type)));
297 push(Immediate(CodeObject()));
298 if (FLAG_debug_code) {
299 cmp(Operand(esp, 0), Immediate(Factory::undefined_value()));
300 Check(not_equal, "code object not properly patched");
301 }
302}
303
304
305void MacroAssembler::LeaveFrame(StackFrame::Type type) {
306 if (FLAG_debug_code) {
307 cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
308 Immediate(Smi::FromInt(type)));
309 Check(equal, "stack frame types must match");
310 }
311 leave();
312}
313
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100314
315void MacroAssembler::EnterExitFramePrologue() {
Steve Blocka7e24c12009-10-30 11:49:00 +0000316 // Setup the frame structure on the stack.
317 ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
318 ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
319 ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
320 push(ebp);
321 mov(ebp, Operand(esp));
322
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100323 // Reserve room for entry stack pointer and push the code object.
Steve Blocka7e24c12009-10-30 11:49:00 +0000324 ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
Andrei Popescu402d9372010-02-26 13:31:12 +0000325 push(Immediate(0)); // Saved entry sp, patched before call.
326 push(Immediate(CodeObject())); // Accessed from ExitFrame::code_slot.
Steve Blocka7e24c12009-10-30 11:49:00 +0000327
328 // Save the frame pointer and the context in top.
329 ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
330 ExternalReference context_address(Top::k_context_address);
331 mov(Operand::StaticVariable(c_entry_fp_address), ebp);
332 mov(Operand::StaticVariable(context_address), esi);
Steve Blockd0582a62009-12-15 09:54:21 +0000333}
Steve Blocka7e24c12009-10-30 11:49:00 +0000334
Steve Blocka7e24c12009-10-30 11:49:00 +0000335
Ben Murdochb0fe1622011-05-05 13:52:32 +0100336void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) {
337 // Optionally save all XMM registers.
338 if (save_doubles) {
339 CpuFeatures::Scope scope(SSE2);
340 int space = XMMRegister::kNumRegisters * kDoubleSize + argc * kPointerSize;
341 sub(Operand(esp), Immediate(space));
342 int offset = -2 * kPointerSize;
343 for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
344 XMMRegister reg = XMMRegister::from_code(i);
345 movdbl(Operand(ebp, offset - ((i + 1) * kDoubleSize)), reg);
346 }
347 } else {
348 sub(Operand(esp), Immediate(argc * kPointerSize));
349 }
Steve Blocka7e24c12009-10-30 11:49:00 +0000350
351 // Get the required frame alignment for the OS.
352 static const int kFrameAlignment = OS::ActivationFrameAlignment();
353 if (kFrameAlignment > 0) {
354 ASSERT(IsPowerOf2(kFrameAlignment));
355 and_(esp, -kFrameAlignment);
356 }
357
358 // Patch the saved entry sp.
359 mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
360}
361
362
Ben Murdochb0fe1622011-05-05 13:52:32 +0100363void MacroAssembler::EnterExitFrame(bool save_doubles) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100364 EnterExitFramePrologue();
Steve Blockd0582a62009-12-15 09:54:21 +0000365
366 // Setup argc and argv in callee-saved registers.
367 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
368 mov(edi, Operand(eax));
369 lea(esi, Operand(ebp, eax, times_4, offset));
370
Ben Murdochb0fe1622011-05-05 13:52:32 +0100371 EnterExitFrameEpilogue(2, save_doubles);
Steve Blockd0582a62009-12-15 09:54:21 +0000372}
373
374
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -0800375void MacroAssembler::EnterApiExitFrame(int argc) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100376 EnterExitFramePrologue();
Ben Murdochb0fe1622011-05-05 13:52:32 +0100377 EnterExitFrameEpilogue(argc, false);
Steve Blockd0582a62009-12-15 09:54:21 +0000378}
379
380
Ben Murdochb0fe1622011-05-05 13:52:32 +0100381void MacroAssembler::LeaveExitFrame(bool save_doubles) {
382 // Optionally restore all XMM registers.
383 if (save_doubles) {
384 CpuFeatures::Scope scope(SSE2);
385 int offset = -2 * kPointerSize;
386 for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
387 XMMRegister reg = XMMRegister::from_code(i);
388 movdbl(reg, Operand(ebp, offset - ((i + 1) * kDoubleSize)));
389 }
390 }
391
Steve Blocka7e24c12009-10-30 11:49:00 +0000392 // Get the return address from the stack and restore the frame pointer.
393 mov(ecx, Operand(ebp, 1 * kPointerSize));
394 mov(ebp, Operand(ebp, 0 * kPointerSize));
395
396 // Pop the arguments and the receiver from the caller stack.
397 lea(esp, Operand(esi, 1 * kPointerSize));
398
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -0800399 // Push the return address to get ready to return.
400 push(ecx);
401
402 LeaveExitFrameEpilogue();
403}
404
405void MacroAssembler::LeaveExitFrameEpilogue() {
Steve Blocka7e24c12009-10-30 11:49:00 +0000406 // Restore current context from top and clear it in debug mode.
407 ExternalReference context_address(Top::k_context_address);
408 mov(esi, Operand::StaticVariable(context_address));
409#ifdef DEBUG
410 mov(Operand::StaticVariable(context_address), Immediate(0));
411#endif
412
Steve Blocka7e24c12009-10-30 11:49:00 +0000413 // Clear the top frame.
414 ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
415 mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
416}
417
418
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -0800419void MacroAssembler::LeaveApiExitFrame() {
420 mov(esp, Operand(ebp));
421 pop(ebp);
422
423 LeaveExitFrameEpilogue();
424}
425
426
Steve Blocka7e24c12009-10-30 11:49:00 +0000427void MacroAssembler::PushTryHandler(CodeLocation try_location,
428 HandlerType type) {
429 // Adjust this code if not the case.
430 ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
431 // The pc (return address) is already on TOS.
432 if (try_location == IN_JAVASCRIPT) {
433 if (type == TRY_CATCH_HANDLER) {
434 push(Immediate(StackHandler::TRY_CATCH));
435 } else {
436 push(Immediate(StackHandler::TRY_FINALLY));
437 }
438 push(ebp);
439 } else {
440 ASSERT(try_location == IN_JS_ENTRY);
441 // The frame pointer does not point to a JS frame so we save NULL
442 // for ebp. We expect the code throwing an exception to check ebp
443 // before dereferencing it to restore the context.
444 push(Immediate(StackHandler::ENTRY));
445 push(Immediate(0)); // NULL frame pointer.
446 }
447 // Save the current handler as the next handler.
448 push(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
449 // Link this handler as the new current one.
450 mov(Operand::StaticVariable(ExternalReference(Top::k_handler_address)), esp);
451}
452
453
Leon Clarkee46be812010-01-19 14:06:41 +0000454void MacroAssembler::PopTryHandler() {
455 ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
456 pop(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
457 add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize));
458}
459
460
Steve Blocka7e24c12009-10-30 11:49:00 +0000461void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
462 Register scratch,
463 Label* miss) {
464 Label same_contexts;
465
466 ASSERT(!holder_reg.is(scratch));
467
468 // Load current lexical context from the stack frame.
469 mov(scratch, Operand(ebp, StandardFrameConstants::kContextOffset));
470
471 // When generating debug code, make sure the lexical context is set.
472 if (FLAG_debug_code) {
473 cmp(Operand(scratch), Immediate(0));
474 Check(not_equal, "we should not have an empty lexical context");
475 }
476 // Load the global context of the current context.
477 int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
478 mov(scratch, FieldOperand(scratch, offset));
479 mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));
480
481 // Check the context is a global context.
482 if (FLAG_debug_code) {
483 push(scratch);
484 // Read the first word and compare to global_context_map.
485 mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
486 cmp(scratch, Factory::global_context_map());
487 Check(equal, "JSGlobalObject::global_context should be a global context.");
488 pop(scratch);
489 }
490
491 // Check if both contexts are the same.
492 cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
493 j(equal, &same_contexts, taken);
494
495 // Compare security tokens, save holder_reg on the stack so we can use it
496 // as a temporary register.
497 //
498 // TODO(119): avoid push(holder_reg)/pop(holder_reg)
499 push(holder_reg);
500 // Check that the security token in the calling global object is
501 // compatible with the security token in the receiving global
502 // object.
503 mov(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
504
505 // Check the context is a global context.
506 if (FLAG_debug_code) {
507 cmp(holder_reg, Factory::null_value());
508 Check(not_equal, "JSGlobalProxy::context() should not be null.");
509
510 push(holder_reg);
511 // Read the first word and compare to global_context_map(),
512 mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
513 cmp(holder_reg, Factory::global_context_map());
514 Check(equal, "JSGlobalObject::global_context should be a global context.");
515 pop(holder_reg);
516 }
517
518 int token_offset = Context::kHeaderSize +
519 Context::SECURITY_TOKEN_INDEX * kPointerSize;
520 mov(scratch, FieldOperand(scratch, token_offset));
521 cmp(scratch, FieldOperand(holder_reg, token_offset));
522 pop(holder_reg);
523 j(not_equal, miss, not_taken);
524
525 bind(&same_contexts);
526}
527
528
529void MacroAssembler::LoadAllocationTopHelper(Register result,
Steve Blocka7e24c12009-10-30 11:49:00 +0000530 Register scratch,
531 AllocationFlags flags) {
532 ExternalReference new_space_allocation_top =
533 ExternalReference::new_space_allocation_top_address();
534
535 // Just return if allocation top is already known.
536 if ((flags & RESULT_CONTAINS_TOP) != 0) {
537 // No use of scratch if allocation top is provided.
538 ASSERT(scratch.is(no_reg));
539#ifdef DEBUG
540 // Assert that result actually contains top on entry.
541 cmp(result, Operand::StaticVariable(new_space_allocation_top));
542 Check(equal, "Unexpected allocation top");
543#endif
544 return;
545 }
546
547 // Move address of new object to result. Use scratch register if available.
548 if (scratch.is(no_reg)) {
549 mov(result, Operand::StaticVariable(new_space_allocation_top));
550 } else {
Steve Blocka7e24c12009-10-30 11:49:00 +0000551 mov(Operand(scratch), Immediate(new_space_allocation_top));
552 mov(result, Operand(scratch, 0));
553 }
554}
555
556
557void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
558 Register scratch) {
Steve Blockd0582a62009-12-15 09:54:21 +0000559 if (FLAG_debug_code) {
560 test(result_end, Immediate(kObjectAlignmentMask));
561 Check(zero, "Unaligned allocation in new space");
562 }
563
Steve Blocka7e24c12009-10-30 11:49:00 +0000564 ExternalReference new_space_allocation_top =
565 ExternalReference::new_space_allocation_top_address();
566
567 // Update new top. Use scratch if available.
568 if (scratch.is(no_reg)) {
569 mov(Operand::StaticVariable(new_space_allocation_top), result_end);
570 } else {
571 mov(Operand(scratch, 0), result_end);
572 }
573}
574
575
576void MacroAssembler::AllocateInNewSpace(int object_size,
577 Register result,
578 Register result_end,
579 Register scratch,
580 Label* gc_required,
581 AllocationFlags flags) {
John Reck59135872010-11-02 12:39:01 -0700582 if (!FLAG_inline_new) {
583 if (FLAG_debug_code) {
584 // Trash the registers to simulate an allocation failure.
585 mov(result, Immediate(0x7091));
586 if (result_end.is_valid()) {
587 mov(result_end, Immediate(0x7191));
588 }
589 if (scratch.is_valid()) {
590 mov(scratch, Immediate(0x7291));
591 }
592 }
593 jmp(gc_required);
594 return;
595 }
Steve Blocka7e24c12009-10-30 11:49:00 +0000596 ASSERT(!result.is(result_end));
597
598 // Load address of new object into result.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -0800599 LoadAllocationTopHelper(result, scratch, flags);
Steve Blocka7e24c12009-10-30 11:49:00 +0000600
Ben Murdochbb769b22010-08-11 14:56:33 +0100601 Register top_reg = result_end.is_valid() ? result_end : result;
602
Steve Blocka7e24c12009-10-30 11:49:00 +0000603 // Calculate new top and bail out if new space is exhausted.
604 ExternalReference new_space_allocation_limit =
605 ExternalReference::new_space_allocation_limit_address();
Ben Murdochbb769b22010-08-11 14:56:33 +0100606
607 if (top_reg.is(result)) {
608 add(Operand(top_reg), Immediate(object_size));
609 } else {
610 lea(top_reg, Operand(result, object_size));
611 }
612 cmp(top_reg, Operand::StaticVariable(new_space_allocation_limit));
Steve Blocka7e24c12009-10-30 11:49:00 +0000613 j(above, gc_required, not_taken);
614
Leon Clarkee46be812010-01-19 14:06:41 +0000615 // Update allocation top.
Ben Murdochbb769b22010-08-11 14:56:33 +0100616 UpdateAllocationTopHelper(top_reg, scratch);
617
618 // Tag result if requested.
619 if (top_reg.is(result)) {
620 if ((flags & TAG_OBJECT) != 0) {
621 sub(Operand(result), Immediate(object_size - kHeapObjectTag));
622 } else {
623 sub(Operand(result), Immediate(object_size));
624 }
625 } else if ((flags & TAG_OBJECT) != 0) {
626 add(Operand(result), Immediate(kHeapObjectTag));
627 }
Steve Blocka7e24c12009-10-30 11:49:00 +0000628}
629
630
631void MacroAssembler::AllocateInNewSpace(int header_size,
632 ScaleFactor element_size,
633 Register element_count,
634 Register result,
635 Register result_end,
636 Register scratch,
637 Label* gc_required,
638 AllocationFlags flags) {
John Reck59135872010-11-02 12:39:01 -0700639 if (!FLAG_inline_new) {
640 if (FLAG_debug_code) {
641 // Trash the registers to simulate an allocation failure.
642 mov(result, Immediate(0x7091));
643 mov(result_end, Immediate(0x7191));
644 if (scratch.is_valid()) {
645 mov(scratch, Immediate(0x7291));
646 }
647 // Register element_count is not modified by the function.
648 }
649 jmp(gc_required);
650 return;
651 }
Steve Blocka7e24c12009-10-30 11:49:00 +0000652 ASSERT(!result.is(result_end));
653
654 // Load address of new object into result.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -0800655 LoadAllocationTopHelper(result, scratch, flags);
Steve Blocka7e24c12009-10-30 11:49:00 +0000656
657 // Calculate new top and bail out if new space is exhausted.
658 ExternalReference new_space_allocation_limit =
659 ExternalReference::new_space_allocation_limit_address();
660 lea(result_end, Operand(result, element_count, element_size, header_size));
661 cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
662 j(above, gc_required);
663
Steve Blocka7e24c12009-10-30 11:49:00 +0000664 // Tag result if requested.
665 if ((flags & TAG_OBJECT) != 0) {
Leon Clarkee46be812010-01-19 14:06:41 +0000666 lea(result, Operand(result, kHeapObjectTag));
Steve Blocka7e24c12009-10-30 11:49:00 +0000667 }
Leon Clarkee46be812010-01-19 14:06:41 +0000668
669 // Update allocation top.
670 UpdateAllocationTopHelper(result_end, scratch);
Steve Blocka7e24c12009-10-30 11:49:00 +0000671}
672
673
674void MacroAssembler::AllocateInNewSpace(Register object_size,
675 Register result,
676 Register result_end,
677 Register scratch,
678 Label* gc_required,
679 AllocationFlags flags) {
John Reck59135872010-11-02 12:39:01 -0700680 if (!FLAG_inline_new) {
681 if (FLAG_debug_code) {
682 // Trash the registers to simulate an allocation failure.
683 mov(result, Immediate(0x7091));
684 mov(result_end, Immediate(0x7191));
685 if (scratch.is_valid()) {
686 mov(scratch, Immediate(0x7291));
687 }
688 // object_size is left unchanged by this function.
689 }
690 jmp(gc_required);
691 return;
692 }
Steve Blocka7e24c12009-10-30 11:49:00 +0000693 ASSERT(!result.is(result_end));
694
695 // Load address of new object into result.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -0800696 LoadAllocationTopHelper(result, scratch, flags);
Steve Blocka7e24c12009-10-30 11:49:00 +0000697
698 // Calculate new top and bail out if new space is exhausted.
699 ExternalReference new_space_allocation_limit =
700 ExternalReference::new_space_allocation_limit_address();
701 if (!object_size.is(result_end)) {
702 mov(result_end, object_size);
703 }
704 add(result_end, Operand(result));
705 cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
706 j(above, gc_required, not_taken);
707
Steve Blocka7e24c12009-10-30 11:49:00 +0000708 // Tag result if requested.
709 if ((flags & TAG_OBJECT) != 0) {
Leon Clarkee46be812010-01-19 14:06:41 +0000710 lea(result, Operand(result, kHeapObjectTag));
Steve Blocka7e24c12009-10-30 11:49:00 +0000711 }
Leon Clarkee46be812010-01-19 14:06:41 +0000712
713 // Update allocation top.
714 UpdateAllocationTopHelper(result_end, scratch);
Steve Blocka7e24c12009-10-30 11:49:00 +0000715}
716
717
718void MacroAssembler::UndoAllocationInNewSpace(Register object) {
719 ExternalReference new_space_allocation_top =
720 ExternalReference::new_space_allocation_top_address();
721
722 // Make sure the object has no tag before resetting top.
723 and_(Operand(object), Immediate(~kHeapObjectTagMask));
724#ifdef DEBUG
725 cmp(object, Operand::StaticVariable(new_space_allocation_top));
726 Check(below, "Undo allocation of non allocated memory");
727#endif
728 mov(Operand::StaticVariable(new_space_allocation_top), object);
729}
730
731
Steve Block3ce2e202009-11-05 08:53:23 +0000732void MacroAssembler::AllocateHeapNumber(Register result,
733 Register scratch1,
734 Register scratch2,
735 Label* gc_required) {
736 // Allocate heap number in new space.
737 AllocateInNewSpace(HeapNumber::kSize,
738 result,
739 scratch1,
740 scratch2,
741 gc_required,
742 TAG_OBJECT);
743
744 // Set the map.
745 mov(FieldOperand(result, HeapObject::kMapOffset),
746 Immediate(Factory::heap_number_map()));
747}
748
749
Steve Blockd0582a62009-12-15 09:54:21 +0000750void MacroAssembler::AllocateTwoByteString(Register result,
751 Register length,
752 Register scratch1,
753 Register scratch2,
754 Register scratch3,
755 Label* gc_required) {
756 // Calculate the number of bytes needed for the characters in the string while
757 // observing object alignment.
758 ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
Steve Blockd0582a62009-12-15 09:54:21 +0000759 ASSERT(kShortSize == 2);
Leon Clarkee46be812010-01-19 14:06:41 +0000760 // scratch1 = length * 2 + kObjectAlignmentMask.
761 lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
Steve Blockd0582a62009-12-15 09:54:21 +0000762 and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));
763
764 // Allocate two byte string in new space.
765 AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
766 times_1,
767 scratch1,
768 result,
769 scratch2,
770 scratch3,
771 gc_required,
772 TAG_OBJECT);
773
774 // Set the map, length and hash field.
775 mov(FieldOperand(result, HeapObject::kMapOffset),
776 Immediate(Factory::string_map()));
Steve Block6ded16b2010-05-10 14:33:55 +0100777 mov(scratch1, length);
778 SmiTag(scratch1);
779 mov(FieldOperand(result, String::kLengthOffset), scratch1);
Steve Blockd0582a62009-12-15 09:54:21 +0000780 mov(FieldOperand(result, String::kHashFieldOffset),
781 Immediate(String::kEmptyHashField));
782}
783
784
// Allocate a sequential ASCII string whose character count is held in
// |length| (untagged).  On success |result| holds the tagged string with
// map, length and hash field initialized; on allocation failure control
// jumps to |gc_required|.  All scratch registers are clobbered.
void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, length);
  ASSERT(kCharSize == 1);  // One byte per character: no scaling needed.
  // Round the byte count up to the object alignment boundary.
  add(Operand(scratch1), Immediate(kObjectAlignmentMask));
  and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));

  // Allocate ascii string in new space.
  AllocateInNewSpace(SeqAsciiString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::ascii_string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);  // The length field stores a smi.
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
818
819
Iain Merrick9ac36c92010-09-13 15:29:50 +0100820void MacroAssembler::AllocateAsciiString(Register result,
821 int length,
822 Register scratch1,
823 Register scratch2,
824 Label* gc_required) {
825 ASSERT(length > 0);
826
827 // Allocate ascii string in new space.
828 AllocateInNewSpace(SeqAsciiString::SizeFor(length),
829 result,
830 scratch1,
831 scratch2,
832 gc_required,
833 TAG_OBJECT);
834
835 // Set the map, length and hash field.
836 mov(FieldOperand(result, HeapObject::kMapOffset),
837 Immediate(Factory::ascii_string_map()));
838 mov(FieldOperand(result, String::kLengthOffset),
839 Immediate(Smi::FromInt(length)));
840 mov(FieldOperand(result, String::kHashFieldOffset),
841 Immediate(String::kEmptyHashField));
842}
843
844
Steve Blockd0582a62009-12-15 09:54:21 +0000845void MacroAssembler::AllocateConsString(Register result,
846 Register scratch1,
847 Register scratch2,
848 Label* gc_required) {
849 // Allocate heap number in new space.
850 AllocateInNewSpace(ConsString::kSize,
851 result,
852 scratch1,
853 scratch2,
854 gc_required,
855 TAG_OBJECT);
856
857 // Set the map. The other fields are left uninitialized.
858 mov(FieldOperand(result, HeapObject::kMapOffset),
859 Immediate(Factory::cons_string_map()));
860}
861
862
// Allocate an ASCII cons string object.  Only the map is initialized; the
// caller must fill in length, hash field, first and second.  Jumps to
// |gc_required| if new space is exhausted.
void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  // Allocate ascii cons string in new space.  (ConsString has a fixed size.)
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::cons_ascii_string_map()));
}
879
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -0800880// All registers must be distinct. Only current_string needs valid contents
881// on entry. All registers may be invalid on exit. result_operand is
882// unchanged, padding_chars is updated correctly.
883void MacroAssembler::AppendStringToTopOfNewSpace(
884 Register current_string, // Tagged pointer to string to copy.
885 Register current_string_length,
886 Register result_pos,
887 Register scratch,
888 Register new_padding_chars,
889 Operand operand_result,
890 Operand operand_padding_chars,
891 Label* bailout) {
892 mov(current_string_length,
893 FieldOperand(current_string, String::kLengthOffset));
894 shr(current_string_length, 1);
895 sub(current_string_length, operand_padding_chars);
896 mov(new_padding_chars, current_string_length);
897 add(Operand(current_string_length), Immediate(kObjectAlignmentMask));
898 and_(Operand(current_string_length), Immediate(~kObjectAlignmentMask));
899 sub(new_padding_chars, Operand(current_string_length));
900 neg(new_padding_chars);
901 // We need an allocation even if current_string_length is 0, to fetch
902 // result_pos. Consider using a faster fetch of result_pos in that case.
903 AllocateInNewSpace(current_string_length, result_pos, scratch, no_reg,
904 bailout, NO_ALLOCATION_FLAGS);
905 sub(result_pos, operand_padding_chars);
906 mov(operand_padding_chars, new_padding_chars);
907
908 Register scratch_2 = new_padding_chars; // Used to compute total length.
909 // Copy string to the end of result.
910 mov(current_string_length,
911 FieldOperand(current_string, String::kLengthOffset));
912 mov(scratch, operand_result);
913 mov(scratch_2, current_string_length);
914 add(scratch_2, FieldOperand(scratch, String::kLengthOffset));
915 mov(FieldOperand(scratch, String::kLengthOffset), scratch_2);
916 shr(current_string_length, 1);
917 lea(current_string,
918 FieldOperand(current_string, SeqAsciiString::kHeaderSize));
919 // Loop condition: while (--current_string_length >= 0).
920 Label copy_loop;
921 Label copy_loop_entry;
922 jmp(&copy_loop_entry);
923 bind(&copy_loop);
924 mov_b(scratch, Operand(current_string, current_string_length, times_1, 0));
925 mov_b(Operand(result_pos, current_string_length, times_1, 0), scratch);
926 bind(&copy_loop_entry);
927 sub(Operand(current_string_length), Immediate(1));
928 j(greater_equal, &copy_loop);
929}
930
Steve Blockd0582a62009-12-15 09:54:21 +0000931
Steve Blocka7e24c12009-10-30 11:49:00 +0000932void MacroAssembler::NegativeZeroTest(CodeGenerator* cgen,
933 Register result,
934 Register op,
935 JumpTarget* then_target) {
936 JumpTarget ok;
937 test(result, Operand(result));
938 ok.Branch(not_zero, taken);
939 test(op, Operand(op));
940 then_target->Branch(sign, not_taken);
941 ok.Bind();
942}
943
944
// Jump to |then_label| if |result| is zero and |op| is negative, i.e. when
// an integer operation produced -0 that cannot be represented as a smi.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);        // Non-zero result: cannot be -0.
  test(op, Operand(op));
  j(sign, then_label, not_taken); // Negative operand: result is -0.
  bind(&ok);
}
955
956
// Jump to |then_label| if |result| is zero and either |op1| or |op2| is
// negative (or-ing the operands makes the sign bit set if either operand's
// sign bit is set), i.e. the zero result is actually -0.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);        // Non-zero result: cannot be -0.
  mov(scratch, Operand(op1));
  or_(scratch, Operand(op2));     // Sign bit set iff either operand negative.
  j(sign, then_label, not_taken);
  bind(&ok);
}
970
971
// Load into |result| the object that instances created by |function| would
// have as their prototype.  Jumps to |miss| if |function| is not actually a
// JSFunction or its prototype has not been materialized yet (hole value).
// |scratch| is clobbered.
void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss) {
  // Check that the receiver isn't a smi.
  test(function, Immediate(kSmiTagMask));
  j(zero, miss, not_taken);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss, not_taken);

  // Make sure that the function has an instance prototype.
  Label non_instance;
  movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
  test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance, not_taken);

  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(Operand(result), Immediate(Factory::the_hole_value()));
  j(equal, miss, not_taken);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  mov(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}
1017
1018
// Call a code stub, forcing its code object to be generated now if needed.
void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  call(stub->GetCode(), RelocInfo::CODE_TARGET);
}
1023
1024
John Reck59135872010-11-02 12:39:01 -07001025MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub) {
Leon Clarkee46be812010-01-19 14:06:41 +00001026 ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
John Reck59135872010-11-02 12:39:01 -07001027 Object* result;
1028 { MaybeObject* maybe_result = stub->TryGetCode();
1029 if (!maybe_result->ToObject(&result)) return maybe_result;
Leon Clarkee46be812010-01-19 14:06:41 +00001030 }
John Reck59135872010-11-02 12:39:01 -07001031 call(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
Leon Clarkee46be812010-01-19 14:06:41 +00001032 return result;
1033}
1034
1035
Steve Blockd0582a62009-12-15 09:54:21 +00001036void MacroAssembler::TailCallStub(CodeStub* stub) {
Leon Clarkee46be812010-01-19 14:06:41 +00001037 ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
Steve Blockd0582a62009-12-15 09:54:21 +00001038 jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
1039}
1040
1041
John Reck59135872010-11-02 12:39:01 -07001042MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub) {
Leon Clarkee46be812010-01-19 14:06:41 +00001043 ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
John Reck59135872010-11-02 12:39:01 -07001044 Object* result;
1045 { MaybeObject* maybe_result = stub->TryGetCode();
1046 if (!maybe_result->ToObject(&result)) return maybe_result;
Leon Clarkee46be812010-01-19 14:06:41 +00001047 }
John Reck59135872010-11-02 12:39:01 -07001048 jmp(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
Leon Clarkee46be812010-01-19 14:06:41 +00001049 return result;
1050}
1051
1052
Steve Blocka7e24c12009-10-30 11:49:00 +00001053void MacroAssembler::StubReturn(int argc) {
1054 ASSERT(argc >= 1 && generating_stub());
1055 ret((argc - 1) * kPointerSize);
1056}
1057
1058
1059void MacroAssembler::IllegalOperation(int num_arguments) {
1060 if (num_arguments > 0) {
1061 add(Operand(esp), Immediate(num_arguments * kPointerSize));
1062 }
1063 mov(eax, Immediate(Factory::undefined_value()));
1064}
1065
1066
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001067void MacroAssembler::IndexFromHash(Register hash, Register index) {
1068 // The assert checks that the constants for the maximum number of digits
1069 // for an array index cached in the hash field and the number of bits
1070 // reserved for it does not conflict.
1071 ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
1072 (1 << String::kArrayIndexValueBits));
1073 // We want the smi-tagged index in key. kArrayIndexValueMask has zeros in
1074 // the low kHashShift bits.
1075 and_(hash, String::kArrayIndexValueMask);
1076 STATIC_ASSERT(String::kHashShift >= kSmiTagSize && kSmiTag == 0);
1077 if (String::kHashShift > kSmiTagSize) {
1078 shr(hash, String::kHashShift - kSmiTagSize);
1079 }
1080 if (!index.is(hash)) {
1081 mov(index, hash);
1082 }
1083}
1084
1085
Steve Blocka7e24c12009-10-30 11:49:00 +00001086void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
1087 CallRuntime(Runtime::FunctionForId(id), num_arguments);
1088}
1089
1090
Ben Murdochb0fe1622011-05-05 13:52:32 +01001091void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
1092 Runtime::Function* function = Runtime::FunctionForId(id);
1093 Set(eax, Immediate(function->nargs));
1094 mov(ebx, Immediate(ExternalReference(function)));
1095 CEntryStub ces(1);
1096 ces.SaveDoubles();
1097 CallStub(&ces);
1098}
1099
1100
John Reck59135872010-11-02 12:39:01 -07001101MaybeObject* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
1102 int num_arguments) {
Leon Clarkee46be812010-01-19 14:06:41 +00001103 return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
1104}
1105
1106
Steve Blocka7e24c12009-10-30 11:49:00 +00001107void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
1108 // If the expected number of arguments of the runtime function is
1109 // constant, we check that the actual number of arguments match the
1110 // expectation.
1111 if (f->nargs >= 0 && f->nargs != num_arguments) {
1112 IllegalOperation(num_arguments);
1113 return;
1114 }
1115
Leon Clarke4515c472010-02-03 11:58:03 +00001116 // TODO(1236192): Most runtime routines don't need the number of
1117 // arguments passed in because it is constant. At some point we
1118 // should remove this need and make the runtime routine entry code
1119 // smarter.
1120 Set(eax, Immediate(num_arguments));
1121 mov(ebx, Immediate(ExternalReference(f)));
1122 CEntryStub ces(1);
1123 CallStub(&ces);
Steve Blocka7e24c12009-10-30 11:49:00 +00001124}
1125
1126
John Reck59135872010-11-02 12:39:01 -07001127MaybeObject* MacroAssembler::TryCallRuntime(Runtime::Function* f,
1128 int num_arguments) {
Leon Clarkee46be812010-01-19 14:06:41 +00001129 if (f->nargs >= 0 && f->nargs != num_arguments) {
1130 IllegalOperation(num_arguments);
1131 // Since we did not call the stub, there was no allocation failure.
1132 // Return some non-failure object.
1133 return Heap::undefined_value();
1134 }
1135
Leon Clarke4515c472010-02-03 11:58:03 +00001136 // TODO(1236192): Most runtime routines don't need the number of
1137 // arguments passed in because it is constant. At some point we
1138 // should remove this need and make the runtime routine entry code
1139 // smarter.
1140 Set(eax, Immediate(num_arguments));
1141 mov(ebx, Immediate(ExternalReference(f)));
1142 CEntryStub ces(1);
1143 return TryCallStub(&ces);
Leon Clarkee46be812010-01-19 14:06:41 +00001144}
1145
1146
Ben Murdochbb769b22010-08-11 14:56:33 +01001147void MacroAssembler::CallExternalReference(ExternalReference ref,
1148 int num_arguments) {
1149 mov(eax, Immediate(num_arguments));
1150 mov(ebx, Immediate(ref));
1151
1152 CEntryStub stub(1);
1153 CallStub(&stub);
1154}
1155
1156
Steve Block6ded16b2010-05-10 14:33:55 +01001157void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
1158 int num_arguments,
1159 int result_size) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001160 // TODO(1236192): Most runtime routines don't need the number of
1161 // arguments passed in because it is constant. At some point we
1162 // should remove this need and make the runtime routine entry code
1163 // smarter.
1164 Set(eax, Immediate(num_arguments));
Steve Block6ded16b2010-05-10 14:33:55 +01001165 JumpToExternalReference(ext);
1166}
1167
1168
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08001169MaybeObject* MacroAssembler::TryTailCallExternalReference(
1170 const ExternalReference& ext, int num_arguments, int result_size) {
1171 // TODO(1236192): Most runtime routines don't need the number of
1172 // arguments passed in because it is constant. At some point we
1173 // should remove this need and make the runtime routine entry code
1174 // smarter.
1175 Set(eax, Immediate(num_arguments));
1176 return TryJumpToExternalReference(ext);
1177}
1178
1179
Steve Block6ded16b2010-05-10 14:33:55 +01001180void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
1181 int num_arguments,
1182 int result_size) {
1183 TailCallExternalReference(ExternalReference(fid), num_arguments, result_size);
Steve Blocka7e24c12009-10-30 11:49:00 +00001184}
1185
1186
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08001187MaybeObject* MacroAssembler::TryTailCallRuntime(Runtime::FunctionId fid,
1188 int num_arguments,
1189 int result_size) {
1190 return TryTailCallExternalReference(
1191 ExternalReference(fid), num_arguments, result_size);
1192}
1193
1194
Ben Murdochb0fe1622011-05-05 13:52:32 +01001195// If true, a Handle<T> returned by value from a function with cdecl calling
1196// convention will be returned directly as a value of location_ field in a
1197// register eax.
1198// If false, it is returned as a pointer to a preallocated by caller memory
1199// region. Pointer to this region should be passed to a function as an
1200// implicit first argument.
1201#if defined(USING_BSD_ABI) || defined(__MINGW32__)
1202static const bool kReturnHandlesDirectly = true;
John Reck59135872010-11-02 12:39:01 -07001203#else
Ben Murdochb0fe1622011-05-05 13:52:32 +01001204static const bool kReturnHandlesDirectly = false;
John Reck59135872010-11-02 12:39:01 -07001205#endif
1206
1207
// Return the stack operand for API-call parameter |index|, skipping the
// implicit out-parameter slot when handles are not returned directly
// (see kReturnHandlesDirectly above).
Operand ApiParameterOperand(int index) {
  return Operand(
      esp, (index + (kReturnHandlesDirectly ? 0 : 1)) * kPointerSize);
}
1212
1213
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08001214void MacroAssembler::PrepareCallApiFunction(int argc, Register scratch) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001215 if (kReturnHandlesDirectly) {
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08001216 EnterApiExitFrame(argc);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001217 // When handles are returned directly we don't have to allocate extra
John Reck59135872010-11-02 12:39:01 -07001218 // space for and pass an out parameter.
1219 } else {
1220 // We allocate two additional slots: return value and pointer to it.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08001221 EnterApiExitFrame(argc + 2);
John Reck59135872010-11-02 12:39:01 -07001222
John Reck59135872010-11-02 12:39:01 -07001223 // The argument slots are filled as follows:
1224 //
1225 // n + 1: output cell
1226 // n: arg n
1227 // ...
1228 // 1: arg1
1229 // 0: pointer to the output cell
1230 //
1231 // Note that this is one more "argument" than the function expects
1232 // so the out cell will have to be popped explicitly after returning
1233 // from the function. The out cell contains Handle.
John Reck59135872010-11-02 12:39:01 -07001234
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08001235 // pointer to out cell.
1236 lea(scratch, Operand(esp, (argc + 1) * kPointerSize));
1237 mov(Operand(esp, 0 * kPointerSize), scratch); // output.
1238 if (FLAG_debug_code) {
1239 mov(Operand(esp, (argc + 1) * kPointerSize), Immediate(0)); // out cell.
1240 }
1241 }
1242}
1243
1244
// Call an API (callback) function and return through the exit frame.
// Opens a fresh HandleScope around the call, unwraps the returned handle
// into eax (undefined for an empty handle), pops the scope — deleting any
// extensions the callback allocated — and either returns to the JS caller
// (popping |stack_space| slots) or promotes a scheduled exception via the
// runtime.  Returns a failure if tail-call stub generation fails.
// Clobbers ebx and edi (used as callee-save HandleScope bookkeeping).
MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn(ApiFunction* function,
                                                         int stack_space) {
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address();
  ExternalReference level_address =
      ExternalReference::handle_scope_level_address();

  // Allocate HandleScope in callee-save registers.
  mov(ebx, Operand::StaticVariable(next_address));
  mov(edi, Operand::StaticVariable(limit_address));
  add(Operand::StaticVariable(level_address), Immediate(1));

  // Call the api function!
  call(function->address(), RelocInfo::RUNTIME_ENTRY);

  if (!kReturnHandlesDirectly) {
    // The returned value is a pointer to the handle holding the result.
    // Dereference this to get to the location.
    mov(eax, Operand(eax, 0));
  }

  Label empty_handle;
  Label prologue;
  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;

  // Check if the result handle holds 0.
  test(eax, Operand(eax));
  j(zero, &empty_handle, not_taken);
  // It was non-zero.  Dereference to get the result value.
  mov(eax, Operand(eax, 0));
  bind(&prologue);
  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  mov(Operand::StaticVariable(next_address), ebx);
  sub(Operand::StaticVariable(level_address), Immediate(1));
  // The sub above set the flags; the level must not go negative.
  Assert(above_equal, "Invalid HandleScope level");
  cmp(edi, Operand::StaticVariable(limit_address));
  j(not_equal, &delete_allocated_handles, not_taken);
  bind(&leave_exit_frame);

  // Check if the function scheduled an exception.
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address();
  cmp(Operand::StaticVariable(scheduled_exception_address),
      Immediate(Factory::the_hole_value()));
  j(not_equal, &promote_scheduled_exception, not_taken);
  LeaveApiExitFrame();
  ret(stack_space * kPointerSize);
  bind(&promote_scheduled_exception);
  MaybeObject* result =
      TryTailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);
  if (result->IsFailure()) {
    return result;
  }
  bind(&empty_handle);
  // It was zero; the result is undefined.
  mov(eax, Factory::undefined_value());
  jmp(&prologue);

  // HandleScope limit has changed. Delete allocated extensions.
  bind(&delete_allocated_handles);
  mov(Operand::StaticVariable(limit_address), edi);
  mov(edi, eax);  // Preserve the result value across the C call.
  mov(eax, Immediate(ExternalReference::delete_handle_scope_extensions()));
  call(Operand(eax));
  mov(eax, edi);
  jmp(&leave_exit_frame);

  return result;
}
1319
1320
Steve Block6ded16b2010-05-10 14:33:55 +01001321void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001322 // Set the entry point and jump to the C entry runtime stub.
1323 mov(ebx, Immediate(ext));
1324 CEntryStub ces(1);
1325 jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
1326}
1327
1328
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08001329MaybeObject* MacroAssembler::TryJumpToExternalReference(
1330 const ExternalReference& ext) {
1331 // Set the entry point and jump to the C entry runtime stub.
1332 mov(ebx, Immediate(ext));
1333 CEntryStub ces(1);
1334 return TryTailCallStub(&ces);
1335}
1336
1337
Steve Blocka7e24c12009-10-30 11:49:00 +00001338void MacroAssembler::InvokePrologue(const ParameterCount& expected,
1339 const ParameterCount& actual,
1340 Handle<Code> code_constant,
1341 const Operand& code_operand,
1342 Label* done,
Ben Murdochb0fe1622011-05-05 13:52:32 +01001343 InvokeFlag flag,
1344 PostCallGenerator* post_call_generator) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001345 bool definitely_matches = false;
1346 Label invoke;
1347 if (expected.is_immediate()) {
1348 ASSERT(actual.is_immediate());
1349 if (expected.immediate() == actual.immediate()) {
1350 definitely_matches = true;
1351 } else {
1352 mov(eax, actual.immediate());
1353 const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
1354 if (expected.immediate() == sentinel) {
1355 // Don't worry about adapting arguments for builtins that
1356 // don't want that done. Skip adaption code by making it look
1357 // like we have a match between expected and actual number of
1358 // arguments.
1359 definitely_matches = true;
1360 } else {
1361 mov(ebx, expected.immediate());
1362 }
1363 }
1364 } else {
1365 if (actual.is_immediate()) {
1366 // Expected is in register, actual is immediate. This is the
1367 // case when we invoke function values without going through the
1368 // IC mechanism.
1369 cmp(expected.reg(), actual.immediate());
1370 j(equal, &invoke);
1371 ASSERT(expected.reg().is(ebx));
1372 mov(eax, actual.immediate());
1373 } else if (!expected.reg().is(actual.reg())) {
1374 // Both expected and actual are in (different) registers. This
1375 // is the case when we invoke functions using call and apply.
1376 cmp(expected.reg(), Operand(actual.reg()));
1377 j(equal, &invoke);
1378 ASSERT(actual.reg().is(eax));
1379 ASSERT(expected.reg().is(ebx));
1380 }
1381 }
1382
1383 if (!definitely_matches) {
1384 Handle<Code> adaptor =
1385 Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
1386 if (!code_constant.is_null()) {
1387 mov(edx, Immediate(code_constant));
1388 add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
1389 } else if (!code_operand.is_reg(edx)) {
1390 mov(edx, code_operand);
1391 }
1392
1393 if (flag == CALL_FUNCTION) {
1394 call(adaptor, RelocInfo::CODE_TARGET);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001395 if (post_call_generator != NULL) post_call_generator->Generate();
Steve Blocka7e24c12009-10-30 11:49:00 +00001396 jmp(done);
1397 } else {
1398 jmp(adaptor, RelocInfo::CODE_TARGET);
1399 }
1400 bind(&invoke);
1401 }
1402}
1403
1404
// Invoke code located at |code| (an operand, e.g. a function's code-entry
// field), adapting arguments if the expected and actual counts differ.
// |post_call_generator| (may be NULL) is emitted right after each call site.
void MacroAssembler::InvokeCode(const Operand& code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag,
                                PostCallGenerator* post_call_generator) {
  Label done;
  InvokePrologue(expected, actual, Handle<Code>::null(), code,
                 &done, flag, post_call_generator);
  if (flag == CALL_FUNCTION) {
    call(code);
    if (post_call_generator != NULL) post_call_generator->Generate();
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code);
  }
  bind(&done);
}
1422
1423
// Invoke a known code object |code| using relocation mode |rmode|,
// adapting arguments if the expected and actual counts differ.
// |post_call_generator| (may be NULL) is emitted right after each call site.
void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag,
                                PostCallGenerator* post_call_generator) {
  Label done;
  Operand dummy(eax);  // Unused: the code handle is passed as a constant.
  InvokePrologue(expected, actual, code, dummy, &done,
                 flag, post_call_generator);
  if (flag == CALL_FUNCTION) {
    call(code, rmode);
    if (post_call_generator != NULL) post_call_generator->Generate();
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code, rmode);
  }
  bind(&done);
}
1443
1444
// Invoke the JSFunction in |fun| (must be edi, per calling convention):
// load its context into esi and its formal parameter count into ebx, then
// invoke through the function's code-entry field.
void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    PostCallGenerator* post_call_generator) {
  ASSERT(fun.is(edi));
  mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  SmiUntag(ebx);  // The parameter count is stored as a smi.

  ParameterCount expected(ebx);
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, actual, flag, post_call_generator);
}
1459
1460
Andrei Popescu402d9372010-02-26 13:31:12 +00001461void MacroAssembler::InvokeFunction(JSFunction* function,
1462 const ParameterCount& actual,
Ben Murdochb0fe1622011-05-05 13:52:32 +01001463 InvokeFlag flag,
1464 PostCallGenerator* post_call_generator) {
Andrei Popescu402d9372010-02-26 13:31:12 +00001465 ASSERT(function->is_compiled());
1466 // Get the function and setup the context.
1467 mov(edi, Immediate(Handle<JSFunction>(function)));
1468 mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001469
Andrei Popescu402d9372010-02-26 13:31:12 +00001470 ParameterCount expected(function->shared()->formal_parameter_count());
Ben Murdochb0fe1622011-05-05 13:52:32 +01001471 if (V8::UseCrankshaft()) {
1472 // TODO(kasperl): For now, we always call indirectly through the
1473 // code field in the function to allow recompilation to take effect
1474 // without changing any of the call sites.
1475 InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
1476 expected, actual, flag, post_call_generator);
1477 } else {
1478 Handle<Code> code(function->code());
1479 InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET,
1480 flag, post_call_generator);
1481 }
Andrei Popescu402d9372010-02-26 13:31:12 +00001482}
1483
1484
Ben Murdochb0fe1622011-05-05 13:52:32 +01001485void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
1486 InvokeFlag flag,
1487 PostCallGenerator* post_call_generator) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001488 // Calls are not allowed in some stubs.
1489 ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());
1490
1491 // Rely on the assertion to check that the number of provided
1492 // arguments match the expected number of arguments. Fake a
1493 // parameter count to avoid emitting code to do the check.
1494 ParameterCount expected(0);
Steve Block791712a2010-08-27 10:21:07 +01001495 GetBuiltinFunction(edi, id);
1496 InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
Ben Murdochb0fe1622011-05-05 13:52:32 +01001497 expected, expected, flag, post_call_generator);
Steve Blocka7e24c12009-10-30 11:49:00 +00001498}
1499
Steve Block791712a2010-08-27 10:21:07 +01001500void MacroAssembler::GetBuiltinFunction(Register target,
1501 Builtins::JavaScript id) {
1502 // Load the JavaScript builtin function from the builtins object.
1503 mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
1504 mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
1505 mov(target, FieldOperand(target,
1506 JSBuiltinsObject::OffsetOfFunctionWithId(id)));
1507}
Steve Blocka7e24c12009-10-30 11:49:00 +00001508
// Load the code entry point of builtin |id| into |target|.  Clobbers edi
// (it receives the builtin function object), so target must not be edi.
void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(edi));
  // Load the JavaScript builtin function from the builtins object.
  GetBuiltinFunction(edi, id);
  // Load the code entry point from the function into the target register.
  mov(target, FieldOperand(edi, JSFunction::kCodeEntryOffset));
}
1516
1517
Steve Blockd0582a62009-12-15 09:54:21 +00001518void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
1519 if (context_chain_length > 0) {
1520 // Move up the chain of contexts to the context containing the slot.
1521 mov(dst, Operand(esi, Context::SlotOffset(Context::CLOSURE_INDEX)));
1522 // Load the function context (which is the incoming, outer context).
1523 mov(dst, FieldOperand(dst, JSFunction::kContextOffset));
1524 for (int i = 1; i < context_chain_length; i++) {
1525 mov(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
1526 mov(dst, FieldOperand(dst, JSFunction::kContextOffset));
1527 }
1528 // The context may be an intermediate context, not a function context.
1529 mov(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
1530 } else { // Slot is in the current function context.
1531 // The context may be an intermediate context, not a function context.
1532 mov(dst, Operand(esi, Context::SlotOffset(Context::FCONTEXT_INDEX)));
1533 }
1534}
1535
1536
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001537void MacroAssembler::LoadGlobalFunction(int index, Register function) {
1538 // Load the global or builtins object from the current context.
1539 mov(function, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
1540 // Load the global context from the global or builtins object.
1541 mov(function, FieldOperand(function, GlobalObject::kGlobalContextOffset));
1542 // Load the function from the global context.
1543 mov(function, Operand(function, Context::SlotOffset(index)));
1544}
1545
1546
// Load into |map| the initial map of |function|.  Global functions are
// guaranteed to have one; debug builds verify the loaded value is a map.
void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map. The global functions all have initial maps.
  mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (FLAG_debug_code) {
    // A map's map is the meta map; anything else means the field held a
    // prototype (or garbage) instead of an initial map.
    Label ok, fail;
    CheckMap(map, Factory::meta_map(), &fail, false);
    jmp(&ok);
    bind(&fail);
    Abort("Global functions must have initial map");
    bind(&ok);
  }
}
1560
Steve Blockd0582a62009-12-15 09:54:21 +00001561
Ben Murdochb0fe1622011-05-05 13:52:32 +01001562int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
1563 // The registers are pushed starting with the lowest encoding,
1564 // which means that lowest encodings are furthest away from
1565 // the stack pointer.
1566 ASSERT(reg_code >= 0 && reg_code < kNumSafepointRegisters);
1567 return kNumSafepointRegisters - reg_code - 1;
1568}
1569
1570
// Emits a plain return that pops no arguments off the caller's stack.
void MacroAssembler::Ret() {
  ret(0);
}
1574
1575
Leon Clarkee46be812010-01-19 14:06:41 +00001576void MacroAssembler::Drop(int stack_elements) {
1577 if (stack_elements > 0) {
1578 add(Operand(esp), Immediate(stack_elements * kPointerSize));
1579 }
1580}
1581
1582
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001583void MacroAssembler::Move(Register dst, Register src) {
1584 if (!dst.is(src)) {
1585 mov(dst, src);
1586 }
1587}
1588
1589
// Loads the handle |value| (embedded as an immediate) into |dst|.
void MacroAssembler::Move(Register dst, Handle<Object> value) {
  mov(dst, value);
}
1593
1594
Steve Blocka7e24c12009-10-30 11:49:00 +00001595void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
1596 if (FLAG_native_code_counters && counter->Enabled()) {
1597 mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
1598 }
1599}
1600
1601
1602void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
1603 ASSERT(value > 0);
1604 if (FLAG_native_code_counters && counter->Enabled()) {
1605 Operand operand = Operand::StaticVariable(ExternalReference(counter));
1606 if (value == 1) {
1607 inc(operand);
1608 } else {
1609 add(operand, Immediate(value));
1610 }
1611 }
1612}
1613
1614
1615void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
1616 ASSERT(value > 0);
1617 if (FLAG_native_code_counters && counter->Enabled()) {
1618 Operand operand = Operand::StaticVariable(ExternalReference(counter));
1619 if (value == 1) {
1620 dec(operand);
1621 } else {
1622 sub(operand, Immediate(value));
1623 }
1624 }
1625}
1626
1627
Leon Clarked91b9f72010-01-27 17:25:45 +00001628void MacroAssembler::IncrementCounter(Condition cc,
1629 StatsCounter* counter,
1630 int value) {
1631 ASSERT(value > 0);
1632 if (FLAG_native_code_counters && counter->Enabled()) {
1633 Label skip;
1634 j(NegateCondition(cc), &skip);
1635 pushfd();
1636 IncrementCounter(counter, value);
1637 popfd();
1638 bind(&skip);
1639 }
1640}
1641
1642
1643void MacroAssembler::DecrementCounter(Condition cc,
1644 StatsCounter* counter,
1645 int value) {
1646 ASSERT(value > 0);
1647 if (FLAG_native_code_counters && counter->Enabled()) {
1648 Label skip;
1649 j(NegateCondition(cc), &skip);
1650 pushfd();
1651 DecrementCounter(counter, value);
1652 popfd();
1653 bind(&skip);
1654 }
1655}
1656
1657
Steve Blocka7e24c12009-10-30 11:49:00 +00001658void MacroAssembler::Assert(Condition cc, const char* msg) {
1659 if (FLAG_debug_code) Check(cc, msg);
1660}
1661
1662
// Debug-build check: emits code that aborts unless |elements| is a fast
// elements backing store (a FixedArray or copy-on-write FixedArray).
void MacroAssembler::AssertFastElements(Register elements) {
  if (FLAG_debug_code) {
    Label ok;
    // Plain fixed array is fine.
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(Factory::fixed_array_map()));
    j(equal, &ok);
    // Copy-on-write fixed array is also fine.
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(Factory::fixed_cow_array_map()));
    j(equal, &ok);
    Abort("JSObject with fast elements map has slow elements");
    bind(&ok);
  }
}
1676
1677
// Emits code (in all build modes) that aborts with |msg| when condition |cc|
// does not hold.
void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L, taken);  // Condition holds: skip the abort.
  Abort(msg);
  // will not return here
  bind(&L);
}
1685
1686
Steve Block6ded16b2010-05-10 14:33:55 +01001687void MacroAssembler::CheckStackAlignment() {
1688 int frame_alignment = OS::ActivationFrameAlignment();
1689 int frame_alignment_mask = frame_alignment - 1;
1690 if (frame_alignment > kPointerSize) {
1691 ASSERT(IsPowerOf2(frame_alignment));
1692 Label alignment_as_expected;
1693 test(esp, Immediate(frame_alignment_mask));
1694 j(zero, &alignment_as_expected);
1695 // Abort if stack is not aligned.
1696 int3();
1697 bind(&alignment_as_expected);
1698 }
1699}
1700
1701
// Emits code that terminates execution via Runtime::kAbort, passing |msg|.
// Never returns to the emitted call site.
void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  // Clear the tag bits so p0 is a valid smi; p1 - p0 is the lost offset.
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  set_allow_stub_calls(true);

  push(eax);
  // Push the aligned pointer and the alignment delta, both smi-encoded.
  push(Immediate(p0));
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
  int3();
}
1727
1728
// Emits code that jumps to |on_not_number| when |reg| does not hold a heap
// number.  When the type feedback already proves a number, emits nothing.
// |reg| must not be a smi (checked in debug builds).
void MacroAssembler::JumpIfNotNumber(Register reg,
                                     TypeInfo info,
                                     Label* on_not_number) {
  if (FLAG_debug_code) AbortIfSmi(reg);
  if (!info.IsNumber()) {
    cmp(FieldOperand(reg, HeapObject::kMapOffset),
        Factory::heap_number_map());
    j(not_equal, on_not_number);
  }
}
1739
1740
// Emits code that truncates the heap number in |source| to a 32-bit integer
// in |dst|, jumping to |on_not_int32| when the value does not fit.  |scratch|
// may be no_reg, in which case |dst| (push/popped if it aliases |source|) is
// used as the scratch register.
void MacroAssembler::ConvertToInt32(Register dst,
                                    Register source,
                                    Register scratch,
                                    TypeInfo info,
                                    Label* on_not_int32) {
  if (FLAG_debug_code) {
    AbortIfSmi(source);
    AbortIfNotNumber(source);
  }
  if (info.IsInteger32()) {
    // Type feedback guarantees the value fits; convert unconditionally.
    cvttsd2si(dst, FieldOperand(source, HeapNumber::kValueOffset));
  } else {
    Label done;
    // If there is no scratch and dst aliases source, dst must be saved so
    // source can be restored on the failure path.
    bool push_pop = (scratch.is(no_reg) && dst.is(source));
    ASSERT(!scratch.is(source));
    if (push_pop) {
      push(dst);
      scratch = dst;
    }
    if (scratch.is(no_reg)) scratch = dst;
    cvttsd2si(scratch, FieldOperand(source, HeapNumber::kValueOffset));
    // cvttsd2si yields 0x80000000 on overflow/NaN, the "integer indefinite"
    // value, which signals that the double was not a representable int32.
    cmp(scratch, 0x80000000u);
    if (push_pop) {
      j(not_equal, &done);
      // Failure: restore the original dst/source before bailing out.
      pop(dst);
      jmp(on_not_int32);
    } else {
      j(equal, on_not_int32);
    }

    bind(&done);
    if (push_pop) {
      // Success: discard the saved copy without clobbering the result.
      add(Operand(esp), Immediate(kPointerSize));  // Pop.
    }
    if (!scratch.is(dst)) {
      mov(dst, scratch);
    }
  }
}
1780
1781
// Emits code that loads 2^power as a double into |dst| by building the
// IEEE-754 bit pattern directly: biased exponent, zero mantissa.
void MacroAssembler::LoadPowerOf2(XMMRegister dst,
                                  Register scratch,
                                  int power) {
  // The biased exponent must fit in the double's exponent field.
  ASSERT(is_uintn(power + HeapNumber::kExponentBias,
                  HeapNumber::kExponentBits));
  mov(scratch, Immediate(power + HeapNumber::kExponentBias));
  movd(dst, Operand(scratch));
  // Shift the exponent into position above the mantissa bits.
  psllq(dst, HeapNumber::kMantissaBits);
}
1791
1792
// Emits code that jumps to |failure| unless |instance_type| describes a
// sequential ASCII string.  |scratch| may alias |instance_type|.
void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Register scratch,
    Label* failure) {
  if (!scratch.is(instance_type)) {
    mov(scratch, instance_type);
  }
  // Keep only the bits that distinguish string-ness, representation
  // (sequential/cons/external) and encoding (ascii/two-byte).
  and_(scratch,
       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
  cmp(scratch, kStringTag | kSeqStringTag | kAsciiStringTag);
  j(not_equal, failure);
}
1805
1806
// Emits code that jumps to |failure| unless both |object1| and |object2| are
// sequential ASCII strings.  Both scratch registers are clobbered.
void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register object1,
                                                         Register object2,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         Label* failure) {
  // Check that both objects are not smis.
  // A smi has tag bit 0 clear, so AND-ing the two values leaves the tag bit
  // clear if either operand is a smi.
  ASSERT_EQ(0, kSmiTag);
  mov(scratch1, Operand(object1));
  and_(scratch1, Operand(object2));
  test(scratch1, Immediate(kSmiTagMask));
  j(zero, failure);

  // Load instance type for both strings.
  mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
  mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
  movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ascii strings.
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
  // Interleave bits from both instance types and compare them in one check.
  // The mask must not collide with its 3-bit-shifted copy for this to work.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  and_(scratch1, kFlatAsciiStringMask);
  and_(scratch2, kFlatAsciiStringMask);
  // scratch1 | (scratch2 << 3), computed with a single lea.
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3));
  j(not_equal, failure);
}
1837
1838
Steve Block6ded16b2010-05-10 14:33:55 +01001839void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
1840 int frameAlignment = OS::ActivationFrameAlignment();
1841 if (frameAlignment != 0) {
1842 // Make stack end at alignment and make room for num_arguments words
1843 // and the original value of esp.
1844 mov(scratch, esp);
1845 sub(Operand(esp), Immediate((num_arguments + 1) * kPointerSize));
1846 ASSERT(IsPowerOf2(frameAlignment));
1847 and_(esp, -frameAlignment);
1848 mov(Operand(esp, num_arguments * kPointerSize), scratch);
1849 } else {
1850 sub(Operand(esp), Immediate(num_arguments * kPointerSize));
1851 }
1852}
1853
1854
// Emits a call to the C function at |function|, loading its address into eax
// first.  Must be paired with a preceding PrepareCallCFunction.
void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Trashing eax is ok as it will be the return value.
  mov(Operand(eax), Immediate(function));
  CallCFunction(eax, num_arguments);
}
1861
1862
1863void MacroAssembler::CallCFunction(Register function,
1864 int num_arguments) {
1865 // Check stack alignment.
1866 if (FLAG_debug_code) {
1867 CheckStackAlignment();
1868 }
1869
1870 call(Operand(function));
1871 if (OS::ActivationFrameAlignment() != 0) {
1872 mov(esp, Operand(esp, num_arguments * kPointerSize));
1873 } else {
1874 add(Operand(esp), Immediate(num_arguments * sizeof(int32_t)));
1875 }
1876}
1877
1878
// Sets up a macro assembler that writes directly over existing code at
// |address|, allowing exactly |size| bytes to be patched.
CodePatcher::CodePatcher(byte* address, int size)
    : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap on order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
1886
1887
// Finalizes the patch: flushes the instruction cache over the patched region
// and verifies (in debug builds) that exactly |size_| bytes were emitted.
CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
1896
1897
1898} } // namespace v8::internal
Leon Clarkef7060e22010-06-03 12:02:55 +01001899
1900#endif // V8_TARGET_ARCH_IA32