blob: 10c942a5b3a6d6f2f555048d9a27212daad40a90 [file] [log] [blame]
Ben Murdochb0fe1622011-05-05 13:52:32 +01001// Copyright 2010 the V8 project authors. All rights reserved.
Steve Blocka7e24c12009-10-30 11:49:00 +00002// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
29
Leon Clarkef7060e22010-06-03 12:02:55 +010030#if defined(V8_TARGET_ARCH_IA32)
31
Steve Blocka7e24c12009-10-30 11:49:00 +000032#include "bootstrapper.h"
33#include "codegen-inl.h"
34#include "debug.h"
35#include "runtime.h"
36#include "serialize.h"
37
38namespace v8 {
39namespace internal {
40
41// -------------------------------------------------------------------------
42// MacroAssembler implementation.
43
// Construct a macro assembler that emits into the given buffer.
// Stub generation is off and stub calls are allowed by default;
// code_object_ starts out as undefined and is patched with the real
// code object later (see the Check in EnterFrame).
MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      code_object_(Heap::undefined_value()) {
}
50
51
// Mark the page region containing 'addr' as dirty so the GC rescans it.
// 'object' must point into old space (checked under --debug-code).
// Clobbers 'object' and 'addr'; 'scratch' is only used for the debug check.
void MacroAssembler::RecordWriteHelper(Register object,
                                       Register addr,
                                       Register scratch) {
  if (FLAG_debug_code) {
    // Check that the object is not in new space.
    Label not_in_new_space;
    InNewSpace(object, scratch, not_equal, &not_in_new_space);
    Abort("new-space object passed to RecordWriteHelper");
    bind(&not_in_new_space);
  }

  // Compute the page start address from the heap object pointer, and reuse
  // the 'object' register for it.
  and_(object, ~Page::kPageAlignmentMask);

  // Compute number of region covering addr. See Page::GetRegionNumberForAddress
  // method for more details.
  and_(addr, Page::kPageAlignmentMask);
  shr(addr, Page::kRegionSizeLog2);

  // Set dirty mark for region.
  bts(Operand(object, Page::kDirtyFlagOffset), addr);
}
75
76
// Record a write of 'value' into the field at 'object' + 'offset'.
// When offset == 0, 'scratch' is expected to already hold the smi array
// index of the stored element (mirroring KeyedStoreIC::GenerateGeneric) —
// TODO(review): confirm this caller contract against all call sites.
// All three registers are clobbered (zapped under --debug-code); esi is
// never allowed as an argument because callers rely on the context
// register surviving.
void MacroAssembler::RecordWrite(Register object,
                                 int offset,
                                 Register value,
                                 Register scratch) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are esi.
  ASSERT(!object.is(esi) && !value.is(esi) && !scratch.is(esi));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  NearLabel done;

  // Skip barrier if writing a smi.
  ASSERT_EQ(0, kSmiTag);
  test(value, Immediate(kSmiTagMask));
  j(zero, &done);

  // Skip barrier if the target object itself is in new space.
  InNewSpace(object, value, equal, &done);

  // The offset is relative to a tagged or untagged HeapObject pointer,
  // so either offset or offset + kHeapObjectTag must be a
  // multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize) ||
         IsAligned(offset + kHeapObjectTag, kPointerSize));

  Register dst = scratch;
  if (offset != 0) {
    lea(dst, Operand(object, offset));
  } else {
    // Array access: calculate the destination address in the same manner as
    // KeyedStoreIC::GenerateGeneric. Multiply a smi by 2 to get an offset
    // into an array of words.
    ASSERT_EQ(1, kSmiTagSize);
    ASSERT_EQ(0, kSmiTag);
    lea(dst, Operand(object, dst, times_half_pointer_size,
                     FixedArray::kHeaderSize - kHeapObjectTag));
  }
  RecordWriteHelper(object, dst, value);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    mov(object, Immediate(BitCast<int32_t>(kZapValue)));
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
    mov(scratch, Immediate(BitCast<int32_t>(kZapValue)));
  }
}
127
128
// Record a write of 'value' to the pre-computed destination 'address'
// inside 'object'. All three registers are clobbered (zapped under
// --debug-code); esi is disallowed because callers depend on the
// context register being preserved.
void MacroAssembler::RecordWrite(Register object,
                                 Register address,
                                 Register value) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are esi.
  ASSERT(!object.is(esi) && !value.is(esi) && !address.is(esi));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  // Skip barrier if writing a smi.
  ASSERT_EQ(0, kSmiTag);
  test(value, Immediate(kSmiTagMask));
  j(zero, &done);

  // Skip barrier if the target object itself is in new space.
  InNewSpace(object, value, equal, &done);

  RecordWriteHelper(object, address, value);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    mov(object, Immediate(BitCast<int32_t>(kZapValue)));
    mov(address, Immediate(BitCast<int32_t>(kZapValue)));
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
  }
}
160
161
#ifdef ENABLE_DEBUGGER_SUPPORT
// Emit a call into the runtime's debug-break entry: zero arguments in
// eax, the Runtime::kDebugBreak entry address in ebx, dispatched via a
// CEntryStub with DEBUG_BREAK relocation so the debugger can find it.
void MacroAssembler::DebugBreak() {
  Set(eax, Immediate(0));
  mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak)));
  CEntryStub ces(1);
  call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}
#endif
170
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100171
// Load an immediate into 'dst', using the shorter xor encoding when the
// value is zero. Note: xor clobbers the EFLAGS, unlike mov.
void MacroAssembler::Set(Register dst, const Immediate& x) {
  if (x.is_zero()) {
    xor_(dst, Operand(dst));  // shorter than mov
  } else {
    mov(dst, x);
  }
}
179
180
// Store an immediate into a memory operand. No zero-value shortcut is
// possible here: there is no xor-with-memory idiom that is shorter.
void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}
184
185
// Load the map of 'heap_object' into 'map' and compare its instance
// type against 'type'. Leaves the comparison result in EFLAGS for the
// caller's conditional jump; 'map' keeps the loaded map afterwards.
void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}
192
193
// Compare the instance-type byte of an already-loaded 'map' against
// 'type'. Uses a byte compare, so the type must fit in 8 bits.
void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       static_cast<int8_t>(type));
}
198
199
// Jump to 'fail' unless 'obj' is a heap object whose map is exactly
// 'map'. When the caller already knows the value is a heap object it
// passes is_heap_object == true to skip the smi check.
void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              bool is_heap_object) {
  if (!is_heap_object) {
    // A smi has no map; bail out before dereferencing.
    test(obj, Immediate(kSmiTagMask));
    j(zero, fail);
  }
  cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
  j(not_equal, fail);
}
211
212
// Test whether 'heap_object' is a string. Loads the object's map into
// 'map' and its instance type into 'instance_type' as side effects, and
// returns the condition (zero) that holds when it is a string.
// Precondition: heap_object is not a smi (no smi check is emitted).
Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  // String types have the kIsNotStringMask bits clear.
  ASSERT(kNotStringTag != 0);
  test(instance_type, Immediate(kIsNotStringMask));
  return zero;
}
222
223
// Jump to 'fail' unless 'heap_object' is a JSObject. Loads the map into
// 'map' as a side effect. Precondition: heap_object is not a smi.
void MacroAssembler::IsObjectJSObjectType(Register heap_object,
                                          Register map,
                                          Register scratch,
                                          Label* fail) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  IsInstanceJSObjectType(map, scratch, fail);
}
231
232
// Jump to 'fail' unless the instance type of 'map' lies in the JS
// object range [FIRST_JS_OBJECT_TYPE, LAST_JS_OBJECT_TYPE]. The range
// check is done with a single unsigned compare after biasing by the
// range start. Clobbers 'scratch'.
void MacroAssembler::IsInstanceJSObjectType(Register map,
                                            Register scratch,
                                            Label* fail) {
  movzx_b(scratch, FieldOperand(map, Map::kInstanceTypeOffset));
  sub(Operand(scratch), Immediate(FIRST_JS_OBJECT_TYPE));
  cmp(scratch, LAST_JS_OBJECT_TYPE - FIRST_JS_OBJECT_TYPE);
  j(above, fail);
}
241
242
// Compare the two values on top of the x87 FPU stack, pop both, and
// leave the result in EFLAGS. With CMOV support, fucomip writes EFLAGS
// directly; otherwise the FPU status word is transferred via ax/sahf
// (eax is preserved by the surrounding push/pop).
void MacroAssembler::FCmp() {
  if (CpuFeatures::IsSupported(CMOV)) {
    fucomip();
    // fucomip pops only one operand; free and pop the remaining one.
    ffree(0);
    fincstp();
  } else {
    fucompp();
    push(eax);
    fnstsw_ax();
    sahf();
    pop(eax);
  }
}
256
257
// Debug-mode check: abort unless 'object' is a smi or a heap number.
// Emits nothing useful in release builds since Assert compiles away
// without --debug-code.
void MacroAssembler::AbortIfNotNumber(Register object) {
  Label ok;
  // Smis are numbers; skip the map check for them.
  test(object, Immediate(kSmiTagMask));
  j(zero, &ok);
  cmp(FieldOperand(object, HeapObject::kMapOffset),
      Factory::heap_number_map());
  Assert(equal, "Operand not a number");
  bind(&ok);
}
267
268
// Debug-mode check: abort unless 'object' has a smi tag.
void MacroAssembler::AbortIfNotSmi(Register object) {
  test(object, Immediate(kSmiTagMask));
  Assert(equal, "Operand is not a smi");
}
273
274
// Debug-mode check: abort unless 'object' is a string (not a smi, and
// instance type below FIRST_NONSTRING_TYPE). 'object' is preserved via
// push/pop around the map load.
void MacroAssembler::AbortIfNotString(Register object) {
  test(object, Immediate(kSmiTagMask));
  Assert(not_equal, "Operand is not a string");
  push(object);
  mov(object, FieldOperand(object, HeapObject::kMapOffset));
  CmpInstanceType(object, FIRST_NONSTRING_TYPE);
  pop(object);
  Assert(below, "Operand is not a string");
}
284
285
// Debug-mode check: abort if 'object' has a smi tag.
void MacroAssembler::AbortIfSmi(Register object) {
  test(object, Immediate(kSmiTagMask));
  Assert(not_equal, "Operand is a smi");
}
290
291
// Build a standard internal frame: push ebp, link it, then push the
// context, a smi frame-type marker, and this code object. Under
// --debug-code, verify the code object slot was patched (it must not
// still hold the undefined placeholder from the constructor).
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, Operand(esp));
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(CodeObject()));
  if (FLAG_debug_code) {
    cmp(Operand(esp, 0), Immediate(Factory::undefined_value()));
    Check(not_equal, "code object not properly patched");
  }
}
303
304
// Tear down a frame created by EnterFrame. Under --debug-code, check
// that the frame-type marker on the stack matches the expected type.
void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (FLAG_debug_code) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, "stack frame types must match");
  }
  // leave = mov esp, ebp; pop ebp.
  leave();
}
313
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100314
// First half of exit-frame setup: link ebp, reserve the saved-entry-sp
// slot, push the code object, and publish ebp/esi in the Top
// c_entry_fp/context globals so the stack walker can find this frame.
void MacroAssembler::EnterExitFramePrologue() {
  // Setup the frame structure on the stack.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(ebp);
  mov(ebp, Operand(esp));

  // Reserve room for entry stack pointer and push the code object.
  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  push(Immediate(CodeObject()));  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  ExternalReference context_address(Top::k_context_address);
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);
}
Steve Blocka7e24c12009-10-30 11:49:00 +0000334
Steve Blocka7e24c12009-10-30 11:49:00 +0000335
// Second half of exit-frame setup: reserve 'argc' argument slots
// (plus, optionally, space for all XMM registers, which are saved
// below the fixed frame slots), align esp to the OS frame alignment,
// and patch the saved-entry-sp slot with the final esp.
void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) {
  // Optionally save all XMM registers.
  if (save_doubles) {
    CpuFeatures::Scope scope(SSE2);
    int space = XMMRegister::kNumRegisters * kDoubleSize + argc * kPointerSize;
    sub(Operand(esp), Immediate(space));
    // -2 * kPointerSize skips the code-object and saved-sp slots
    // pushed by EnterExitFramePrologue.
    int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movdbl(Operand(ebp, offset - ((i + 1) * kDoubleSize)), reg);
    }
  } else {
    sub(Operand(esp), Immediate(argc * kPointerSize));
  }

  // Get the required frame alignment for the OS.
  static const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}
361
362
// Enter an exit frame for a call into C. On entry eax holds the JS
// argument count; this sets up edi = argc and esi = pointer to the
// first argument (callee-saved in the C ABI, so they survive the call),
// then reserves two C argument slots.
void MacroAssembler::EnterExitFrame(bool save_doubles) {
  EnterExitFramePrologue();

  // Setup argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, Operand(eax));
  lea(esi, Operand(ebp, eax, times_4, offset));

  EnterExitFrameEpilogue(2, save_doubles);
}
373
374
// Enter an exit frame for a direct API call: same frame shape as
// EnterExitFrame but with a caller-chosen argument-slot count and no
// XMM register saving.
void MacroAssembler::EnterApiExitFrame(int argc) {
  EnterExitFramePrologue();
  EnterExitFrameEpilogue(argc, false);
}
379
380
// Tear down a frame built by EnterExitFrame: optionally restore the
// XMM registers, restore ebp, drop the JS arguments and receiver
// (esi still points at the first argument), re-push the return
// address, and clear the Top bookkeeping via the epilogue.
void MacroAssembler::LeaveExitFrame(bool save_doubles) {
  // Optionally restore all XMM registers.
  if (save_doubles) {
    CpuFeatures::Scope scope(SSE2);
    // Same slot layout as EnterExitFrameEpilogue.
    int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movdbl(reg, Operand(ebp, offset - ((i + 1) * kDoubleSize)));
    }
  }

  // Get the return address from the stack and restore the frame pointer.
  mov(ecx, Operand(ebp, 1 * kPointerSize));
  mov(ebp, Operand(ebp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  lea(esp, Operand(esi, 1 * kPointerSize));

  // Push the return address to get ready to return.
  push(ecx);

  LeaveExitFrameEpilogue();
}
404
// Shared exit-frame teardown tail: restore esi (the context) from Top
// and clear the c_entry_fp slot so the stack walker no longer sees
// this frame. In debug builds the context slot is also zeroed.
void MacroAssembler::LeaveExitFrameEpilogue() {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Top::k_context_address);
  mov(esi, Operand::StaticVariable(context_address));
#ifdef DEBUG
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}
417
418
// Tear down a frame built by EnterApiExitFrame. Unlike LeaveExitFrame
// this does not drop the JS arguments — the caller's return sequence
// handles that — it just unlinks ebp and clears the Top bookkeeping.
void MacroAssembler::LeaveApiExitFrame() {
  mov(esp, Operand(ebp));
  pop(ebp);

  LeaveExitFrameEpilogue();
}
425
426
// Push a stack handler (state, fp, next-handler link; pc is already on
// top of stack) and make it the current handler in Top. JS handlers
// record ebp; JS-entry handlers record a NULL fp, which the throw code
// checks before dereferencing.
void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  // Adjust this code if not the case.
  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
  // The pc (return address) is already on TOS.
  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      push(Immediate(StackHandler::TRY_CATCH));
    } else {
      push(Immediate(StackHandler::TRY_FINALLY));
    }
    push(ebp);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The frame pointer does not point to a JS frame so we save NULL
    // for ebp. We expect the code throwing an exception to check ebp
    // before dereferencing it to restore the context.
    push(Immediate(StackHandler::ENTRY));
    push(Immediate(0));  // NULL frame pointer.
  }
  // Save the current handler as the next handler.
  push(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
  // Link this handler as the new current one.
  mov(Operand::StaticVariable(ExternalReference(Top::k_handler_address)), esp);
}
452
453
// Unlink the current stack handler: pop the next-handler link back
// into Top, then drop the rest of the handler frame from the stack.
void MacroAssembler::PopTryHandler() {
  ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
  pop(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
  add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize));
}
459
460
// Security check for access to a global proxy: jump to 'miss' unless
// the current lexical context and the holder's context either are the
// same global context or carry the same security token. Clobbers
// 'scratch'; 'holder_reg' is preserved via push/pop.
void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));

  // Load current lexical context from the stack frame.
  mov(scratch, Operand(ebp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (FLAG_debug_code) {
    cmp(Operand(scratch), Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
  }
  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, offset));
  mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    push(scratch);
    // Read the first word and compare to global_context_map.
    mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
    cmp(scratch, Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(scratch);
  }

  // Check if both contexts are the same.
  cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  j(equal, &same_contexts, taken);

  // Compare security tokens, save holder_reg on the stack so we can use it
  // as a temporary register.
  //
  // TODO(119): avoid push(holder_reg)/pop(holder_reg)
  push(holder_reg);
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    cmp(holder_reg, Factory::null_value());
    Check(not_equal, "JSGlobalProxy::context() should not be null.");

    push(holder_reg);
    // Read the first word and compare to global_context_map(),
    mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    cmp(holder_reg, Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);
  }

  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, token_offset));
  cmp(scratch, FieldOperand(holder_reg, token_offset));
  pop(holder_reg);
  j(not_equal, miss, not_taken);

  bind(&same_contexts);
}
527
528
// Load the new-space allocation top into 'result'. With
// RESULT_CONTAINS_TOP the caller promises 'result' already holds it
// (verified in debug builds) and 'scratch' must be no_reg. Otherwise,
// if a scratch register is supplied it is left holding the address of
// the top variable so UpdateAllocationTopHelper can store through it.
void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    ASSERT(scratch.is(no_reg));
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    cmp(result, Operand::StaticVariable(new_space_allocation_top));
    Check(equal, "Unexpected allocation top");
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available.
  if (scratch.is(no_reg)) {
    mov(result, Operand::StaticVariable(new_space_allocation_top));
  } else {
    mov(Operand(scratch), Immediate(new_space_allocation_top));
    mov(result, Operand(scratch, 0));
  }
}
555
556
// Store 'result_end' as the new new-space allocation top. If 'scratch'
// is provided it must still hold the address of the top variable (as
// left by LoadAllocationTopHelper), giving a shorter store encoding.
void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch) {
  if (FLAG_debug_code) {
    test(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, "Unaligned allocation in new space");
  }

  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Update new top. Use scratch if available.
  if (scratch.is(no_reg)) {
    mov(Operand::StaticVariable(new_space_allocation_top), result_end);
  } else {
    mov(Operand(scratch, 0), result_end);
  }
}
574
575
// Bump-allocate 'object_size' bytes in new space. On success 'result'
// holds the new object (tagged if TAG_OBJECT); on exhaustion control
// jumps to 'gc_required'. When result_end is no_reg the allocation is
// done in-place on 'result' and undone by subtraction afterwards.
// With --no-inline-new, always jumps to gc_required (registers are
// trashed first under --debug-code to surface misuse).
void MacroAssembler::AllocateInNewSpace(int object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (FLAG_debug_code) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      if (result_end.is_valid()) {
        mov(result_end, Immediate(0x7191));
      }
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  Register top_reg = result_end.is_valid() ? result_end : result;

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();

  if (top_reg.is(result)) {
    add(Operand(top_reg), Immediate(object_size));
  } else {
    lea(top_reg, Operand(result, object_size));
  }
  cmp(top_reg, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required, not_taken);

  // Update allocation top.
  UpdateAllocationTopHelper(top_reg, scratch);

  // Tag result if requested.
  if (top_reg.is(result)) {
    // result currently holds the new top; subtract back to the object
    // start (folding in the tag when requested).
    if ((flags & TAG_OBJECT) != 0) {
      sub(Operand(result), Immediate(object_size - kHeapObjectTag));
    } else {
      sub(Operand(result), Immediate(object_size));
    }
  } else if ((flags & TAG_OBJECT) != 0) {
    add(Operand(result), Immediate(kHeapObjectTag));
  }
}
629
630
// Bump-allocate header_size + element_count << element_size bytes in
// new space (variable-length objects such as strings and arrays). On
// success 'result' holds the new object (tagged if TAG_OBJECT) and
// 'result_end' the new top; on exhaustion jumps to 'gc_required'.
// 'element_count' is not modified.
void MacroAssembler::AllocateInNewSpace(int header_size,
                                        ScaleFactor element_size,
                                        Register element_count,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (FLAG_debug_code) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // Register element_count is not modified by the function.
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  lea(result_end, Operand(result, element_count, element_size, header_size));
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}
672
673
// Bump-allocate a runtime-computed number of bytes ('object_size'
// register) in new space. On success 'result' holds the new object
// (tagged if TAG_OBJECT) and 'result_end' the new top; on exhaustion
// jumps to 'gc_required'. 'object_size' itself is left unchanged
// unless it aliases result_end.
void MacroAssembler::AllocateInNewSpace(Register object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (FLAG_debug_code) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // object_size is left unchanged by this function.
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  if (!object_size.is(result_end)) {
    mov(result_end, object_size);
  }
  add(result_end, Operand(result));
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required, not_taken);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}
716
717
// Roll back the most recent new-space allocation by resetting the
// allocation top to 'object' (tag bits stripped first). Only valid if
// no further allocation happened since; the debug check enforces that
// the object lies below the current top.
void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Make sure the object has no tag before resetting top.
  and_(Operand(object), Immediate(~kHeapObjectTagMask));
#ifdef DEBUG
  cmp(object, Operand::StaticVariable(new_space_allocation_top));
  Check(below, "Undo allocation of non allocated memory");
#endif
  mov(Operand::StaticVariable(new_space_allocation_top), object);
}
730
731
// Allocate a (tagged) HeapNumber in new space and set its map; the
// value field is left uninitialized for the caller to fill in. Jumps
// to 'gc_required' when new space is exhausted.
void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate heap number in new space.
  AllocateInNewSpace(HeapNumber::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::heap_number_map()));
}
748
749
// Allocate a sequential two-byte string of 'length' characters
// (length is an untagged integer) and initialize its map, length (as
// a smi) and hash field. Jumps to 'gc_required' on exhaustion.
void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  ASSERT(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
  and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));

  // Allocate two byte string in new space.
  AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
783
784
// Allocate a sequential ASCII string of 'length' characters (length is
// an untagged integer) and initialize its map, length (as a smi) and
// hash field. Jumps to 'gc_required' on exhaustion.
void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, length);
  ASSERT(kCharSize == 1);
  add(Operand(scratch1), Immediate(kObjectAlignmentMask));
  and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));

  // Allocate ascii string in new space.
  AllocateInNewSpace(SeqAsciiString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::ascii_string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
818
819
// Allocates a SeqAsciiString for a statically-known, positive |length| and
// initializes its map, length and hash field. scratch1 and scratch2 are
// clobbered. Jumps to |gc_required| if new-space allocation fails.
void MacroAssembler::AllocateAsciiString(Register result,
                                         int length,
                                         Register scratch1,
                                         Register scratch2,
                                         Label* gc_required) {
  ASSERT(length > 0);

  // Allocate ascii string in new space. SizeFor(length) supplies the total
  // object size (header plus characters — presumably already aligned; see
  // SeqAsciiString in objects.h).
  AllocateInNewSpace(SeqAsciiString::SizeFor(length),
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::ascii_string_map()));
  mov(FieldOperand(result, String::kLengthOffset),
      Immediate(Smi::FromInt(length)));
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
843
844
// Allocates a (two-byte) cons string cell in new space and installs its map.
// All other fields are left uninitialized for the caller to fill in.
// Jumps to |gc_required| if new-space allocation fails.
void MacroAssembler::AllocateConsString(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate the fixed-size cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::cons_string_map()));
}
861
862
// Allocates an ascii cons string cell in new space and installs its map.
// All other fields are left uninitialized for the caller to fill in.
// Jumps to |gc_required| if new-space allocation fails.
void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  // Allocate the fixed-size cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::cons_ascii_string_map()));
}
879
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -0800880
// Copy memory, byte-by-byte, from source to destination. Not optimized for
// long or aligned copies. The contents of scratch and length are destroyed.
// Source and destination are incremented by length.
// Many variants of movsb, loop unrolling, word moves, and indexed operands
// have been tried here already, and this is fastest.
// A simpler loop is faster on small copies, but 30% slower on large ones.
// The cld() instruction must have been emitted, to set the direction flag(),
// before calling this function.
void MacroAssembler::CopyBytes(Register source,
                               Register destination,
                               Register length,
                               Register scratch) {
  Label loop, done, short_string, short_loop;
  // Experimentation shows that the short string loop is faster if length < 10.
  cmp(Operand(length), Immediate(10));
  j(less_equal, &short_string);

  // rep_movs() has fixed register operands (esi/edi/ecx); insist on them.
  ASSERT(source.is(esi));
  ASSERT(destination.is(edi));
  ASSERT(length.is(ecx));

  // Because source is 4-byte aligned in our uses of this function,
  // we keep source aligned for the rep_movs call by copying the odd bytes
  // at the end of the ranges.
  mov(scratch, Operand(source, length, times_1, -4));       // Last dword of src
  mov(Operand(destination, length, times_1, -4), scratch);  // to last of dst.
  mov(scratch, ecx);  // Save the byte count; rep_movs consumes ecx.
  shr(ecx, 2);        // Dword count for rep_movs.
  rep_movs();         // Advances esi/edi by 4 * (old length / 4).
  and_(Operand(scratch), Immediate(0x3));  // Trailing bytes already copied
  add(destination, Operand(scratch));      // above; step destination past them.
  // NOTE(review): in this path |source| stops at the last aligned dword; the
  // header's "incremented by length" only holds exactly for |destination| —
  // confirm callers don't rely on source == end for unaligned lengths.
  jmp(&done);

  bind(&short_string);
  test(length, Operand(length));
  j(zero, &done);  // Zero-length copy: nothing to do.

  bind(&short_loop);
  mov_b(scratch, Operand(source, 0));
  mov_b(Operand(destination, 0), scratch);
  inc(source);
  inc(destination);
  dec(length);
  j(not_zero, &short_loop);

  bind(&done);
}
928
Steve Blockd0582a62009-12-15 09:54:21 +0000929
// Virtual-frame (JumpTarget) flavor of NegativeZeroTest: branches to
// |then_target| when |result| is zero while |op| is negative — the case
// where an integer zero actually represents JavaScript -0.
void MacroAssembler::NegativeZeroTest(CodeGenerator* cgen,
                                      Register result,
                                      Register op,
                                      JumpTarget* then_target) {
  JumpTarget ok;
  test(result, Operand(result));
  ok.Branch(not_zero, taken);  // Non-zero result: cannot be -0.
  test(op, Operand(op));
  then_target->Branch(sign, not_taken);  // result == 0 and op < 0.
  ok.Bind();
}
941
942
// Branches to |then_label| when |result| is zero while |op| is negative —
// the case where an integer zero actually represents JavaScript -0.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);  // Non-zero result: cannot be -0.
  test(op, Operand(op));
  j(sign, then_label, not_taken);  // result == 0 and op < 0.
  bind(&ok);
}
953
954
// Two-operand variant: branches to |then_label| when |result| is zero and
// either |op1| or |op2| is negative (their sign bits are OR'ed together).
// |scratch| is clobbered.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);  // Non-zero result: cannot be -0.
  mov(scratch, Operand(op1));
  or_(scratch, Operand(op2));  // Sign flag set iff op1 or op2 is negative.
  j(sign, then_label, not_taken);
  bind(&ok);
}
968
969
// Loads the prototype of the JSFunction in |function| into |result|.
// Jumps to |miss| when |function| is a smi, is not a function, or its
// prototype slot holds the hole (prototype not yet allocated). Functions
// with a non-instance prototype fetch it from the initial map's constructor
// field instead. |scratch| is clobbered.
void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss) {
  // Check that the receiver isn't a smi.
  test(function, Immediate(kSmiTagMask));
  j(zero, miss, not_taken);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss, not_taken);

  // Make sure that the function has an instance prototype.
  Label non_instance;
  movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
  test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance, not_taken);

  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(Operand(result), Immediate(Factory::the_hole_value()));
  j(equal, miss, not_taken);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  mov(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}
1015
1016
1017void MacroAssembler::CallStub(CodeStub* stub) {
Leon Clarkee46be812010-01-19 14:06:41 +00001018 ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
Steve Blocka7e24c12009-10-30 11:49:00 +00001019 call(stub->GetCode(), RelocInfo::CODE_TARGET);
1020}
1021
1022
John Reck59135872010-11-02 12:39:01 -07001023MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub) {
Leon Clarkee46be812010-01-19 14:06:41 +00001024 ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
John Reck59135872010-11-02 12:39:01 -07001025 Object* result;
1026 { MaybeObject* maybe_result = stub->TryGetCode();
1027 if (!maybe_result->ToObject(&result)) return maybe_result;
Leon Clarkee46be812010-01-19 14:06:41 +00001028 }
John Reck59135872010-11-02 12:39:01 -07001029 call(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
Leon Clarkee46be812010-01-19 14:06:41 +00001030 return result;
1031}
1032
1033
Steve Blockd0582a62009-12-15 09:54:21 +00001034void MacroAssembler::TailCallStub(CodeStub* stub) {
Leon Clarkee46be812010-01-19 14:06:41 +00001035 ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
Steve Blockd0582a62009-12-15 09:54:21 +00001036 jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
1037}
1038
1039
John Reck59135872010-11-02 12:39:01 -07001040MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub) {
Leon Clarkee46be812010-01-19 14:06:41 +00001041 ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
John Reck59135872010-11-02 12:39:01 -07001042 Object* result;
1043 { MaybeObject* maybe_result = stub->TryGetCode();
1044 if (!maybe_result->ToObject(&result)) return maybe_result;
Leon Clarkee46be812010-01-19 14:06:41 +00001045 }
John Reck59135872010-11-02 12:39:01 -07001046 jmp(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
Leon Clarkee46be812010-01-19 14:06:41 +00001047 return result;
1048}
1049
1050
Steve Blocka7e24c12009-10-30 11:49:00 +00001051void MacroAssembler::StubReturn(int argc) {
1052 ASSERT(argc >= 1 && generating_stub());
1053 ret((argc - 1) * kPointerSize);
1054}
1055
1056
1057void MacroAssembler::IllegalOperation(int num_arguments) {
1058 if (num_arguments > 0) {
1059 add(Operand(esp), Immediate(num_arguments * kPointerSize));
1060 }
1061 mov(eax, Immediate(Factory::undefined_value()));
1062}
1063
1064
// Extracts the cached array-index payload from a string hash-field value in
// |hash| and leaves it, smi-tagged, in |index|. The field must be known to
// contain a cached array index. |hash| is clobbered; |hash| and |index| may
// be the same register.
void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it does not conflict.
  ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  // We want the smi-tagged index in key. kArrayIndexValueMask has zeros in
  // the low kHashShift bits.
  and_(hash, String::kArrayIndexValueMask);
  STATIC_ASSERT(String::kHashShift >= kSmiTagSize && kSmiTag == 0);
  // Shifting right by (kHashShift - kSmiTagSize) leaves the value already
  // smi-tagged; no shift is emitted when the amounts coincide.
  if (String::kHashShift > kSmiTagSize) {
    shr(hash, String::kHashShift - kSmiTagSize);
  }
  if (!index.is(hash)) {
    mov(index, hash);
  }
}
1082
1083
Steve Blocka7e24c12009-10-30 11:49:00 +00001084void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
1085 CallRuntime(Runtime::FunctionForId(id), num_arguments);
1086}
1087
1088
Ben Murdochb0fe1622011-05-05 13:52:32 +01001089void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
1090 Runtime::Function* function = Runtime::FunctionForId(id);
1091 Set(eax, Immediate(function->nargs));
1092 mov(ebx, Immediate(ExternalReference(function)));
1093 CEntryStub ces(1);
1094 ces.SaveDoubles();
1095 CallStub(&ces);
1096}
1097
1098
John Reck59135872010-11-02 12:39:01 -07001099MaybeObject* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
1100 int num_arguments) {
Leon Clarkee46be812010-01-19 14:06:41 +00001101 return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
1102}
1103
1104
Steve Blocka7e24c12009-10-30 11:49:00 +00001105void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
1106 // If the expected number of arguments of the runtime function is
1107 // constant, we check that the actual number of arguments match the
1108 // expectation.
1109 if (f->nargs >= 0 && f->nargs != num_arguments) {
1110 IllegalOperation(num_arguments);
1111 return;
1112 }
1113
Leon Clarke4515c472010-02-03 11:58:03 +00001114 // TODO(1236192): Most runtime routines don't need the number of
1115 // arguments passed in because it is constant. At some point we
1116 // should remove this need and make the runtime routine entry code
1117 // smarter.
1118 Set(eax, Immediate(num_arguments));
1119 mov(ebx, Immediate(ExternalReference(f)));
1120 CEntryStub ces(1);
1121 CallStub(&ces);
Steve Blocka7e24c12009-10-30 11:49:00 +00001122}
1123
1124
John Reck59135872010-11-02 12:39:01 -07001125MaybeObject* MacroAssembler::TryCallRuntime(Runtime::Function* f,
1126 int num_arguments) {
Leon Clarkee46be812010-01-19 14:06:41 +00001127 if (f->nargs >= 0 && f->nargs != num_arguments) {
1128 IllegalOperation(num_arguments);
1129 // Since we did not call the stub, there was no allocation failure.
1130 // Return some non-failure object.
1131 return Heap::undefined_value();
1132 }
1133
Leon Clarke4515c472010-02-03 11:58:03 +00001134 // TODO(1236192): Most runtime routines don't need the number of
1135 // arguments passed in because it is constant. At some point we
1136 // should remove this need and make the runtime routine entry code
1137 // smarter.
1138 Set(eax, Immediate(num_arguments));
1139 mov(ebx, Immediate(ExternalReference(f)));
1140 CEntryStub ces(1);
1141 return TryCallStub(&ces);
Leon Clarkee46be812010-01-19 14:06:41 +00001142}
1143
1144
Ben Murdochbb769b22010-08-11 14:56:33 +01001145void MacroAssembler::CallExternalReference(ExternalReference ref,
1146 int num_arguments) {
1147 mov(eax, Immediate(num_arguments));
1148 mov(ebx, Immediate(ref));
1149
1150 CEntryStub stub(1);
1151 CallStub(&stub);
1152}
1153
1154
// Tail-calls the runtime entry |ext| with |num_arguments| stack arguments.
// Note: |result_size| is accepted for interface parity but not used on ia32.
void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  JumpToExternalReference(ext);
}
1165
1166
// Like TailCallExternalReference, but propagates stub-code allocation
// failures to the caller. |result_size| is not used on ia32.
MaybeObject* MacroAssembler::TryTailCallExternalReference(
    const ExternalReference& ext, int num_arguments, int result_size) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  return TryJumpToExternalReference(ext);
}
1176
1177
Steve Block6ded16b2010-05-10 14:33:55 +01001178void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
1179 int num_arguments,
1180 int result_size) {
1181 TailCallExternalReference(ExternalReference(fid), num_arguments, result_size);
Steve Blocka7e24c12009-10-30 11:49:00 +00001182}
1183
1184
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08001185MaybeObject* MacroAssembler::TryTailCallRuntime(Runtime::FunctionId fid,
1186 int num_arguments,
1187 int result_size) {
1188 return TryTailCallExternalReference(
1189 ExternalReference(fid), num_arguments, result_size);
1190}
1191
1192
// If true, a Handle<T> returned by value from a function with cdecl calling
// convention will be returned directly as the value of the location_ field
// in register eax.
// If false, it is returned as a pointer to a memory region preallocated by
// the caller. A pointer to this region must be passed to the function as an
// implicit first argument.
#if defined(USING_BSD_ABI) || defined(__MINGW32__)
static const bool kReturnHandlesDirectly = true;
#else
static const bool kReturnHandlesDirectly = false;
#endif
1204
1205
1206Operand ApiParameterOperand(int index) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001207 return Operand(
1208 esp, (index + (kReturnHandlesDirectly ? 0 : 1)) * kPointerSize);
John Reck59135872010-11-02 12:39:01 -07001209}
1210
1211
// Sets up an API exit frame with |argc| argument slots for a direct call to
// a C++ API function. When handles are returned indirectly (see
// kReturnHandlesDirectly) two extra slots are reserved: the out-cell that
// receives the returned handle, and the pointer to it passed as the implicit
// first argument. |scratch| is clobbered.
void MacroAssembler::PrepareCallApiFunction(int argc, Register scratch) {
  if (kReturnHandlesDirectly) {
    EnterApiExitFrame(argc);
    // When handles are returned directly we don't have to allocate extra
    // space for and pass an out parameter.
  } else {
    // We allocate two additional slots: return value and pointer to it.
    EnterApiExitFrame(argc + 2);

    // The argument slots are filled as follows:
    //
    //   n + 1: output cell
    //   n: arg n
    //   ...
    //   1: arg1
    //   0: pointer to the output cell
    //
    // Note that this is one more "argument" than the function expects
    // so the out cell will have to be popped explicitly after returning
    // from the function. The out cell contains Handle.

    // pointer to out cell.
    lea(scratch, Operand(esp, (argc + 1) * kPointerSize));
    mov(Operand(esp, 0 * kPointerSize), scratch);  // output.
    if (FLAG_debug_code) {
      // Poison the out cell so stale contents are easy to spot in debugging.
      mov(Operand(esp, (argc + 1) * kPointerSize), Immediate(0));  // out cell.
    }
  }
}
1241
1242
// Calls an API function directly and performs the surrounding bookkeeping:
// opens a HandleScope, calls |function|, unwraps the returned handle (or
// substitutes undefined for an empty handle), restores the HandleScope
// (deleting extensions if the limit moved), propagates any scheduled
// exception, then leaves the exit frame and returns, popping |stack_space|
// pointer-sized slots. Returns a failure if the tail-call to the
// exception-promotion runtime entry cannot be emitted.
// eax carries the (possibly indirect) result; ebx and edi hold the saved
// HandleScope next/limit values across the call.
MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn(ApiFunction* function,
                                                         int stack_space) {
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address();
  ExternalReference level_address =
      ExternalReference::handle_scope_level_address();

  // Allocate HandleScope in callee-save registers.
  mov(ebx, Operand::StaticVariable(next_address));
  mov(edi, Operand::StaticVariable(limit_address));
  add(Operand::StaticVariable(level_address), Immediate(1));

  // Call the api function!
  call(function->address(), RelocInfo::RUNTIME_ENTRY);

  if (!kReturnHandlesDirectly) {
    // The returned value is a pointer to the handle holding the result.
    // Dereference this to get to the location.
    mov(eax, Operand(eax, 0));
  }

  Label empty_handle;
  Label prologue;
  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;

  // Check if the result handle holds 0.
  test(eax, Operand(eax));
  j(zero, &empty_handle, not_taken);
  // It was non-zero.  Dereference to get the result value.
  mov(eax, Operand(eax, 0));
  bind(&prologue);
  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  mov(Operand::StaticVariable(next_address), ebx);
  sub(Operand::StaticVariable(level_address), Immediate(1));
  Assert(above_equal, "Invalid HandleScope level");
  // If the limit moved, extensions were allocated and must be deleted.
  cmp(edi, Operand::StaticVariable(limit_address));
  j(not_equal, &delete_allocated_handles, not_taken);
  bind(&leave_exit_frame);

  // Check if the function scheduled an exception.
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address();
  cmp(Operand::StaticVariable(scheduled_exception_address),
      Immediate(Factory::the_hole_value()));
  j(not_equal, &promote_scheduled_exception, not_taken);
  LeaveApiExitFrame();
  ret(stack_space * kPointerSize);
  bind(&promote_scheduled_exception);
  MaybeObject* result =
      TryTailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);
  if (result->IsFailure()) {
    return result;
  }
  bind(&empty_handle);
  // It was zero; the result is undefined.
  mov(eax, Factory::undefined_value());
  jmp(&prologue);

  // HandleScope limit has changed. Delete allocated extensions.
  bind(&delete_allocated_handles);
  mov(Operand::StaticVariable(limit_address), edi);
  mov(edi, eax);  // Preserve the result across the C call below.
  mov(eax, Immediate(ExternalReference::delete_handle_scope_extensions()));
  call(Operand(eax));
  mov(eax, edi);
  jmp(&leave_exit_frame);

  return result;
}
1317
1318
Steve Block6ded16b2010-05-10 14:33:55 +01001319void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001320 // Set the entry point and jump to the C entry runtime stub.
1321 mov(ebx, Immediate(ext));
1322 CEntryStub ces(1);
1323 jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
1324}
1325
1326
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08001327MaybeObject* MacroAssembler::TryJumpToExternalReference(
1328 const ExternalReference& ext) {
1329 // Set the entry point and jump to the C entry runtime stub.
1330 mov(ebx, Immediate(ext));
1331 CEntryStub ces(1);
1332 return TryTailCallStub(&ces);
1333}
1334
1335
// Emits the argument-count check shared by the InvokeCode/InvokeFunction
// family. When the expected and actual counts are statically known to match,
// falls straight through. Otherwise sets up eax (actual) and ebx (expected)
// and calls or jumps to the arguments adaptor trampoline; after a call,
// control transfers to |done| and |post_call_generator| (if non-NULL) is
// given a chance to emit code right after the call instruction.
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    const Operand& code_operand,
                                    Label* done,
                                    InvokeFlag flag,
                                    PostCallGenerator* post_call_generator) {
  bool definitely_matches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(eax, actual.immediate());
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      ASSERT(expected.reg().is(ebx));
      mov(eax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), Operand(actual.reg()));
      j(equal, &invoke);
      ASSERT(actual.reg().is(eax));
      ASSERT(expected.reg().is(ebx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
    if (!code_constant.is_null()) {
      // The adaptor expects the target code in edx.
      mov(edx, Immediate(code_constant));
      add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_operand.is_reg(edx)) {
      mov(edx, code_operand);
    }

    if (flag == CALL_FUNCTION) {
      call(adaptor, RelocInfo::CODE_TARGET);
      if (post_call_generator != NULL) post_call_generator->Generate();
      jmp(done);
    } else {
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
1401
1402
// Invokes the code referenced by the operand |code| (typically a function's
// code-entry field), adapting arguments first if expected and actual counts
// differ. CALL_FUNCTION emits a call (and notifies |post_call_generator|
// afterwards, if provided); JUMP_FUNCTION emits a tail jump.
void MacroAssembler::InvokeCode(const Operand& code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag,
                                PostCallGenerator* post_call_generator) {
  Label done;
  InvokePrologue(expected, actual, Handle<Code>::null(), code,
                 &done, flag, post_call_generator);
  if (flag == CALL_FUNCTION) {
    call(code);
    if (post_call_generator != NULL) post_call_generator->Generate();
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code);
  }
  bind(&done);
}
1420
1421
// Invokes a known code object |code| with relocation mode |rmode|, adapting
// arguments first if expected and actual counts differ. CALL_FUNCTION emits
// a call (and notifies |post_call_generator| afterwards, if provided);
// JUMP_FUNCTION emits a tail jump.
void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag,
                                PostCallGenerator* post_call_generator) {
  Label done;
  Operand dummy(eax);  // Unused; the prologue takes the constant instead.
  InvokePrologue(expected, actual, code, dummy, &done,
                 flag, post_call_generator);
  if (flag == CALL_FUNCTION) {
    call(code, rmode);
    if (post_call_generator != NULL) post_call_generator->Generate();
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code, rmode);
  }
  bind(&done);
}
1441
1442
// Invokes the JSFunction in |fun| (must be edi): loads its context into esi
// and its formal parameter count into ebx, then dispatches through the
// function's code entry. Clobbers edx and ebx.
void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    PostCallGenerator* post_call_generator) {
  ASSERT(fun.is(edi));
  mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  SmiUntag(ebx);  // The formal parameter count is stored as a smi.

  ParameterCount expected(ebx);
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, actual, flag, post_call_generator);
}
1457
1458
// Invokes a statically-known, already-compiled |function|: materializes it
// in edi, loads its context into esi, and dispatches either through the
// code-entry field (Crankshaft, so recompilation takes effect) or directly
// to the known code object.
void MacroAssembler::InvokeFunction(JSFunction* function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    PostCallGenerator* post_call_generator) {
  ASSERT(function->is_compiled());
  // Get the function and setup the context.
  mov(edi, Immediate(Handle<JSFunction>(function)));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  ParameterCount expected(function->shared()->formal_parameter_count());
  if (V8::UseCrankshaft()) {
    // TODO(kasperl): For now, we always call indirectly through the
    // code field in the function to allow recompilation to take effect
    // without changing any of the call sites.
    InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
               expected, actual, flag, post_call_generator);
  } else {
    Handle<Code> code(function->code());
    InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET,
               flag, post_call_generator);
  }
}
1481
1482
// Invokes the JavaScript builtin |id|: loads the builtin function into edi
// and dispatches through its code entry. The argument-count check is faked
// (expected = 0) because builtins assert their own arity.
void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                   InvokeFlag flag,
                                   PostCallGenerator* post_call_generator) {
  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments match the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinFunction(edi, id);
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, expected, flag, post_call_generator);
}
1497
Steve Block791712a2010-08-27 10:21:07 +01001498void MacroAssembler::GetBuiltinFunction(Register target,
1499 Builtins::JavaScript id) {
1500 // Load the JavaScript builtin function from the builtins object.
1501 mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
1502 mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
1503 mov(target, FieldOperand(target,
1504 JSBuiltinsObject::OffsetOfFunctionWithId(id)));
1505}
Steve Blocka7e24c12009-10-30 11:49:00 +00001506
// Loads the code entry point of builtin |id| into |target|. Clobbers edi
// (used to hold the builtin function), so |target| must not be edi.
void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(edi));
  // Load the JavaScript builtin function from the builtins object.
  GetBuiltinFunction(edi, id);
  // Load the code entry point from the function into the target register.
  mov(target, FieldOperand(edi, JSFunction::kCodeEntryOffset));
}
1514
1515
// Loads into |dst| the function context |context_chain_length| levels up the
// static context chain from the current context (esi), normalizing any
// intermediate (non-function) context to its enclosing function context.
void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    mov(dst, Operand(esi, Context::SlotOffset(Context::CLOSURE_INDEX)));
    // Load the function context (which is the incoming, outer context).
    mov(dst, FieldOperand(dst, JSFunction::kContextOffset));
    for (int i = 1; i < context_chain_length; i++) {
      mov(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
      mov(dst, FieldOperand(dst, JSFunction::kContextOffset));
    }
    // The context may be an intermediate context, not a function context.
    mov(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  } else {  // Slot is in the current function context.
    // The context may be an intermediate context, not a function context.
    mov(dst, Operand(esi, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  }
}
1533
1534
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001535void MacroAssembler::LoadGlobalFunction(int index, Register function) {
1536 // Load the global or builtins object from the current context.
1537 mov(function, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
1538 // Load the global context from the global or builtins object.
1539 mov(function, FieldOperand(function, GlobalObject::kGlobalContextOffset));
1540 // Load the function from the global context.
1541 mov(function, Operand(function, Context::SlotOffset(index)));
1542}
1543
1544
// Loads the initial map of the global function in |function| into |map|.
// In debug mode verifies the loaded value actually is a map (its own map
// must be the meta map) and aborts otherwise.
void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map. The global functions all have initial maps.
  mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (FLAG_debug_code) {
    Label ok, fail;
    CheckMap(map, Factory::meta_map(), &fail, false);
    jmp(&ok);
    bind(&fail);
    Abort("Global functions must have initial map");
    bind(&ok);
  }
}
1558
Steve Blockd0582a62009-12-15 09:54:21 +00001559
Ben Murdochb0fe1622011-05-05 13:52:32 +01001560int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
1561 // The registers are pushed starting with the lowest encoding,
1562 // which means that lowest encodings are furthest away from
1563 // the stack pointer.
1564 ASSERT(reg_code >= 0 && reg_code < kNumSafepointRegisters);
1565 return kNumSafepointRegisters - reg_code - 1;
1566}
1567
1568
// Returns to the caller without popping any stack arguments.
void MacroAssembler::Ret() {
  ret(0);
}
1572
1573
Leon Clarkee46be812010-01-19 14:06:41 +00001574void MacroAssembler::Drop(int stack_elements) {
1575 if (stack_elements > 0) {
1576 add(Operand(esp), Immediate(stack_elements * kPointerSize));
1577 }
1578}
1579
1580
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001581void MacroAssembler::Move(Register dst, Register src) {
1582 if (!dst.is(src)) {
1583 mov(dst, src);
1584 }
1585}
1586
1587
// Loads the handle |value| into |dst| as an immediate.
void MacroAssembler::Move(Register dst, Handle<Object> value) {
  mov(dst, value);
}
1591
1592
// Stores |value| into |counter|'s cell, but only when native code
// counters are compiled in and this counter is enabled.
void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
  }
}
1598
1599
1600void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
1601 ASSERT(value > 0);
1602 if (FLAG_native_code_counters && counter->Enabled()) {
1603 Operand operand = Operand::StaticVariable(ExternalReference(counter));
1604 if (value == 1) {
1605 inc(operand);
1606 } else {
1607 add(operand, Immediate(value));
1608 }
1609 }
1610}
1611
1612
1613void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
1614 ASSERT(value > 0);
1615 if (FLAG_native_code_counters && counter->Enabled()) {
1616 Operand operand = Operand::StaticVariable(ExternalReference(counter));
1617 if (value == 1) {
1618 dec(operand);
1619 } else {
1620 sub(operand, Immediate(value));
1621 }
1622 }
1623}
1624
1625
// Conditionally increments |counter| by |value| when condition |cc|
// holds.  The EFLAGS register is saved and restored around the update
// (the inc/add used by IncrementCounter clobber the flags), so the
// caller's condition codes survive.
void MacroAssembler::IncrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    // Branch around the update when the condition does not hold.
    j(NegateCondition(cc), &skip);
    pushfd();  // Preserve flags across the counter update.
    IncrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}
1639
1640
// Conditionally decrements |counter| by |value| when condition |cc|
// holds.  The EFLAGS register is saved and restored around the update
// (the dec/sub used by DecrementCounter clobber the flags), so the
// caller's condition codes survive.
void MacroAssembler::DecrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    // Branch around the update when the condition does not hold.
    j(NegateCondition(cc), &skip);
    pushfd();  // Preserve flags across the counter update.
    DecrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}
1654
1655
// Debug-only check: aborts with |msg| unless condition |cc| holds.
// Emits nothing in release code.
void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code) Check(cc, msg);
}
1659
1660
// Debug-only check that |elements| holds a fast-elements backing
// store: either a plain FixedArray or a copy-on-write FixedArray.
// Aborts otherwise.  Emits nothing in release code.
void MacroAssembler::AssertFastElements(Register elements) {
  if (FLAG_debug_code) {
    Label ok;
    // Accept a plain FixedArray.
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(Factory::fixed_array_map()));
    j(equal, &ok);
    // Copy-on-write arrays also count as fast elements.
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(Factory::fixed_cow_array_map()));
    j(equal, &ok);
    Abort("JSObject with fast elements map has slow elements");
    bind(&ok);
  }
}
1674
1675
// Aborts with |msg| unless condition |cc| holds.  Unlike Assert, this
// also fires in release code.
void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L, taken);
  Abort(msg);
  // will not return here
  bind(&L);
}
1683
1684
Steve Block6ded16b2010-05-10 14:33:55 +01001685void MacroAssembler::CheckStackAlignment() {
1686 int frame_alignment = OS::ActivationFrameAlignment();
1687 int frame_alignment_mask = frame_alignment - 1;
1688 if (frame_alignment > kPointerSize) {
1689 ASSERT(IsPowerOf2(frame_alignment));
1690 Label alignment_as_expected;
1691 test(esp, Immediate(frame_alignment_mask));
1692 j(zero, &alignment_as_expected);
1693 // Abort if stack is not aligned.
1694 int3();
1695 bind(&alignment_as_expected);
1696 }
1697}
1698
1699
// Aborts execution by calling Runtime::kAbort.  The message pointer is
// smuggled past the GC as two smis: an aligned (smi-shaped) pointer p0
// plus the alignment difference p1 - p0.
void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  AllowStubCallsScope allow_scope(this, true);

  push(eax);
  push(Immediate(p0));
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
  int3();
}
1725
1726
// Jumps to |on_not_number| unless the heap object in |reg| is a heap
// number.  |reg| must not hold a smi (checked in debug code).  When
// type feedback already proves a number, no check is emitted.
void MacroAssembler::JumpIfNotNumber(Register reg,
                                     TypeInfo info,
                                     Label* on_not_number) {
  if (FLAG_debug_code) AbortIfSmi(reg);
  if (!info.IsNumber()) {
    cmp(FieldOperand(reg, HeapObject::kMapOffset),
        Factory::heap_number_map());
    j(not_equal, on_not_number);
  }
}
1737
1738
// Truncates the heap number in |source| to an int32 in |dst| using
// cvttsd2si, jumping to |on_not_int32| when the value is out of int32
// range (cvttsd2si then produces the sentinel 0x80000000).  |scratch|
// may be no_reg; when it is and |dst| aliases |source|, |dst| is
// pushed so the original value can be restored on the failure path.
// When type feedback already proves an int32, the range check is
// skipped entirely.
void MacroAssembler::ConvertToInt32(Register dst,
                                    Register source,
                                    Register scratch,
                                    TypeInfo info,
                                    Label* on_not_int32) {
  if (FLAG_debug_code) {
    AbortIfSmi(source);
    AbortIfNotNumber(source);
  }
  if (info.IsInteger32()) {
    // Known to fit; convert without the overflow check.
    cvttsd2si(dst, FieldOperand(source, HeapNumber::kValueOffset));
  } else {
    Label done;
    // Without a scratch register and with dst aliasing source, dst must
    // be saved so the failure path can hand back the untouched value.
    bool push_pop = (scratch.is(no_reg) && dst.is(source));
    ASSERT(!scratch.is(source));
    if (push_pop) {
      push(dst);
      scratch = dst;
    }
    if (scratch.is(no_reg)) scratch = dst;
    cvttsd2si(scratch, FieldOperand(source, HeapNumber::kValueOffset));
    // 0x80000000 is cvttsd2si's out-of-range/NaN sentinel.
    cmp(scratch, 0x80000000u);
    if (push_pop) {
      j(not_equal, &done);
      // Overflow: restore dst before bailing out.
      pop(dst);
      jmp(on_not_int32);
    } else {
      j(equal, on_not_int32);
    }

    bind(&done);
    if (push_pop) {
      // Success path: discard the saved copy of dst.
      add(Operand(esp), Immediate(kPointerSize));  // Pop.
    }
    if (!scratch.is(dst)) {
      mov(dst, scratch);
    }
  }
}
1778
1779
// Loads the double value 2^power into |dst| by building the IEEE 754
// bit pattern directly: the biased exponent is placed in |scratch|,
// moved into the low bits of |dst|, and shifted up into the exponent
// field, leaving a zero mantissa.  Clobbers |scratch|.
void MacroAssembler::LoadPowerOf2(XMMRegister dst,
                                  Register scratch,
                                  int power) {
  // The biased exponent must fit in the exponent field.
  ASSERT(is_uintn(power + HeapNumber::kExponentBias,
                  HeapNumber::kExponentBits));
  mov(scratch, Immediate(power + HeapNumber::kExponentBias));
  movd(dst, Operand(scratch));
  psllq(dst, HeapNumber::kMantissaBits);
}
1789
1790
// Jumps to |failure| unless |instance_type| denotes a sequential ASCII
// string.  |scratch| may alias |instance_type|; in either case the
// scratch value is clobbered by the masking.
void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Register scratch,
    Label* failure) {
  if (!scratch.is(instance_type)) {
    mov(scratch, instance_type);
  }
  // Keep only the bits deciding string-ness, representation (sequential
  // vs. cons/external) and encoding (ASCII vs. two-byte).
  and_(scratch,
       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
  cmp(scratch, kStringTag | kSeqStringTag | kAsciiStringTag);
  j(not_equal, failure);
}
1803
1804
// Jumps to |failure| unless both |object1| and |object2| are sequential
// (flat) ASCII strings.  Both scratch registers are clobbered.  The two
// instance-type checks are combined into a single compare by packing
// both masked types into one register.
void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register object1,
                                                         Register object2,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         Label* failure) {
  // Check that both objects are not smis.
  ASSERT_EQ(0, kSmiTag);
  mov(scratch1, Operand(object1));
  and_(scratch1, Operand(object2));
  // If either tag bit were set the AND would keep it clear only if both
  // were heap objects; a zero result here means at least one smi.
  test(scratch1, Immediate(kSmiTagMask));
  j(zero, failure);

  // Load instance type for both strings.
  mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
  mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
  movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ascii strings.
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
  // Interleave bits from both instance types and compare them in one check.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  and_(scratch1, kFlatAsciiStringMask);
  and_(scratch2, kFlatAsciiStringMask);
  // scratch1 + scratch2 * 8 packs both masked types side by side.
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3));
  j(not_equal, failure);
}
1835
1836
Steve Block6ded16b2010-05-10 14:33:55 +01001837void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
1838 int frameAlignment = OS::ActivationFrameAlignment();
1839 if (frameAlignment != 0) {
1840 // Make stack end at alignment and make room for num_arguments words
1841 // and the original value of esp.
1842 mov(scratch, esp);
1843 sub(Operand(esp), Immediate((num_arguments + 1) * kPointerSize));
1844 ASSERT(IsPowerOf2(frameAlignment));
1845 and_(esp, -frameAlignment);
1846 mov(Operand(esp, num_arguments * kPointerSize), scratch);
1847 } else {
1848 sub(Operand(esp), Immediate(num_arguments * kPointerSize));
1849 }
1850}
1851
1852
// Calls the C function identified by the external reference |function|
// with |num_arguments| stack arguments already set up (see
// PrepareCallCFunction).  The target address goes through eax.
void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Trashing eax is ok as it will be the return value.
  mov(Operand(eax), Immediate(function));
  CallCFunction(eax, num_arguments);
}
1859
1860
// Calls the C function whose address is in |function| and undoes the
// stack setup from PrepareCallCFunction: on platforms that required
// alignment, the saved esp at [esp + num_arguments * kPointerSize] is
// reloaded; otherwise the argument slots are simply popped.
void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  // Check stack alignment.
  if (FLAG_debug_code) {
    CheckStackAlignment();
  }

  call(Operand(function));
  if (OS::ActivationFrameAlignment() != 0) {
    // Restore the esp value stashed by PrepareCallCFunction.
    mov(esp, Operand(esp, num_arguments * kPointerSize));
  } else {
    add(Operand(esp), Immediate(num_arguments * sizeof(int32_t)));
  }
}
1875
1876
// Sets up a macro assembler that writes directly over |size| bytes of
// already-generated code at |address|.
CodePatcher::CodePatcher(byte* address, int size)
    : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
1884
1885
// Flushes the instruction cache for the patched region and, in debug
// mode, verifies that exactly |size_| bytes of code were emitted.
CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
1894
1895
1896} } // namespace v8::internal
Leon Clarkef7060e22010-06-03 12:02:55 +01001897
1898#endif // V8_TARGET_ARCH_IA32