blob: cbf93dd6a187264bc5a18f7669150e010a7401b3 [file] [log] [blame]
Steve Blocka7e24c12009-10-30 11:49:00 +00001// Copyright 2006-2009 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
29
Leon Clarkef7060e22010-06-03 12:02:55 +010030#if defined(V8_TARGET_ARCH_IA32)
31
Steve Blocka7e24c12009-10-30 11:49:00 +000032#include "bootstrapper.h"
33#include "codegen-inl.h"
34#include "debug.h"
35#include "runtime.h"
36#include "serialize.h"
37
38namespace v8 {
39namespace internal {
40
41// -------------------------------------------------------------------------
42// MacroAssembler implementation.
43
44MacroAssembler::MacroAssembler(void* buffer, int size)
45 : Assembler(buffer, size),
Steve Blocka7e24c12009-10-30 11:49:00 +000046 generating_stub_(false),
47 allow_stub_calls_(true),
48 code_object_(Heap::undefined_value()) {
49}
50
51
Steve Block6ded16b2010-05-10 14:33:55 +010052void MacroAssembler::RecordWriteHelper(Register object,
53 Register addr,
54 Register scratch) {
55 if (FLAG_debug_code) {
56 // Check that the object is not in new space.
57 Label not_in_new_space;
58 InNewSpace(object, scratch, not_equal, &not_in_new_space);
59 Abort("new-space object passed to RecordWriteHelper");
60 bind(&not_in_new_space);
61 }
62
Steve Blocka7e24c12009-10-30 11:49:00 +000063 // Compute the page start address from the heap object pointer, and reuse
64 // the 'object' register for it.
Steve Block6ded16b2010-05-10 14:33:55 +010065 and_(object, ~Page::kPageAlignmentMask);
Steve Blocka7e24c12009-10-30 11:49:00 +000066
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010067 // Compute number of region covering addr. See Page::GetRegionNumberForAddress
68 // method for more details.
69 and_(addr, Page::kPageAlignmentMask);
70 shr(addr, Page::kRegionSizeLog2);
Steve Blocka7e24c12009-10-30 11:49:00 +000071
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010072 // Set dirty mark for region.
73 bts(Operand(object, Page::kDirtyFlagOffset), addr);
Steve Blocka7e24c12009-10-30 11:49:00 +000074}
75
76
Steve Block6ded16b2010-05-10 14:33:55 +010077void MacroAssembler::InNewSpace(Register object,
78 Register scratch,
79 Condition cc,
80 Label* branch) {
81 ASSERT(cc == equal || cc == not_equal);
82 if (Serializer::enabled()) {
83 // Can't do arithmetic on external references if it might get serialized.
84 mov(scratch, Operand(object));
85 // The mask isn't really an address. We load it as an external reference in
86 // case the size of the new space is different between the snapshot maker
87 // and the running system.
88 and_(Operand(scratch), Immediate(ExternalReference::new_space_mask()));
89 cmp(Operand(scratch), Immediate(ExternalReference::new_space_start()));
90 j(cc, branch);
91 } else {
92 int32_t new_space_start = reinterpret_cast<int32_t>(
93 ExternalReference::new_space_start().address());
94 lea(scratch, Operand(object, -new_space_start));
95 and_(scratch, Heap::NewSpaceMask());
96 j(cc, branch);
Steve Blocka7e24c12009-10-30 11:49:00 +000097 }
Steve Blocka7e24c12009-10-30 11:49:00 +000098}
99
100
Kristian Monsen50ef84f2010-07-29 15:18:00 +0100101void MacroAssembler::RecordWrite(Register object,
102 int offset,
103 Register value,
104 Register scratch) {
Leon Clarke4515c472010-02-03 11:58:03 +0000105 // The compiled code assumes that record write doesn't change the
106 // context register, so we check that none of the clobbered
107 // registers are esi.
108 ASSERT(!object.is(esi) && !value.is(esi) && !scratch.is(esi));
109
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100110 // First, check if a write barrier is even needed. The tests below
111 // catch stores of Smis and stores into young gen.
Steve Blocka7e24c12009-10-30 11:49:00 +0000112 Label done;
113
114 // Skip barrier if writing a smi.
115 ASSERT_EQ(0, kSmiTag);
116 test(value, Immediate(kSmiTagMask));
117 j(zero, &done);
118
Steve Block6ded16b2010-05-10 14:33:55 +0100119 InNewSpace(object, value, equal, &done);
Steve Blocka7e24c12009-10-30 11:49:00 +0000120
Steve Block6ded16b2010-05-10 14:33:55 +0100121 // The offset is relative to a tagged or untagged HeapObject pointer,
122 // so either offset or offset + kHeapObjectTag must be a
123 // multiple of kPointerSize.
124 ASSERT(IsAligned(offset, kPointerSize) ||
125 IsAligned(offset + kHeapObjectTag, kPointerSize));
126
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100127 Register dst = scratch;
128 if (offset != 0) {
129 lea(dst, Operand(object, offset));
Steve Blocka7e24c12009-10-30 11:49:00 +0000130 } else {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100131 // Array access: calculate the destination address in the same manner as
132 // KeyedStoreIC::GenerateGeneric. Multiply a smi by 2 to get an offset
133 // into an array of words.
134 ASSERT_EQ(1, kSmiTagSize);
135 ASSERT_EQ(0, kSmiTag);
136 lea(dst, Operand(object, dst, times_half_pointer_size,
137 FixedArray::kHeaderSize - kHeapObjectTag));
Steve Blocka7e24c12009-10-30 11:49:00 +0000138 }
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100139 RecordWriteHelper(object, dst, value);
Steve Blocka7e24c12009-10-30 11:49:00 +0000140
141 bind(&done);
Leon Clarke4515c472010-02-03 11:58:03 +0000142
143 // Clobber all input registers when running with the debug-code flag
144 // turned on to provoke errors.
145 if (FLAG_debug_code) {
Steve Block6ded16b2010-05-10 14:33:55 +0100146 mov(object, Immediate(BitCast<int32_t>(kZapValue)));
147 mov(value, Immediate(BitCast<int32_t>(kZapValue)));
148 mov(scratch, Immediate(BitCast<int32_t>(kZapValue)));
Leon Clarke4515c472010-02-03 11:58:03 +0000149 }
Steve Blocka7e24c12009-10-30 11:49:00 +0000150}
151
152
Steve Block8defd9f2010-07-08 12:39:36 +0100153void MacroAssembler::RecordWrite(Register object,
154 Register address,
155 Register value) {
156 // The compiled code assumes that record write doesn't change the
157 // context register, so we check that none of the clobbered
158 // registers are esi.
159 ASSERT(!object.is(esi) && !value.is(esi) && !address.is(esi));
160
161 // First, check if a write barrier is even needed. The tests below
162 // catch stores of Smis and stores into young gen.
163 Label done;
164
165 // Skip barrier if writing a smi.
166 ASSERT_EQ(0, kSmiTag);
167 test(value, Immediate(kSmiTagMask));
168 j(zero, &done);
169
170 InNewSpace(object, value, equal, &done);
171
172 RecordWriteHelper(object, address, value);
173
174 bind(&done);
175
176 // Clobber all input registers when running with the debug-code flag
177 // turned on to provoke errors.
178 if (FLAG_debug_code) {
179 mov(object, Immediate(BitCast<int32_t>(kZapValue)));
180 mov(address, Immediate(BitCast<int32_t>(kZapValue)));
181 mov(value, Immediate(BitCast<int32_t>(kZapValue)));
182 }
183}
184
185
Steve Blockd0582a62009-12-15 09:54:21 +0000186void MacroAssembler::StackLimitCheck(Label* on_stack_overflow) {
187 cmp(esp,
188 Operand::StaticVariable(ExternalReference::address_of_stack_limit()));
189 j(below, on_stack_overflow);
190}
191
192
Steve Blocka7e24c12009-10-30 11:49:00 +0000193#ifdef ENABLE_DEBUGGER_SUPPORT
Andrei Popescu402d9372010-02-26 13:31:12 +0000194void MacroAssembler::DebugBreak() {
195 Set(eax, Immediate(0));
196 mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak)));
197 CEntryStub ces(1);
198 call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
199}
Steve Blocka7e24c12009-10-30 11:49:00 +0000200#endif
201
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100202
Steve Blocka7e24c12009-10-30 11:49:00 +0000203void MacroAssembler::Set(Register dst, const Immediate& x) {
204 if (x.is_zero()) {
205 xor_(dst, Operand(dst)); // shorter than mov
206 } else {
207 mov(dst, x);
208 }
209}
210
211
212void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
213 mov(dst, x);
214}
215
216
217void MacroAssembler::CmpObjectType(Register heap_object,
218 InstanceType type,
219 Register map) {
220 mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
221 CmpInstanceType(map, type);
222}
223
224
225void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
226 cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
227 static_cast<int8_t>(type));
228}
229
230
Andrei Popescu31002712010-02-23 13:46:05 +0000231void MacroAssembler::CheckMap(Register obj,
232 Handle<Map> map,
233 Label* fail,
234 bool is_heap_object) {
235 if (!is_heap_object) {
236 test(obj, Immediate(kSmiTagMask));
237 j(zero, fail);
238 }
239 cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
240 j(not_equal, fail);
241}
242
243
Leon Clarkee46be812010-01-19 14:06:41 +0000244Condition MacroAssembler::IsObjectStringType(Register heap_object,
245 Register map,
246 Register instance_type) {
247 mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
248 movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
249 ASSERT(kNotStringTag != 0);
250 test(instance_type, Immediate(kIsNotStringMask));
251 return zero;
252}
253
254
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100255void MacroAssembler::IsObjectJSObjectType(Register heap_object,
256 Register map,
257 Register scratch,
258 Label* fail) {
259 mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
260 IsInstanceJSObjectType(map, scratch, fail);
261}
262
263
264void MacroAssembler::IsInstanceJSObjectType(Register map,
265 Register scratch,
266 Label* fail) {
267 movzx_b(scratch, FieldOperand(map, Map::kInstanceTypeOffset));
268 sub(Operand(scratch), Immediate(FIRST_JS_OBJECT_TYPE));
269 cmp(scratch, LAST_JS_OBJECT_TYPE - FIRST_JS_OBJECT_TYPE);
270 j(above, fail);
271}
272
273
Steve Blocka7e24c12009-10-30 11:49:00 +0000274void MacroAssembler::FCmp() {
Steve Blockd0582a62009-12-15 09:54:21 +0000275 if (CpuFeatures::IsSupported(CMOV)) {
Steve Block3ce2e202009-11-05 08:53:23 +0000276 fucomip();
277 ffree(0);
278 fincstp();
279 } else {
280 fucompp();
281 push(eax);
282 fnstsw_ax();
283 sahf();
284 pop(eax);
285 }
Steve Blocka7e24c12009-10-30 11:49:00 +0000286}
287
288
Steve Block6ded16b2010-05-10 14:33:55 +0100289void MacroAssembler::AbortIfNotNumber(Register object) {
Andrei Popescu402d9372010-02-26 13:31:12 +0000290 Label ok;
291 test(object, Immediate(kSmiTagMask));
292 j(zero, &ok);
293 cmp(FieldOperand(object, HeapObject::kMapOffset),
294 Factory::heap_number_map());
Steve Block6ded16b2010-05-10 14:33:55 +0100295 Assert(equal, "Operand not a number");
Andrei Popescu402d9372010-02-26 13:31:12 +0000296 bind(&ok);
297}
298
299
Steve Block6ded16b2010-05-10 14:33:55 +0100300void MacroAssembler::AbortIfNotSmi(Register object) {
301 test(object, Immediate(kSmiTagMask));
Iain Merrick75681382010-08-19 15:07:18 +0100302 Assert(equal, "Operand is not a smi");
303}
304
305
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100306void MacroAssembler::AbortIfNotString(Register object) {
307 test(object, Immediate(kSmiTagMask));
308 Assert(not_equal, "Operand is not a string");
309 push(object);
310 mov(object, FieldOperand(object, HeapObject::kMapOffset));
311 CmpInstanceType(object, FIRST_NONSTRING_TYPE);
312 pop(object);
313 Assert(below, "Operand is not a string");
314}
315
316
Iain Merrick75681382010-08-19 15:07:18 +0100317void MacroAssembler::AbortIfSmi(Register object) {
318 test(object, Immediate(kSmiTagMask));
319 Assert(not_equal, "Operand is a smi");
Steve Block6ded16b2010-05-10 14:33:55 +0100320}
321
322
Steve Blocka7e24c12009-10-30 11:49:00 +0000323void MacroAssembler::EnterFrame(StackFrame::Type type) {
324 push(ebp);
325 mov(ebp, Operand(esp));
326 push(esi);
327 push(Immediate(Smi::FromInt(type)));
328 push(Immediate(CodeObject()));
329 if (FLAG_debug_code) {
330 cmp(Operand(esp, 0), Immediate(Factory::undefined_value()));
331 Check(not_equal, "code object not properly patched");
332 }
333}
334
335
336void MacroAssembler::LeaveFrame(StackFrame::Type type) {
337 if (FLAG_debug_code) {
338 cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
339 Immediate(Smi::FromInt(type)));
340 Check(equal, "stack frame types must match");
341 }
342 leave();
343}
344
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100345
346void MacroAssembler::EnterExitFramePrologue() {
Steve Blocka7e24c12009-10-30 11:49:00 +0000347 // Setup the frame structure on the stack.
348 ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
349 ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
350 ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
351 push(ebp);
352 mov(ebp, Operand(esp));
353
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100354 // Reserve room for entry stack pointer and push the code object.
Steve Blocka7e24c12009-10-30 11:49:00 +0000355 ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
Andrei Popescu402d9372010-02-26 13:31:12 +0000356 push(Immediate(0)); // Saved entry sp, patched before call.
357 push(Immediate(CodeObject())); // Accessed from ExitFrame::code_slot.
Steve Blocka7e24c12009-10-30 11:49:00 +0000358
359 // Save the frame pointer and the context in top.
360 ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
361 ExternalReference context_address(Top::k_context_address);
362 mov(Operand::StaticVariable(c_entry_fp_address), ebp);
363 mov(Operand::StaticVariable(context_address), esi);
Steve Blockd0582a62009-12-15 09:54:21 +0000364}
Steve Blocka7e24c12009-10-30 11:49:00 +0000365
Steve Blocka7e24c12009-10-30 11:49:00 +0000366
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100367void MacroAssembler::EnterExitFrameEpilogue(int argc) {
Steve Blockd0582a62009-12-15 09:54:21 +0000368 // Reserve space for arguments.
369 sub(Operand(esp), Immediate(argc * kPointerSize));
Steve Blocka7e24c12009-10-30 11:49:00 +0000370
371 // Get the required frame alignment for the OS.
372 static const int kFrameAlignment = OS::ActivationFrameAlignment();
373 if (kFrameAlignment > 0) {
374 ASSERT(IsPowerOf2(kFrameAlignment));
375 and_(esp, -kFrameAlignment);
376 }
377
378 // Patch the saved entry sp.
379 mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
380}
381
382
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100383void MacroAssembler::EnterExitFrame() {
384 EnterExitFramePrologue();
Steve Blockd0582a62009-12-15 09:54:21 +0000385
386 // Setup argc and argv in callee-saved registers.
387 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
388 mov(edi, Operand(eax));
389 lea(esi, Operand(ebp, eax, times_4, offset));
390
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100391 EnterExitFrameEpilogue(2);
Steve Blockd0582a62009-12-15 09:54:21 +0000392}
393
394
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -0800395void MacroAssembler::EnterApiExitFrame(int argc) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100396 EnterExitFramePrologue();
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100397 EnterExitFrameEpilogue(argc);
Steve Blockd0582a62009-12-15 09:54:21 +0000398}
399
400
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100401void MacroAssembler::LeaveExitFrame() {
Steve Blocka7e24c12009-10-30 11:49:00 +0000402 // Get the return address from the stack and restore the frame pointer.
403 mov(ecx, Operand(ebp, 1 * kPointerSize));
404 mov(ebp, Operand(ebp, 0 * kPointerSize));
405
406 // Pop the arguments and the receiver from the caller stack.
407 lea(esp, Operand(esi, 1 * kPointerSize));
408
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -0800409 // Push the return address to get ready to return.
410 push(ecx);
411
412 LeaveExitFrameEpilogue();
413}
414
415void MacroAssembler::LeaveExitFrameEpilogue() {
Steve Blocka7e24c12009-10-30 11:49:00 +0000416 // Restore current context from top and clear it in debug mode.
417 ExternalReference context_address(Top::k_context_address);
418 mov(esi, Operand::StaticVariable(context_address));
419#ifdef DEBUG
420 mov(Operand::StaticVariable(context_address), Immediate(0));
421#endif
422
Steve Blocka7e24c12009-10-30 11:49:00 +0000423 // Clear the top frame.
424 ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
425 mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
426}
427
428
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -0800429void MacroAssembler::LeaveApiExitFrame() {
430 mov(esp, Operand(ebp));
431 pop(ebp);
432
433 LeaveExitFrameEpilogue();
434}
435
436
Steve Blocka7e24c12009-10-30 11:49:00 +0000437void MacroAssembler::PushTryHandler(CodeLocation try_location,
438 HandlerType type) {
439 // Adjust this code if not the case.
440 ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
441 // The pc (return address) is already on TOS.
442 if (try_location == IN_JAVASCRIPT) {
443 if (type == TRY_CATCH_HANDLER) {
444 push(Immediate(StackHandler::TRY_CATCH));
445 } else {
446 push(Immediate(StackHandler::TRY_FINALLY));
447 }
448 push(ebp);
449 } else {
450 ASSERT(try_location == IN_JS_ENTRY);
451 // The frame pointer does not point to a JS frame so we save NULL
452 // for ebp. We expect the code throwing an exception to check ebp
453 // before dereferencing it to restore the context.
454 push(Immediate(StackHandler::ENTRY));
455 push(Immediate(0)); // NULL frame pointer.
456 }
457 // Save the current handler as the next handler.
458 push(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
459 // Link this handler as the new current one.
460 mov(Operand::StaticVariable(ExternalReference(Top::k_handler_address)), esp);
461}
462
463
Leon Clarkee46be812010-01-19 14:06:41 +0000464void MacroAssembler::PopTryHandler() {
465 ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
466 pop(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
467 add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize));
468}
469
470
Steve Blocka7e24c12009-10-30 11:49:00 +0000471void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
472 Register scratch,
473 Label* miss) {
474 Label same_contexts;
475
476 ASSERT(!holder_reg.is(scratch));
477
478 // Load current lexical context from the stack frame.
479 mov(scratch, Operand(ebp, StandardFrameConstants::kContextOffset));
480
481 // When generating debug code, make sure the lexical context is set.
482 if (FLAG_debug_code) {
483 cmp(Operand(scratch), Immediate(0));
484 Check(not_equal, "we should not have an empty lexical context");
485 }
486 // Load the global context of the current context.
487 int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
488 mov(scratch, FieldOperand(scratch, offset));
489 mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));
490
491 // Check the context is a global context.
492 if (FLAG_debug_code) {
493 push(scratch);
494 // Read the first word and compare to global_context_map.
495 mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
496 cmp(scratch, Factory::global_context_map());
497 Check(equal, "JSGlobalObject::global_context should be a global context.");
498 pop(scratch);
499 }
500
501 // Check if both contexts are the same.
502 cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
503 j(equal, &same_contexts, taken);
504
505 // Compare security tokens, save holder_reg on the stack so we can use it
506 // as a temporary register.
507 //
508 // TODO(119): avoid push(holder_reg)/pop(holder_reg)
509 push(holder_reg);
510 // Check that the security token in the calling global object is
511 // compatible with the security token in the receiving global
512 // object.
513 mov(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
514
515 // Check the context is a global context.
516 if (FLAG_debug_code) {
517 cmp(holder_reg, Factory::null_value());
518 Check(not_equal, "JSGlobalProxy::context() should not be null.");
519
520 push(holder_reg);
521 // Read the first word and compare to global_context_map(),
522 mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
523 cmp(holder_reg, Factory::global_context_map());
524 Check(equal, "JSGlobalObject::global_context should be a global context.");
525 pop(holder_reg);
526 }
527
528 int token_offset = Context::kHeaderSize +
529 Context::SECURITY_TOKEN_INDEX * kPointerSize;
530 mov(scratch, FieldOperand(scratch, token_offset));
531 cmp(scratch, FieldOperand(holder_reg, token_offset));
532 pop(holder_reg);
533 j(not_equal, miss, not_taken);
534
535 bind(&same_contexts);
536}
537
538
539void MacroAssembler::LoadAllocationTopHelper(Register result,
Steve Blocka7e24c12009-10-30 11:49:00 +0000540 Register scratch,
541 AllocationFlags flags) {
542 ExternalReference new_space_allocation_top =
543 ExternalReference::new_space_allocation_top_address();
544
545 // Just return if allocation top is already known.
546 if ((flags & RESULT_CONTAINS_TOP) != 0) {
547 // No use of scratch if allocation top is provided.
548 ASSERT(scratch.is(no_reg));
549#ifdef DEBUG
550 // Assert that result actually contains top on entry.
551 cmp(result, Operand::StaticVariable(new_space_allocation_top));
552 Check(equal, "Unexpected allocation top");
553#endif
554 return;
555 }
556
557 // Move address of new object to result. Use scratch register if available.
558 if (scratch.is(no_reg)) {
559 mov(result, Operand::StaticVariable(new_space_allocation_top));
560 } else {
Steve Blocka7e24c12009-10-30 11:49:00 +0000561 mov(Operand(scratch), Immediate(new_space_allocation_top));
562 mov(result, Operand(scratch, 0));
563 }
564}
565
566
567void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
568 Register scratch) {
Steve Blockd0582a62009-12-15 09:54:21 +0000569 if (FLAG_debug_code) {
570 test(result_end, Immediate(kObjectAlignmentMask));
571 Check(zero, "Unaligned allocation in new space");
572 }
573
Steve Blocka7e24c12009-10-30 11:49:00 +0000574 ExternalReference new_space_allocation_top =
575 ExternalReference::new_space_allocation_top_address();
576
577 // Update new top. Use scratch if available.
578 if (scratch.is(no_reg)) {
579 mov(Operand::StaticVariable(new_space_allocation_top), result_end);
580 } else {
581 mov(Operand(scratch, 0), result_end);
582 }
583}
584
585
586void MacroAssembler::AllocateInNewSpace(int object_size,
587 Register result,
588 Register result_end,
589 Register scratch,
590 Label* gc_required,
591 AllocationFlags flags) {
John Reck59135872010-11-02 12:39:01 -0700592 if (!FLAG_inline_new) {
593 if (FLAG_debug_code) {
594 // Trash the registers to simulate an allocation failure.
595 mov(result, Immediate(0x7091));
596 if (result_end.is_valid()) {
597 mov(result_end, Immediate(0x7191));
598 }
599 if (scratch.is_valid()) {
600 mov(scratch, Immediate(0x7291));
601 }
602 }
603 jmp(gc_required);
604 return;
605 }
Steve Blocka7e24c12009-10-30 11:49:00 +0000606 ASSERT(!result.is(result_end));
607
608 // Load address of new object into result.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -0800609 LoadAllocationTopHelper(result, scratch, flags);
Steve Blocka7e24c12009-10-30 11:49:00 +0000610
Ben Murdochbb769b22010-08-11 14:56:33 +0100611 Register top_reg = result_end.is_valid() ? result_end : result;
612
Steve Blocka7e24c12009-10-30 11:49:00 +0000613 // Calculate new top and bail out if new space is exhausted.
614 ExternalReference new_space_allocation_limit =
615 ExternalReference::new_space_allocation_limit_address();
Ben Murdochbb769b22010-08-11 14:56:33 +0100616
617 if (top_reg.is(result)) {
618 add(Operand(top_reg), Immediate(object_size));
619 } else {
620 lea(top_reg, Operand(result, object_size));
621 }
622 cmp(top_reg, Operand::StaticVariable(new_space_allocation_limit));
Steve Blocka7e24c12009-10-30 11:49:00 +0000623 j(above, gc_required, not_taken);
624
Leon Clarkee46be812010-01-19 14:06:41 +0000625 // Update allocation top.
Ben Murdochbb769b22010-08-11 14:56:33 +0100626 UpdateAllocationTopHelper(top_reg, scratch);
627
628 // Tag result if requested.
629 if (top_reg.is(result)) {
630 if ((flags & TAG_OBJECT) != 0) {
631 sub(Operand(result), Immediate(object_size - kHeapObjectTag));
632 } else {
633 sub(Operand(result), Immediate(object_size));
634 }
635 } else if ((flags & TAG_OBJECT) != 0) {
636 add(Operand(result), Immediate(kHeapObjectTag));
637 }
Steve Blocka7e24c12009-10-30 11:49:00 +0000638}
639
640
641void MacroAssembler::AllocateInNewSpace(int header_size,
642 ScaleFactor element_size,
643 Register element_count,
644 Register result,
645 Register result_end,
646 Register scratch,
647 Label* gc_required,
648 AllocationFlags flags) {
John Reck59135872010-11-02 12:39:01 -0700649 if (!FLAG_inline_new) {
650 if (FLAG_debug_code) {
651 // Trash the registers to simulate an allocation failure.
652 mov(result, Immediate(0x7091));
653 mov(result_end, Immediate(0x7191));
654 if (scratch.is_valid()) {
655 mov(scratch, Immediate(0x7291));
656 }
657 // Register element_count is not modified by the function.
658 }
659 jmp(gc_required);
660 return;
661 }
Steve Blocka7e24c12009-10-30 11:49:00 +0000662 ASSERT(!result.is(result_end));
663
664 // Load address of new object into result.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -0800665 LoadAllocationTopHelper(result, scratch, flags);
Steve Blocka7e24c12009-10-30 11:49:00 +0000666
667 // Calculate new top and bail out if new space is exhausted.
668 ExternalReference new_space_allocation_limit =
669 ExternalReference::new_space_allocation_limit_address();
670 lea(result_end, Operand(result, element_count, element_size, header_size));
671 cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
672 j(above, gc_required);
673
Steve Blocka7e24c12009-10-30 11:49:00 +0000674 // Tag result if requested.
675 if ((flags & TAG_OBJECT) != 0) {
Leon Clarkee46be812010-01-19 14:06:41 +0000676 lea(result, Operand(result, kHeapObjectTag));
Steve Blocka7e24c12009-10-30 11:49:00 +0000677 }
Leon Clarkee46be812010-01-19 14:06:41 +0000678
679 // Update allocation top.
680 UpdateAllocationTopHelper(result_end, scratch);
Steve Blocka7e24c12009-10-30 11:49:00 +0000681}
682
683
684void MacroAssembler::AllocateInNewSpace(Register object_size,
685 Register result,
686 Register result_end,
687 Register scratch,
688 Label* gc_required,
689 AllocationFlags flags) {
John Reck59135872010-11-02 12:39:01 -0700690 if (!FLAG_inline_new) {
691 if (FLAG_debug_code) {
692 // Trash the registers to simulate an allocation failure.
693 mov(result, Immediate(0x7091));
694 mov(result_end, Immediate(0x7191));
695 if (scratch.is_valid()) {
696 mov(scratch, Immediate(0x7291));
697 }
698 // object_size is left unchanged by this function.
699 }
700 jmp(gc_required);
701 return;
702 }
Steve Blocka7e24c12009-10-30 11:49:00 +0000703 ASSERT(!result.is(result_end));
704
705 // Load address of new object into result.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -0800706 LoadAllocationTopHelper(result, scratch, flags);
Steve Blocka7e24c12009-10-30 11:49:00 +0000707
708 // Calculate new top and bail out if new space is exhausted.
709 ExternalReference new_space_allocation_limit =
710 ExternalReference::new_space_allocation_limit_address();
711 if (!object_size.is(result_end)) {
712 mov(result_end, object_size);
713 }
714 add(result_end, Operand(result));
715 cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
716 j(above, gc_required, not_taken);
717
Steve Blocka7e24c12009-10-30 11:49:00 +0000718 // Tag result if requested.
719 if ((flags & TAG_OBJECT) != 0) {
Leon Clarkee46be812010-01-19 14:06:41 +0000720 lea(result, Operand(result, kHeapObjectTag));
Steve Blocka7e24c12009-10-30 11:49:00 +0000721 }
Leon Clarkee46be812010-01-19 14:06:41 +0000722
723 // Update allocation top.
724 UpdateAllocationTopHelper(result_end, scratch);
Steve Blocka7e24c12009-10-30 11:49:00 +0000725}
726
727
728void MacroAssembler::UndoAllocationInNewSpace(Register object) {
729 ExternalReference new_space_allocation_top =
730 ExternalReference::new_space_allocation_top_address();
731
732 // Make sure the object has no tag before resetting top.
733 and_(Operand(object), Immediate(~kHeapObjectTagMask));
734#ifdef DEBUG
735 cmp(object, Operand::StaticVariable(new_space_allocation_top));
736 Check(below, "Undo allocation of non allocated memory");
737#endif
738 mov(Operand::StaticVariable(new_space_allocation_top), object);
739}
740
741
Steve Block3ce2e202009-11-05 08:53:23 +0000742void MacroAssembler::AllocateHeapNumber(Register result,
743 Register scratch1,
744 Register scratch2,
745 Label* gc_required) {
746 // Allocate heap number in new space.
747 AllocateInNewSpace(HeapNumber::kSize,
748 result,
749 scratch1,
750 scratch2,
751 gc_required,
752 TAG_OBJECT);
753
754 // Set the map.
755 mov(FieldOperand(result, HeapObject::kMapOffset),
756 Immediate(Factory::heap_number_map()));
757}
758
759
Steve Blockd0582a62009-12-15 09:54:21 +0000760void MacroAssembler::AllocateTwoByteString(Register result,
761 Register length,
762 Register scratch1,
763 Register scratch2,
764 Register scratch3,
765 Label* gc_required) {
766 // Calculate the number of bytes needed for the characters in the string while
767 // observing object alignment.
768 ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
Steve Blockd0582a62009-12-15 09:54:21 +0000769 ASSERT(kShortSize == 2);
Leon Clarkee46be812010-01-19 14:06:41 +0000770 // scratch1 = length * 2 + kObjectAlignmentMask.
771 lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
Steve Blockd0582a62009-12-15 09:54:21 +0000772 and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));
773
774 // Allocate two byte string in new space.
775 AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
776 times_1,
777 scratch1,
778 result,
779 scratch2,
780 scratch3,
781 gc_required,
782 TAG_OBJECT);
783
784 // Set the map, length and hash field.
785 mov(FieldOperand(result, HeapObject::kMapOffset),
786 Immediate(Factory::string_map()));
Steve Block6ded16b2010-05-10 14:33:55 +0100787 mov(scratch1, length);
788 SmiTag(scratch1);
789 mov(FieldOperand(result, String::kLengthOffset), scratch1);
Steve Blockd0582a62009-12-15 09:54:21 +0000790 mov(FieldOperand(result, String::kHashFieldOffset),
791 Immediate(String::kEmptyHashField));
792}
793
794
// Allocates an ASCII string in new space long enough for the character
// count held in |length| (untagged).  Jumps to |gc_required| if new space
// is exhausted.  Initializes the map, smi-tagged length and empty hash
// field; the character payload is left uninitialized.  |result| receives
// the tagged string pointer; all three scratch registers are clobbered.
void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, length);
  ASSERT(kCharSize == 1);
  // Round the byte count up to the next object-alignment boundary.
  add(Operand(scratch1), Immediate(kObjectAlignmentMask));
  and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));

  // Allocate ascii string in new space.
  AllocateInNewSpace(SeqAsciiString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::ascii_string_map()));
  // Store the length as a smi.
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
828
829
// Allocates an ASCII string in new space for a compile-time-constant
// |length| (> 0).  Jumps to |gc_required| on allocation failure.  The map,
// smi-encoded length and empty hash field are initialized; the character
// payload is left uninitialized.
void MacroAssembler::AllocateAsciiString(Register result,
                                         int length,
                                         Register scratch1,
                                         Register scratch2,
                                         Label* gc_required) {
  ASSERT(length > 0);

  // Allocate ascii string in new space.
  AllocateInNewSpace(SeqAsciiString::SizeFor(length),
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::ascii_string_map()));
  mov(FieldOperand(result, String::kLengthOffset),
      Immediate(Smi::FromInt(length)));
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
853
854
// Allocates a (two-byte) cons string cell in new space, jumping to
// |gc_required| on failure.  Only the map is initialized; the caller must
// fill in length, hash field and the two string halves.
void MacroAssembler::AllocateConsString(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate the cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::cons_string_map()));
}
871
872
// Allocates an ASCII cons string cell in new space, jumping to
// |gc_required| on failure.  Only the map is initialized; the caller must
// fill in length, hash field and the two string halves.
void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  // Allocate the cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::cons_ascii_string_map()));
}
889
// Appends the (sequential ASCII) string in |current_string| to the string
// object referenced by |operand_result|, which is assumed to sit at the
// top of new space so that extending it is an allocation.
// All registers must be distinct. Only current_string needs valid contents
// on entry. All registers may be invalid on exit. result_operand is
// unchanged, padding_chars is updated correctly.
void MacroAssembler::AppendStringToTopOfNewSpace(
    Register current_string,  // Tagged pointer to string to copy.
    Register current_string_length,
    Register result_pos,
    Register scratch,
    Register new_padding_chars,
    Operand operand_result,
    Operand operand_padding_chars,
    Label* bailout) {
  mov(current_string_length,
      FieldOperand(current_string, String::kLengthOffset));
  // Untag the smi-encoded length (smi tag is presumably one bit on ia32 —
  // matches the shr-by-1 used alongside SmiUntag elsewhere in this file).
  shr(current_string_length, 1);
  // Subtract the padding already available at the end of the result, then
  // round the byte count up to object alignment; the difference between
  // the rounded and unrounded counts becomes the new padding.
  sub(current_string_length, operand_padding_chars);
  mov(new_padding_chars, current_string_length);
  add(Operand(current_string_length), Immediate(kObjectAlignmentMask));
  and_(Operand(current_string_length), Immediate(~kObjectAlignmentMask));
  sub(new_padding_chars, Operand(current_string_length));
  neg(new_padding_chars);
  // We need an allocation even if current_string_length is 0, to fetch
  // result_pos. Consider using a faster fetch of result_pos in that case.
  AllocateInNewSpace(current_string_length, result_pos, scratch, no_reg,
                     bailout, NO_ALLOCATION_FLAGS);
  // Start copying into the old padding region, and record the new padding.
  sub(result_pos, operand_padding_chars);
  mov(operand_padding_chars, new_padding_chars);

  Register scratch_2 = new_padding_chars;  // Used to compute total length.
  // Copy string to the end of result.
  // Update the result's (smi) length field: old length + appended length.
  mov(current_string_length,
      FieldOperand(current_string, String::kLengthOffset));
  mov(scratch, operand_result);
  mov(scratch_2, current_string_length);
  add(scratch_2, FieldOperand(scratch, String::kLengthOffset));
  mov(FieldOperand(scratch, String::kLengthOffset), scratch_2);
  shr(current_string_length, 1);
  // Point current_string at the first character (untagged payload address).
  lea(current_string,
      FieldOperand(current_string, SeqAsciiString::kHeaderSize));
  // Byte-wise backward copy of the character payload.
  // Loop condition: while (--current_string_length >= 0).
  Label copy_loop;
  Label copy_loop_entry;
  jmp(&copy_loop_entry);
  bind(&copy_loop);
  mov_b(scratch, Operand(current_string, current_string_length, times_1, 0));
  mov_b(Operand(result_pos, current_string_length, times_1, 0), scratch);
  bind(&copy_loop_entry);
  sub(Operand(current_string_length), Immediate(1));
  j(greater_equal, &copy_loop);
}
940
Steve Blockd0582a62009-12-15 09:54:21 +0000941
// Virtual-frame (JumpTarget) variant of NegativeZeroTest: branches to
// |then_target| when |result| is zero and |op| is negative — the case
// where an integer result of zero actually stands for -0.
void MacroAssembler::NegativeZeroTest(CodeGenerator* cgen,
                                      Register result,
                                      Register op,
                                      JumpTarget* then_target) {
  JumpTarget ok;
  test(result, Operand(result));
  ok.Branch(not_zero, taken);  // A non-zero result cannot be -0.
  test(op, Operand(op));
  then_target->Branch(sign, not_taken);  // Zero result, negative operand.
  ok.Bind();
}
953
954
// Branches to |then_label| when |result| is zero and |op| has its sign bit
// set — the case where an integer result of zero actually stands for -0.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);  // A non-zero result cannot be -0.
  test(op, Operand(op));
  j(sign, then_label, not_taken);
  bind(&ok);
}
965
966
// Two-operand variant: branches to |then_label| when |result| is zero and
// either |op1| or |op2| is negative (sign bit of op1|op2 set), i.e. when
// the zero result actually stands for -0.  |scratch| is clobbered.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);  // A non-zero result cannot be -0.
  mov(scratch, Operand(op1));
  or_(scratch, Operand(op2));  // Sign set iff at least one operand negative.
  j(sign, then_label, not_taken);
  bind(&ok);
}
980
981
// Loads the prototype of the JSFunction in |function| into |result|.
// Jumps to |miss| when |function| is a smi, is not a function, or its
// prototype is still the hole (not yet allocated).  Handles both the
// initial-map case and functions with a non-instance prototype.
// |scratch| is clobbered; |result| also serves as a map scratch early on.
void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss) {
  // Check that the receiver isn't a smi.
  test(function, Immediate(kSmiTagMask));
  j(zero, miss, not_taken);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss, not_taken);

  // Make sure that the function has an instance prototype.
  Label non_instance;
  movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
  test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance, not_taken);

  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(Operand(result), Immediate(Factory::the_hole_value()));
  j(equal, miss, not_taken);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  mov(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}
1027
1028
1029void MacroAssembler::CallStub(CodeStub* stub) {
Leon Clarkee46be812010-01-19 14:06:41 +00001030 ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
Steve Blocka7e24c12009-10-30 11:49:00 +00001031 call(stub->GetCode(), RelocInfo::CODE_TARGET);
1032}
1033
1034
John Reck59135872010-11-02 12:39:01 -07001035MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub) {
Leon Clarkee46be812010-01-19 14:06:41 +00001036 ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
John Reck59135872010-11-02 12:39:01 -07001037 Object* result;
1038 { MaybeObject* maybe_result = stub->TryGetCode();
1039 if (!maybe_result->ToObject(&result)) return maybe_result;
Leon Clarkee46be812010-01-19 14:06:41 +00001040 }
John Reck59135872010-11-02 12:39:01 -07001041 call(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
Leon Clarkee46be812010-01-19 14:06:41 +00001042 return result;
1043}
1044
1045
Steve Blockd0582a62009-12-15 09:54:21 +00001046void MacroAssembler::TailCallStub(CodeStub* stub) {
Leon Clarkee46be812010-01-19 14:06:41 +00001047 ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
Steve Blockd0582a62009-12-15 09:54:21 +00001048 jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
1049}
1050
1051
John Reck59135872010-11-02 12:39:01 -07001052MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub) {
Leon Clarkee46be812010-01-19 14:06:41 +00001053 ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
John Reck59135872010-11-02 12:39:01 -07001054 Object* result;
1055 { MaybeObject* maybe_result = stub->TryGetCode();
1056 if (!maybe_result->ToObject(&result)) return maybe_result;
Leon Clarkee46be812010-01-19 14:06:41 +00001057 }
John Reck59135872010-11-02 12:39:01 -07001058 jmp(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
Leon Clarkee46be812010-01-19 14:06:41 +00001059 return result;
1060}
1061
1062
Steve Blocka7e24c12009-10-30 11:49:00 +00001063void MacroAssembler::StubReturn(int argc) {
1064 ASSERT(argc >= 1 && generating_stub());
1065 ret((argc - 1) * kPointerSize);
1066}
1067
1068
1069void MacroAssembler::IllegalOperation(int num_arguments) {
1070 if (num_arguments > 0) {
1071 add(Operand(esp), Immediate(num_arguments * kPointerSize));
1072 }
1073 mov(eax, Immediate(Factory::undefined_value()));
1074}
1075
1076
// Extracts the cached array index from the string hash-field value in
// |hash| and leaves it, smi-tagged, in |index| (which may alias |hash|).
void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it does not conflict.
  ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  // We want the smi-tagged index in key. kArrayIndexValueMask has zeros in
  // the low kHashShift bits.
  and_(hash, String::kArrayIndexValueMask);
  STATIC_ASSERT(String::kHashShift >= kSmiTagSize && kSmiTag == 0);
  // Shift down so that kSmiTagSize zero bits remain below the value,
  // which is exactly the smi encoding.
  if (String::kHashShift > kSmiTagSize) {
    shr(hash, String::kHashShift - kSmiTagSize);
  }
  if (!index.is(hash)) {
    mov(index, hash);
  }
}
1094
1095
Steve Blocka7e24c12009-10-30 11:49:00 +00001096void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
1097 CallRuntime(Runtime::FunctionForId(id), num_arguments);
1098}
1099
1100
John Reck59135872010-11-02 12:39:01 -07001101MaybeObject* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
1102 int num_arguments) {
Leon Clarkee46be812010-01-19 14:06:41 +00001103 return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
1104}
1105
1106
Steve Blocka7e24c12009-10-30 11:49:00 +00001107void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
1108 // If the expected number of arguments of the runtime function is
1109 // constant, we check that the actual number of arguments match the
1110 // expectation.
1111 if (f->nargs >= 0 && f->nargs != num_arguments) {
1112 IllegalOperation(num_arguments);
1113 return;
1114 }
1115
Leon Clarke4515c472010-02-03 11:58:03 +00001116 // TODO(1236192): Most runtime routines don't need the number of
1117 // arguments passed in because it is constant. At some point we
1118 // should remove this need and make the runtime routine entry code
1119 // smarter.
1120 Set(eax, Immediate(num_arguments));
1121 mov(ebx, Immediate(ExternalReference(f)));
1122 CEntryStub ces(1);
1123 CallStub(&ces);
Steve Blocka7e24c12009-10-30 11:49:00 +00001124}
1125
1126
John Reck59135872010-11-02 12:39:01 -07001127MaybeObject* MacroAssembler::TryCallRuntime(Runtime::Function* f,
1128 int num_arguments) {
Leon Clarkee46be812010-01-19 14:06:41 +00001129 if (f->nargs >= 0 && f->nargs != num_arguments) {
1130 IllegalOperation(num_arguments);
1131 // Since we did not call the stub, there was no allocation failure.
1132 // Return some non-failure object.
1133 return Heap::undefined_value();
1134 }
1135
Leon Clarke4515c472010-02-03 11:58:03 +00001136 // TODO(1236192): Most runtime routines don't need the number of
1137 // arguments passed in because it is constant. At some point we
1138 // should remove this need and make the runtime routine entry code
1139 // smarter.
1140 Set(eax, Immediate(num_arguments));
1141 mov(ebx, Immediate(ExternalReference(f)));
1142 CEntryStub ces(1);
1143 return TryCallStub(&ces);
Leon Clarkee46be812010-01-19 14:06:41 +00001144}
1145
1146
Ben Murdochbb769b22010-08-11 14:56:33 +01001147void MacroAssembler::CallExternalReference(ExternalReference ref,
1148 int num_arguments) {
1149 mov(eax, Immediate(num_arguments));
1150 mov(ebx, Immediate(ref));
1151
1152 CEntryStub stub(1);
1153 CallStub(&stub);
1154}
1155
1156
// Tail-calls the external function |ext| with |num_arguments| arguments.
// |result_size| is ignored on ia32 (the body never reads it).
void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  JumpToExternalReference(ext);
}
1167
1168
// As TailCallExternalReference, but propagates a failure to materialize
// the CEntry stub's code.  |result_size| is ignored on ia32.
MaybeObject* MacroAssembler::TryTailCallExternalReference(
    const ExternalReference& ext, int num_arguments, int result_size) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  return TryJumpToExternalReference(ext);
}
1178
1179
Steve Block6ded16b2010-05-10 14:33:55 +01001180void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
1181 int num_arguments,
1182 int result_size) {
1183 TailCallExternalReference(ExternalReference(fid), num_arguments, result_size);
Steve Blocka7e24c12009-10-30 11:49:00 +00001184}
1185
1186
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08001187MaybeObject* MacroAssembler::TryTailCallRuntime(Runtime::FunctionId fid,
1188 int num_arguments,
1189 int result_size) {
1190 return TryTailCallExternalReference(
1191 ExternalReference(fid), num_arguments, result_size);
1192}
1193
1194
// If true, a Handle<T> passed by value is passed and returned by
// using the location_ field directly.  If false, it is passed and
// returned as a pointer to a handle.
// NOTE(review): presumably this tracks how the target C++ ABI passes the
// one-word Handle<T> by value — confirm where USING_BSD_ABI is defined.
#ifdef USING_BSD_ABI
static const bool kPassHandlesDirectly = true;
#else
static const bool kPassHandlesDirectly = false;
#endif
1203
1204
1205Operand ApiParameterOperand(int index) {
1206 return Operand(esp, (index + (kPassHandlesDirectly ? 0 : 1)) * kPointerSize);
1207}
1208
1209
// Sets up an API exit frame for a call to a C++ API function taking
// |argc| arguments.  When handles are not passed directly, two extra
// slots are reserved for the returned handle and a pointer to it (placed
// in slot 0 as an out parameter).  |scratch| is clobbered in that case.
void MacroAssembler::PrepareCallApiFunction(int argc, Register scratch) {
  if (kPassHandlesDirectly) {
    EnterApiExitFrame(argc);
    // When handles as passed directly we don't have to allocate extra
    // space for and pass an out parameter.
  } else {
    // We allocate two additional slots: return value and pointer to it.
    EnterApiExitFrame(argc + 2);

    // The argument slots are filled as follows:
    //
    // n + 1: output cell
    // n: arg n
    // ...
    // 1: arg1
    // 0: pointer to the output cell
    //
    // Note that this is one more "argument" than the function expects
    // so the out cell will have to be popped explicitly after returning
    // from the function. The out cell contains Handle.

    // pointer to out cell.
    lea(scratch, Operand(esp, (argc + 1) * kPointerSize));
    mov(Operand(esp, 0 * kPointerSize), scratch);  // output.
    if (FLAG_debug_code) {
      // Zero the out cell so stale data is never mistaken for a result.
      mov(Operand(esp, (argc + 1) * kPointerSize), Immediate(0));  // out cell.
    }
  }
}
1239
1240
// Calls an API function and performs the HandleScope bookkeeping around
// it: saves the current scope state in callee-save registers (ebx, edi)
// and bumps the scope level, calls |function|, unwraps the returned
// handle (undefined when the handle is empty), restores the scope —
// deleting any extensions the callee allocated — promotes a scheduled
// exception if one is pending, and otherwise returns to the JS caller
// popping |stack_space| pointer-size slots.
MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn(ApiFunction* function,
                                                         int stack_space) {
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address();
  ExternalReference level_address =
      ExternalReference::handle_scope_level_address();

  // Allocate HandleScope in callee-save registers.
  mov(ebx, Operand::StaticVariable(next_address));
  mov(edi, Operand::StaticVariable(limit_address));
  add(Operand::StaticVariable(level_address), Immediate(1));

  // Call the api function!
  call(function->address(), RelocInfo::RUNTIME_ENTRY);

  if (!kPassHandlesDirectly) {
    // The returned value is a pointer to the handle holding the result.
    // Dereference this to get to the location.
    mov(eax, Operand(eax, 0));
  }

  Label empty_handle;
  Label prologue;
  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;

  // Check if the result handle holds 0.
  test(eax, Operand(eax));
  j(zero, &empty_handle, not_taken);
  // It was non-zero.  Dereference to get the result value.
  mov(eax, Operand(eax, 0));
  bind(&prologue);
  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  mov(Operand::StaticVariable(next_address), ebx);
  sub(Operand::StaticVariable(level_address), Immediate(1));
  // The flags from the sub above: the level must stay non-negative.
  Assert(above_equal, "Invalid HandleScope level");
  cmp(edi, Operand::StaticVariable(limit_address));
  j(not_equal, &delete_allocated_handles, not_taken);
  bind(&leave_exit_frame);

  // Check if the function scheduled an exception.
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address();
  cmp(Operand::StaticVariable(scheduled_exception_address),
      Immediate(Factory::the_hole_value()));
  j(not_equal, &promote_scheduled_exception, not_taken);
  LeaveApiExitFrame();
  ret(stack_space * kPointerSize);
  bind(&promote_scheduled_exception);
  MaybeObject* result =
      TryTailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);
  if (result->IsFailure()) {
    return result;
  }
  bind(&empty_handle);
  // It was zero; the result is undefined.
  mov(eax, Factory::undefined_value());
  jmp(&prologue);

  // HandleScope limit has changed. Delete allocated extensions.
  bind(&delete_allocated_handles);
  mov(Operand::StaticVariable(limit_address), edi);
  mov(edi, eax);  // Preserve the result value across the C call.
  mov(eax, Immediate(ExternalReference::delete_handle_scope_extensions()));
  call(Operand(eax));
  mov(eax, edi);
  jmp(&leave_exit_frame);

  return result;
}
1315
1316
Steve Block6ded16b2010-05-10 14:33:55 +01001317void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001318 // Set the entry point and jump to the C entry runtime stub.
1319 mov(ebx, Immediate(ext));
1320 CEntryStub ces(1);
1321 jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
1322}
1323
1324
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08001325MaybeObject* MacroAssembler::TryJumpToExternalReference(
1326 const ExternalReference& ext) {
1327 // Set the entry point and jump to the C entry runtime stub.
1328 mov(ebx, Immediate(ext));
1329 CEntryStub ces(1);
1330 return TryTailCallStub(&ces);
1331}
1332
1333
// Emits the argument-count check that precedes every function invocation.
// When expected and actual counts are known (or proven) to match, falls
// through to the invoke point; otherwise routes through the arguments
// adaptor trampoline with eax = actual count, ebx = expected count and
// edx = the code to invoke, as the adaptor requires.  For a JUMP_FUNCTION
// mismatch the adaptor is jumped to and control never reaches |done|.
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    const Operand& code_operand,
                                    Label* done,
                                    InvokeFlag flag) {
  bool definitely_matches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(eax, actual.immediate());
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      ASSERT(expected.reg().is(ebx));
      mov(eax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), Operand(actual.reg()));
      j(equal, &invoke);
      ASSERT(actual.reg().is(eax));
      ASSERT(expected.reg().is(ebx));
    }
  }

  if (!definitely_matches) {
    // Mismatch (or possible mismatch): go through the adaptor trampoline,
    // which expects the code to invoke in edx.
    Handle<Code> adaptor =
        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
    if (!code_constant.is_null()) {
      mov(edx, Immediate(code_constant));
      add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_operand.is_reg(edx)) {
      mov(edx, code_operand);
    }

    if (flag == CALL_FUNCTION) {
      call(adaptor, RelocInfo::CODE_TARGET);
      jmp(done);
    } else {
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
1397
1398
1399void MacroAssembler::InvokeCode(const Operand& code,
1400 const ParameterCount& expected,
1401 const ParameterCount& actual,
1402 InvokeFlag flag) {
1403 Label done;
1404 InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
1405 if (flag == CALL_FUNCTION) {
1406 call(code);
1407 } else {
1408 ASSERT(flag == JUMP_FUNCTION);
1409 jmp(code);
1410 }
1411 bind(&done);
1412}
1413
1414
1415void MacroAssembler::InvokeCode(Handle<Code> code,
1416 const ParameterCount& expected,
1417 const ParameterCount& actual,
1418 RelocInfo::Mode rmode,
1419 InvokeFlag flag) {
1420 Label done;
1421 Operand dummy(eax);
1422 InvokePrologue(expected, actual, code, dummy, &done, flag);
1423 if (flag == CALL_FUNCTION) {
1424 call(code, rmode);
1425 } else {
1426 ASSERT(flag == JUMP_FUNCTION);
1427 jmp(code, rmode);
1428 }
1429 bind(&done);
1430}
1431
1432
// Invokes the JSFunction in |fun| (which must be edi) with |actual|
// arguments.  Loads the function's context into esi and its formal
// parameter count into ebx, then calls or jumps to its code entry.
void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(fun.is(edi));
  mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  // The formal parameter count is stored as a smi.
  SmiUntag(ebx);

  ParameterCount expected(ebx);
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, actual, flag);
}
1446
1447
// Invokes a known, already-compiled JSFunction.  Loads the function into
// edi and its context into esi, then invokes its cached code with the
// statically known formal parameter count.
void MacroAssembler::InvokeFunction(JSFunction* function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(function->is_compiled());
  // Get the function and setup the context.
  mov(edi, Immediate(Handle<JSFunction>(function)));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  // Invoke the cached code.
  Handle<Code> code(function->code());
  ParameterCount expected(function->shared()->formal_parameter_count());
  InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag);
}
1460
1461
// Invokes the JS builtin |id|.  A fake expected parameter count of zero
// suppresses the argument adaptation check; the caller is responsible for
// pushing the correct number of arguments.  Clobbers edi.
void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments match the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinFunction(edi, id);
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, expected, flag);
}
1474
// Loads the JSFunction for builtin |id| into |target| by walking the
// current context -> global object -> builtins object -> function slot.
void MacroAssembler::GetBuiltinFunction(Register target,
                                        Builtins::JavaScript id) {
  // Load the JavaScript builtin function from the builtins object.
  mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
  mov(target, FieldOperand(target,
                           JSBuiltinsObject::OffsetOfFunctionWithId(id)));
}
Steve Blocka7e24c12009-10-30 11:49:00 +00001483
// Loads the code entry point of builtin |id| into |target|.  Clobbers edi
// (used to hold the builtin function), so |target| must not be edi.
void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(edi));
  // Load the JavaScript builtin function from the builtins object.
  GetBuiltinFunction(edi, id);
  // Load the code entry point from the function into the target register.
  mov(target, FieldOperand(edi, JSFunction::kCodeEntryOffset));
}
1491
1492
// Loads into |dst| the function context |context_chain_length| levels up
// from the current context (esi).  Each hop goes through the closure's
// context; the final lookup through FCONTEXT_INDEX resolves a possibly
// intermediate context to its function context.
void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    mov(dst, Operand(esi, Context::SlotOffset(Context::CLOSURE_INDEX)));
    // Load the function context (which is the incoming, outer context).
    mov(dst, FieldOperand(dst, JSFunction::kContextOffset));
    for (int i = 1; i < context_chain_length; i++) {
      mov(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
      mov(dst, FieldOperand(dst, JSFunction::kContextOffset));
    }
    // The context may be an intermediate context, not a function context.
    mov(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  } else {  // Slot is in the current function context.
    // The context may be an intermediate context, not a function context.
    mov(dst, Operand(esi, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  }
}
1510
1511
// Loads the global-context function stored at slot |index| into
// |function|, resolving through the current context's global object.
void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the global or builtins object from the current context.
  mov(function, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  // Load the global context from the global or builtins object.
  mov(function, FieldOperand(function, GlobalObject::kGlobalContextOffset));
  // Load the function from the global context.
  mov(function, Operand(function, Context::SlotOffset(index)));
}
1520
1521
// Loads the initial map of the global function |function| into |map|.
// With --debug-code, aborts if the loaded value is not actually a map.
void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map. The global functions all have initial maps.
  mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (FLAG_debug_code) {
    Label ok, fail;
    CheckMap(map, Factory::meta_map(), &fail, false);
    jmp(&ok);
    bind(&fail);
    Abort("Global functions must have initial map");
    bind(&ok);
  }
}
1535
Steve Blockd0582a62009-12-15 09:54:21 +00001536
// Returns to the caller without popping any arguments.
void MacroAssembler::Ret() {
  ret(0);
}
1540
1541
Leon Clarkee46be812010-01-19 14:06:41 +00001542void MacroAssembler::Drop(int stack_elements) {
1543 if (stack_elements > 0) {
1544 add(Operand(esp), Immediate(stack_elements * kPointerSize));
1545 }
1546}
1547
1548
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001549void MacroAssembler::Move(Register dst, Register src) {
1550 if (!dst.is(src)) {
1551 mov(dst, src);
1552 }
1553}
1554
1555
// Materializes the given object handle in |dst|.
void MacroAssembler::Move(Register dst, Handle<Object> value) {
  mov(dst, value);
}
1559
1560
Steve Blocka7e24c12009-10-30 11:49:00 +00001561void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
1562 if (FLAG_native_code_counters && counter->Enabled()) {
1563 mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
1564 }
1565}
1566
1567
1568void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
1569 ASSERT(value > 0);
1570 if (FLAG_native_code_counters && counter->Enabled()) {
1571 Operand operand = Operand::StaticVariable(ExternalReference(counter));
1572 if (value == 1) {
1573 inc(operand);
1574 } else {
1575 add(operand, Immediate(value));
1576 }
1577 }
1578}
1579
1580
1581void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
1582 ASSERT(value > 0);
1583 if (FLAG_native_code_counters && counter->Enabled()) {
1584 Operand operand = Operand::StaticVariable(ExternalReference(counter));
1585 if (value == 1) {
1586 dec(operand);
1587 } else {
1588 sub(operand, Immediate(value));
1589 }
1590 }
1591}
1592
1593
Leon Clarked91b9f72010-01-27 17:25:45 +00001594void MacroAssembler::IncrementCounter(Condition cc,
1595 StatsCounter* counter,
1596 int value) {
1597 ASSERT(value > 0);
1598 if (FLAG_native_code_counters && counter->Enabled()) {
1599 Label skip;
1600 j(NegateCondition(cc), &skip);
1601 pushfd();
1602 IncrementCounter(counter, value);
1603 popfd();
1604 bind(&skip);
1605 }
1606}
1607
1608
1609void MacroAssembler::DecrementCounter(Condition cc,
1610 StatsCounter* counter,
1611 int value) {
1612 ASSERT(value > 0);
1613 if (FLAG_native_code_counters && counter->Enabled()) {
1614 Label skip;
1615 j(NegateCondition(cc), &skip);
1616 pushfd();
1617 DecrementCounter(counter, value);
1618 popfd();
1619 bind(&skip);
1620 }
1621}
1622
1623
Steve Blocka7e24c12009-10-30 11:49:00 +00001624void MacroAssembler::Assert(Condition cc, const char* msg) {
1625 if (FLAG_debug_code) Check(cc, msg);
1626}
1627
1628
// Debug-mode check that |elements| is a fast-elements backing store: its
// map must be either the fixed-array map or the copy-on-write fixed-array
// map.  Aborts otherwise.  Emits nothing unless --debug-code is on.
void MacroAssembler::AssertFastElements(Register elements) {
  if (FLAG_debug_code) {
    Label ok;
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(Factory::fixed_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(Factory::fixed_cow_array_map()));
    j(equal, &ok);
    Abort("JSObject with fast elements map has slow elements");
    bind(&ok);
  }
}
1642
1643
Steve Blocka7e24c12009-10-30 11:49:00 +00001644void MacroAssembler::Check(Condition cc, const char* msg) {
1645 Label L;
1646 j(cc, &L, taken);
1647 Abort(msg);
1648 // will not return here
1649 bind(&L);
1650}
1651
1652
Steve Block6ded16b2010-05-10 14:33:55 +01001653void MacroAssembler::CheckStackAlignment() {
1654 int frame_alignment = OS::ActivationFrameAlignment();
1655 int frame_alignment_mask = frame_alignment - 1;
1656 if (frame_alignment > kPointerSize) {
1657 ASSERT(IsPowerOf2(frame_alignment));
1658 Label alignment_as_expected;
1659 test(esp, Immediate(frame_alignment_mask));
1660 j(zero, &alignment_as_expected);
1661 // Abort if stack is not aligned.
1662 int3();
1663 bind(&alignment_as_expected);
1664 }
1665}
1666
1667
// Emit code that aborts execution with |msg| by calling Runtime::kAbort.
// The emitted code never returns; an int3 follows the runtime call as a
// backstop.
void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  // Clear the tag bits of the pointer so p0 is smi-shaped; the dropped
  // low bits are recovered from the (p1 - p0) delta passed alongside.
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  set_allow_stub_calls(true);

  push(eax);
  push(Immediate(p0));
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
  int3();
}
1693
1694
Iain Merrick75681382010-08-19 15:07:18 +01001695void MacroAssembler::JumpIfNotNumber(Register reg,
1696 TypeInfo info,
1697 Label* on_not_number) {
1698 if (FLAG_debug_code) AbortIfSmi(reg);
1699 if (!info.IsNumber()) {
1700 cmp(FieldOperand(reg, HeapObject::kMapOffset),
1701 Factory::heap_number_map());
1702 j(not_equal, on_not_number);
1703 }
1704}
1705
1706
// Convert the heap number in |source| to an int32 in |dst|, jumping to
// |on_not_int32| when the value cannot be represented as an int32.
// |scratch| may be no_reg, in which case |dst| doubles as the scratch;
// if additionally dst == source, the original value is preserved on the
// stack so the failure path can restore it.
void MacroAssembler::ConvertToInt32(Register dst,
                                    Register source,
                                    Register scratch,
                                    TypeInfo info,
                                    Label* on_not_int32) {
  if (FLAG_debug_code) {
    AbortIfSmi(source);
    AbortIfNotNumber(source);
  }
  if (info.IsInteger32()) {
    // Statically known to fit in an int32: truncate without a range check.
    cvttsd2si(dst, FieldOperand(source, HeapNumber::kValueOffset));
  } else {
    Label done;
    // No scratch register and dst aliases source: save dst on the stack
    // so the heap number pointer can be restored if conversion fails.
    bool push_pop = (scratch.is(no_reg) && dst.is(source));
    ASSERT(!scratch.is(source));
    if (push_pop) {
      push(dst);
      scratch = dst;
    }
    if (scratch.is(no_reg)) scratch = dst;
    cvttsd2si(scratch, FieldOperand(source, HeapNumber::kValueOffset));
    // cvttsd2si produces 0x80000000 (the "integer indefinite" value) when
    // the double is NaN or out of int32 range, so that result means failure.
    cmp(scratch, 0x80000000u);
    if (push_pop) {
      j(not_equal, &done);
      pop(dst);  // Restore the original value before taking the slow path.
      jmp(on_not_int32);
    } else {
      j(equal, on_not_int32);
    }

    bind(&done);
    if (push_pop) {
      add(Operand(esp), Immediate(kPointerSize));  // Pop.
    }
    if (!scratch.is(dst)) {
      mov(dst, scratch);
    }
  }
}
1746
1747
// Load 2^|power| into XMM register |dst| by constructing the IEEE 754
// double bit pattern directly: put the biased exponent in |scratch|,
// move it to the low bits of |dst|, then shift it up into the exponent
// field (the mantissa stays zero, giving an exact power of two).
void MacroAssembler::LoadPowerOf2(XMMRegister dst,
                                  Register scratch,
                                  int power) {
  // The biased exponent must fit in the double's exponent field.
  ASSERT(is_uintn(power + HeapNumber::kExponentBias,
                  HeapNumber::kExponentBits));
  mov(scratch, Immediate(power + HeapNumber::kExponentBias));
  movd(dst, Operand(scratch));
  psllq(dst, HeapNumber::kMantissaBits);
}
1757
1758
// Jump to |failure| unless |instance_type| describes a sequential ASCII
// string.  |scratch| receives a masked copy of the instance type and is
// clobbered; it may alias |instance_type|.
void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Register scratch,
    Label* failure) {
  if (!scratch.is(instance_type)) {
    mov(scratch, instance_type);
  }
  // Keep only the bits that encode string-ness, representation
  // (sequential vs. cons/external) and encoding (ASCII vs. two-byte).
  and_(scratch,
       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
  cmp(scratch, kStringTag | kSeqStringTag | kAsciiStringTag);
  j(not_equal, failure);
}
1771
1772
// Jump to |failure| unless both |object1| and |object2| are sequential
// ASCII strings.  Clobbers |scratch1| and |scratch2|.
void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register object1,
                                                         Register object2,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         Label* failure) {
  // Check that both objects are not smis.
  ASSERT_EQ(0, kSmiTag);
  // A smi has a clear tag bit, so the AND of the two words has a clear
  // tag bit iff at least one of the objects is a smi.
  mov(scratch1, Operand(object1));
  and_(scratch1, Operand(object2));
  test(scratch1, Immediate(kSmiTagMask));
  j(zero, failure);

  // Load instance type for both strings.
  mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
  mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
  movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ascii strings.
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
  // Interleave bits from both instance types and compare them in one check.
  // Requires the mask shifted left by 3 not to overlap the unshifted mask.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  and_(scratch1, kFlatAsciiStringMask);
  and_(scratch2, kFlatAsciiStringMask);
  // lea with scale 8 computes scratch1 | (scratch2 << 3) in one instruction
  // (the masked values cannot overlap, so add == or here).
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3));
  j(not_equal, failure);
}
1803
1804
Steve Block6ded16b2010-05-10 14:33:55 +01001805void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
1806 int frameAlignment = OS::ActivationFrameAlignment();
1807 if (frameAlignment != 0) {
1808 // Make stack end at alignment and make room for num_arguments words
1809 // and the original value of esp.
1810 mov(scratch, esp);
1811 sub(Operand(esp), Immediate((num_arguments + 1) * kPointerSize));
1812 ASSERT(IsPowerOf2(frameAlignment));
1813 and_(esp, -frameAlignment);
1814 mov(Operand(esp, num_arguments * kPointerSize), scratch);
1815 } else {
1816 sub(Operand(esp), Immediate(num_arguments * kPointerSize));
1817 }
1818}
1819
1820
// Call the C function identified by the external reference |function|.
// |num_arguments| words of arguments must already be in place (see
// PrepareCallCFunction).  Clobbers eax.
void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Trashing eax is ok as it will be the return value.
  mov(Operand(eax), Immediate(function));
  CallCFunction(eax, num_arguments);
}
1827
1828
1829void MacroAssembler::CallCFunction(Register function,
1830 int num_arguments) {
1831 // Check stack alignment.
1832 if (FLAG_debug_code) {
1833 CheckStackAlignment();
1834 }
1835
1836 call(Operand(function));
1837 if (OS::ActivationFrameAlignment() != 0) {
1838 mov(esp, Operand(esp, num_arguments * kPointerSize));
1839 } else {
1840 add(Operand(esp), Immediate(num_arguments * sizeof(int32_t)));
1841 }
1842}
1843
1844
// Create a macro assembler that writes directly over existing code at
// |address|, for patching exactly |size| bytes in place.
CodePatcher::CodePatcher(byte* address, int size)
    : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
1852
1853
// Flush the instruction cache over the patched region and verify that
// exactly |size_| bytes of instructions were emitted.
CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
1862
1863
1864} } // namespace v8::internal
Leon Clarkef7060e22010-06-03 12:02:55 +01001865
1866#endif // V8_TARGET_ARCH_IA32