blob: b72f4df0e7ef2c0838bf1b36db23a79468e84d8b [file] [log] [blame]
Steve Blocka7e24c12009-10-30 11:49:00 +00001// Copyright 2006-2009 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
29
Leon Clarkef7060e22010-06-03 12:02:55 +010030#if defined(V8_TARGET_ARCH_IA32)
31
Steve Blocka7e24c12009-10-30 11:49:00 +000032#include "bootstrapper.h"
33#include "codegen-inl.h"
34#include "debug.h"
35#include "runtime.h"
36#include "serialize.h"
37
38namespace v8 {
39namespace internal {
40
41// -------------------------------------------------------------------------
42// MacroAssembler implementation.
43
// Construct a MacroAssembler that emits code into the given buffer of
// `size` bytes. Stub generation is off and stub calls are allowed by
// default; code_object_ starts as undefined and is patched later (see
// the FLAG_debug_code check in EnterFrame).
MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      code_object_(Heap::undefined_value()) {
}
50
51
Steve Block6ded16b2010-05-10 14:33:55 +010052void MacroAssembler::RecordWriteHelper(Register object,
53 Register addr,
54 Register scratch) {
55 if (FLAG_debug_code) {
56 // Check that the object is not in new space.
57 Label not_in_new_space;
58 InNewSpace(object, scratch, not_equal, &not_in_new_space);
59 Abort("new-space object passed to RecordWriteHelper");
60 bind(&not_in_new_space);
61 }
62
Steve Blocka7e24c12009-10-30 11:49:00 +000063 // Compute the page start address from the heap object pointer, and reuse
64 // the 'object' register for it.
Steve Block6ded16b2010-05-10 14:33:55 +010065 and_(object, ~Page::kPageAlignmentMask);
Steve Blocka7e24c12009-10-30 11:49:00 +000066
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010067 // Compute number of region covering addr. See Page::GetRegionNumberForAddress
68 // method for more details.
69 and_(addr, Page::kPageAlignmentMask);
70 shr(addr, Page::kRegionSizeLog2);
Steve Blocka7e24c12009-10-30 11:49:00 +000071
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010072 // Set dirty mark for region.
73 bts(Operand(object, Page::kDirtyFlagOffset), addr);
Steve Blocka7e24c12009-10-30 11:49:00 +000074}
75
76
Steve Block6ded16b2010-05-10 14:33:55 +010077void MacroAssembler::InNewSpace(Register object,
78 Register scratch,
79 Condition cc,
80 Label* branch) {
81 ASSERT(cc == equal || cc == not_equal);
82 if (Serializer::enabled()) {
83 // Can't do arithmetic on external references if it might get serialized.
84 mov(scratch, Operand(object));
85 // The mask isn't really an address. We load it as an external reference in
86 // case the size of the new space is different between the snapshot maker
87 // and the running system.
88 and_(Operand(scratch), Immediate(ExternalReference::new_space_mask()));
89 cmp(Operand(scratch), Immediate(ExternalReference::new_space_start()));
90 j(cc, branch);
91 } else {
92 int32_t new_space_start = reinterpret_cast<int32_t>(
93 ExternalReference::new_space_start().address());
94 lea(scratch, Operand(object, -new_space_start));
95 and_(scratch, Heap::NewSpaceMask());
96 j(cc, branch);
Steve Blocka7e24c12009-10-30 11:49:00 +000097 }
Steve Blocka7e24c12009-10-30 11:49:00 +000098}
99
100
Kristian Monsen50ef84f2010-07-29 15:18:00 +0100101void MacroAssembler::RecordWrite(Register object,
102 int offset,
103 Register value,
104 Register scratch) {
Leon Clarke4515c472010-02-03 11:58:03 +0000105 // The compiled code assumes that record write doesn't change the
106 // context register, so we check that none of the clobbered
107 // registers are esi.
108 ASSERT(!object.is(esi) && !value.is(esi) && !scratch.is(esi));
109
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100110 // First, check if a write barrier is even needed. The tests below
111 // catch stores of Smis and stores into young gen.
Steve Blocka7e24c12009-10-30 11:49:00 +0000112 Label done;
113
114 // Skip barrier if writing a smi.
115 ASSERT_EQ(0, kSmiTag);
116 test(value, Immediate(kSmiTagMask));
117 j(zero, &done);
118
Steve Block6ded16b2010-05-10 14:33:55 +0100119 InNewSpace(object, value, equal, &done);
Steve Blocka7e24c12009-10-30 11:49:00 +0000120
Steve Block6ded16b2010-05-10 14:33:55 +0100121 // The offset is relative to a tagged or untagged HeapObject pointer,
122 // so either offset or offset + kHeapObjectTag must be a
123 // multiple of kPointerSize.
124 ASSERT(IsAligned(offset, kPointerSize) ||
125 IsAligned(offset + kHeapObjectTag, kPointerSize));
126
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100127 Register dst = scratch;
128 if (offset != 0) {
129 lea(dst, Operand(object, offset));
Steve Blocka7e24c12009-10-30 11:49:00 +0000130 } else {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100131 // Array access: calculate the destination address in the same manner as
132 // KeyedStoreIC::GenerateGeneric. Multiply a smi by 2 to get an offset
133 // into an array of words.
134 ASSERT_EQ(1, kSmiTagSize);
135 ASSERT_EQ(0, kSmiTag);
136 lea(dst, Operand(object, dst, times_half_pointer_size,
137 FixedArray::kHeaderSize - kHeapObjectTag));
Steve Blocka7e24c12009-10-30 11:49:00 +0000138 }
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100139 RecordWriteHelper(object, dst, value);
Steve Blocka7e24c12009-10-30 11:49:00 +0000140
141 bind(&done);
Leon Clarke4515c472010-02-03 11:58:03 +0000142
143 // Clobber all input registers when running with the debug-code flag
144 // turned on to provoke errors.
145 if (FLAG_debug_code) {
Steve Block6ded16b2010-05-10 14:33:55 +0100146 mov(object, Immediate(BitCast<int32_t>(kZapValue)));
147 mov(value, Immediate(BitCast<int32_t>(kZapValue)));
148 mov(scratch, Immediate(BitCast<int32_t>(kZapValue)));
Leon Clarke4515c472010-02-03 11:58:03 +0000149 }
Steve Blocka7e24c12009-10-30 11:49:00 +0000150}
151
152
Steve Block8defd9f2010-07-08 12:39:36 +0100153void MacroAssembler::RecordWrite(Register object,
154 Register address,
155 Register value) {
156 // The compiled code assumes that record write doesn't change the
157 // context register, so we check that none of the clobbered
158 // registers are esi.
159 ASSERT(!object.is(esi) && !value.is(esi) && !address.is(esi));
160
161 // First, check if a write barrier is even needed. The tests below
162 // catch stores of Smis and stores into young gen.
163 Label done;
164
165 // Skip barrier if writing a smi.
166 ASSERT_EQ(0, kSmiTag);
167 test(value, Immediate(kSmiTagMask));
168 j(zero, &done);
169
170 InNewSpace(object, value, equal, &done);
171
172 RecordWriteHelper(object, address, value);
173
174 bind(&done);
175
176 // Clobber all input registers when running with the debug-code flag
177 // turned on to provoke errors.
178 if (FLAG_debug_code) {
179 mov(object, Immediate(BitCast<int32_t>(kZapValue)));
180 mov(address, Immediate(BitCast<int32_t>(kZapValue)));
181 mov(value, Immediate(BitCast<int32_t>(kZapValue)));
182 }
183}
184
185
Steve Blockd0582a62009-12-15 09:54:21 +0000186void MacroAssembler::StackLimitCheck(Label* on_stack_overflow) {
187 cmp(esp,
188 Operand::StaticVariable(ExternalReference::address_of_stack_limit()));
189 j(below, on_stack_overflow);
190}
191
192
Steve Blocka7e24c12009-10-30 11:49:00 +0000193#ifdef ENABLE_DEBUGGER_SUPPORT
Andrei Popescu402d9372010-02-26 13:31:12 +0000194void MacroAssembler::DebugBreak() {
195 Set(eax, Immediate(0));
196 mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak)));
197 CEntryStub ces(1);
198 call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
199}
Steve Blocka7e24c12009-10-30 11:49:00 +0000200#endif
201
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100202
Steve Blocka7e24c12009-10-30 11:49:00 +0000203void MacroAssembler::Set(Register dst, const Immediate& x) {
204 if (x.is_zero()) {
205 xor_(dst, Operand(dst)); // shorter than mov
206 } else {
207 mov(dst, x);
208 }
209}
210
211
// Store the immediate `x` into the memory operand `dst`.
void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}
215
216
// Load heap_object's map into `map` and compare its instance type
// against `type`; leaves the flags set for the caller's branch.
void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}
223
224
// Compare the one-byte instance-type field of `map` against `type`;
// leaves the flags set for the caller's branch.
void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       static_cast<int8_t>(type));
}
229
230
Andrei Popescu31002712010-02-23 13:46:05 +0000231void MacroAssembler::CheckMap(Register obj,
232 Handle<Map> map,
233 Label* fail,
234 bool is_heap_object) {
235 if (!is_heap_object) {
236 test(obj, Immediate(kSmiTagMask));
237 j(zero, fail);
238 }
239 cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
240 j(not_equal, fail);
241}
242
243
Leon Clarkee46be812010-01-19 14:06:41 +0000244Condition MacroAssembler::IsObjectStringType(Register heap_object,
245 Register map,
246 Register instance_type) {
247 mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
248 movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
249 ASSERT(kNotStringTag != 0);
250 test(instance_type, Immediate(kIsNotStringMask));
251 return zero;
252}
253
254
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100255void MacroAssembler::IsObjectJSObjectType(Register heap_object,
256 Register map,
257 Register scratch,
258 Label* fail) {
259 mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
260 IsInstanceJSObjectType(map, scratch, fail);
261}
262
263
// Jump to `fail` unless the instance type in `map` lies in the
// [FIRST_JS_OBJECT_TYPE, LAST_JS_OBJECT_TYPE] range. The subtract
// turns the two-sided range test into a single unsigned compare.
void MacroAssembler::IsInstanceJSObjectType(Register map,
                                            Register scratch,
                                            Label* fail) {
  movzx_b(scratch, FieldOperand(map, Map::kInstanceTypeOffset));
  sub(Operand(scratch), Immediate(FIRST_JS_OBJECT_TYPE));
  cmp(scratch, LAST_JS_OBJECT_TYPE - FIRST_JS_OBJECT_TYPE);
  j(above, fail);
}
272
273
Steve Blocka7e24c12009-10-30 11:49:00 +0000274void MacroAssembler::FCmp() {
Steve Blockd0582a62009-12-15 09:54:21 +0000275 if (CpuFeatures::IsSupported(CMOV)) {
Steve Block3ce2e202009-11-05 08:53:23 +0000276 fucomip();
277 ffree(0);
278 fincstp();
279 } else {
280 fucompp();
281 push(eax);
282 fnstsw_ax();
283 sahf();
284 pop(eax);
285 }
Steve Blocka7e24c12009-10-30 11:49:00 +0000286}
287
288
Steve Block6ded16b2010-05-10 14:33:55 +0100289void MacroAssembler::AbortIfNotNumber(Register object) {
Andrei Popescu402d9372010-02-26 13:31:12 +0000290 Label ok;
291 test(object, Immediate(kSmiTagMask));
292 j(zero, &ok);
293 cmp(FieldOperand(object, HeapObject::kMapOffset),
294 Factory::heap_number_map());
Steve Block6ded16b2010-05-10 14:33:55 +0100295 Assert(equal, "Operand not a number");
Andrei Popescu402d9372010-02-26 13:31:12 +0000296 bind(&ok);
297}
298
299
Steve Block6ded16b2010-05-10 14:33:55 +0100300void MacroAssembler::AbortIfNotSmi(Register object) {
301 test(object, Immediate(kSmiTagMask));
Iain Merrick75681382010-08-19 15:07:18 +0100302 Assert(equal, "Operand is not a smi");
303}
304
305
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100306void MacroAssembler::AbortIfNotString(Register object) {
307 test(object, Immediate(kSmiTagMask));
308 Assert(not_equal, "Operand is not a string");
309 push(object);
310 mov(object, FieldOperand(object, HeapObject::kMapOffset));
311 CmpInstanceType(object, FIRST_NONSTRING_TYPE);
312 pop(object);
313 Assert(below, "Operand is not a string");
314}
315
316
Iain Merrick75681382010-08-19 15:07:18 +0100317void MacroAssembler::AbortIfSmi(Register object) {
318 test(object, Immediate(kSmiTagMask));
319 Assert(not_equal, "Operand is a smi");
Steve Block6ded16b2010-05-10 14:33:55 +0100320}
321
322
Steve Blocka7e24c12009-10-30 11:49:00 +0000323void MacroAssembler::EnterFrame(StackFrame::Type type) {
324 push(ebp);
325 mov(ebp, Operand(esp));
326 push(esi);
327 push(Immediate(Smi::FromInt(type)));
328 push(Immediate(CodeObject()));
329 if (FLAG_debug_code) {
330 cmp(Operand(esp, 0), Immediate(Factory::undefined_value()));
331 Check(not_equal, "code object not properly patched");
332 }
333}
334
335
// Tear down a frame built by EnterFrame. Under FLAG_debug_code, verify
// the frame marker matches the expected frame type before leaving.
void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (FLAG_debug_code) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, "stack frame types must match");
  }
  leave();
}
344
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100345
// First half of exit-frame construction: link ebp, reserve the entry-sp
// slot (patched in the epilogue), push the code object, and record the
// frame pointer and context in the Top external variables.
void MacroAssembler::EnterExitFramePrologue() {
  // Setup the frame structure on the stack.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(ebp);
  mov(ebp, Operand(esp));

  // Reserve room for entry stack pointer and push the code object.
  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  push(Immediate(CodeObject()));  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  ExternalReference context_address(Top::k_context_address);
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);
}
Steve Blocka7e24c12009-10-30 11:49:00 +0000365
Steve Blocka7e24c12009-10-30 11:49:00 +0000366
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100367void MacroAssembler::EnterExitFrameEpilogue(int argc) {
Steve Blockd0582a62009-12-15 09:54:21 +0000368 // Reserve space for arguments.
369 sub(Operand(esp), Immediate(argc * kPointerSize));
Steve Blocka7e24c12009-10-30 11:49:00 +0000370
371 // Get the required frame alignment for the OS.
372 static const int kFrameAlignment = OS::ActivationFrameAlignment();
373 if (kFrameAlignment > 0) {
374 ASSERT(IsPowerOf2(kFrameAlignment));
375 and_(esp, -kFrameAlignment);
376 }
377
378 // Patch the saved entry sp.
379 mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
380}
381
382
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100383void MacroAssembler::EnterExitFrame() {
384 EnterExitFramePrologue();
Steve Blockd0582a62009-12-15 09:54:21 +0000385
386 // Setup argc and argv in callee-saved registers.
387 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
388 mov(edi, Operand(eax));
389 lea(esi, Operand(ebp, eax, times_4, offset));
390
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100391 EnterExitFrameEpilogue(2);
Steve Blockd0582a62009-12-15 09:54:21 +0000392}
393
394
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100395void MacroAssembler::EnterApiExitFrame(int stack_space,
Steve Blockd0582a62009-12-15 09:54:21 +0000396 int argc) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100397 EnterExitFramePrologue();
Steve Blockd0582a62009-12-15 09:54:21 +0000398
399 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
400 lea(esi, Operand(ebp, (stack_space * kPointerSize) + offset));
401
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100402 EnterExitFrameEpilogue(argc);
Steve Blockd0582a62009-12-15 09:54:21 +0000403}
404
405
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100406void MacroAssembler::LeaveExitFrame() {
Steve Blocka7e24c12009-10-30 11:49:00 +0000407 // Get the return address from the stack and restore the frame pointer.
408 mov(ecx, Operand(ebp, 1 * kPointerSize));
409 mov(ebp, Operand(ebp, 0 * kPointerSize));
410
411 // Pop the arguments and the receiver from the caller stack.
412 lea(esp, Operand(esi, 1 * kPointerSize));
413
414 // Restore current context from top and clear it in debug mode.
415 ExternalReference context_address(Top::k_context_address);
416 mov(esi, Operand::StaticVariable(context_address));
417#ifdef DEBUG
418 mov(Operand::StaticVariable(context_address), Immediate(0));
419#endif
420
421 // Push the return address to get ready to return.
422 push(ecx);
423
424 // Clear the top frame.
425 ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
426 mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
427}
428
429
// Push a new stack handler (state, fp, next) on top of the return
// address already on the stack, and link it as the current handler in
// Top::k_handler_address. Layout must match StackHandlerConstants.
void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  // Adjust this code if not the case.
  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
  // The pc (return address) is already on TOS.
  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      push(Immediate(StackHandler::TRY_CATCH));
    } else {
      push(Immediate(StackHandler::TRY_FINALLY));
    }
    push(ebp);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The frame pointer does not point to a JS frame so we save NULL
    // for ebp. We expect the code throwing an exception to check ebp
    // before dereferencing it to restore the context.
    push(Immediate(StackHandler::ENTRY));
    push(Immediate(0));  // NULL frame pointer.
  }
  // Save the current handler as the next handler.
  push(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
  // Link this handler as the new current one.
  mov(Operand::StaticVariable(ExternalReference(Top::k_handler_address)), esp);
}
455
456
Leon Clarkee46be812010-01-19 14:06:41 +0000457void MacroAssembler::PopTryHandler() {
458 ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
459 pop(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
460 add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize));
461}
462
463
Steve Blocka7e24c12009-10-30 11:49:00 +0000464void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
465 Register scratch,
466 Label* miss) {
467 Label same_contexts;
468
469 ASSERT(!holder_reg.is(scratch));
470
471 // Load current lexical context from the stack frame.
472 mov(scratch, Operand(ebp, StandardFrameConstants::kContextOffset));
473
474 // When generating debug code, make sure the lexical context is set.
475 if (FLAG_debug_code) {
476 cmp(Operand(scratch), Immediate(0));
477 Check(not_equal, "we should not have an empty lexical context");
478 }
479 // Load the global context of the current context.
480 int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
481 mov(scratch, FieldOperand(scratch, offset));
482 mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));
483
484 // Check the context is a global context.
485 if (FLAG_debug_code) {
486 push(scratch);
487 // Read the first word and compare to global_context_map.
488 mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
489 cmp(scratch, Factory::global_context_map());
490 Check(equal, "JSGlobalObject::global_context should be a global context.");
491 pop(scratch);
492 }
493
494 // Check if both contexts are the same.
495 cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
496 j(equal, &same_contexts, taken);
497
498 // Compare security tokens, save holder_reg on the stack so we can use it
499 // as a temporary register.
500 //
501 // TODO(119): avoid push(holder_reg)/pop(holder_reg)
502 push(holder_reg);
503 // Check that the security token in the calling global object is
504 // compatible with the security token in the receiving global
505 // object.
506 mov(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
507
508 // Check the context is a global context.
509 if (FLAG_debug_code) {
510 cmp(holder_reg, Factory::null_value());
511 Check(not_equal, "JSGlobalProxy::context() should not be null.");
512
513 push(holder_reg);
514 // Read the first word and compare to global_context_map(),
515 mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
516 cmp(holder_reg, Factory::global_context_map());
517 Check(equal, "JSGlobalObject::global_context should be a global context.");
518 pop(holder_reg);
519 }
520
521 int token_offset = Context::kHeaderSize +
522 Context::SECURITY_TOKEN_INDEX * kPointerSize;
523 mov(scratch, FieldOperand(scratch, token_offset));
524 cmp(scratch, FieldOperand(holder_reg, token_offset));
525 pop(holder_reg);
526 j(not_equal, miss, not_taken);
527
528 bind(&same_contexts);
529}
530
531
// Load the new-space allocation top into `result`. If the caller set
// RESULT_CONTAINS_TOP, `result` already holds it (verified in debug
// builds) and `scratch` must be no_reg. Otherwise `scratch`, when
// provided, is loaded with the top address so the subsequent
// UpdateAllocationTopHelper can store through it.
void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register result_end,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    ASSERT(scratch.is(no_reg));
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    cmp(result, Operand::StaticVariable(new_space_allocation_top));
    Check(equal, "Unexpected allocation top");
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available.
  if (scratch.is(no_reg)) {
    mov(result, Operand::StaticVariable(new_space_allocation_top));
  } else {
    ASSERT(!scratch.is(result_end));
    mov(Operand(scratch), Immediate(new_space_allocation_top));
    mov(result, Operand(scratch, 0));
  }
}
560
561
// Store `result_end` as the new new-space allocation top. If `scratch`
// holds the top address (set up by LoadAllocationTopHelper), store
// through it; otherwise store to the external variable directly.
// Debug builds check the new top is object-aligned.
void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch) {
  if (FLAG_debug_code) {
    test(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, "Unaligned allocation in new space");
  }

  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Update new top. Use scratch if available.
  if (scratch.is(no_reg)) {
    mov(Operand::StaticVariable(new_space_allocation_top), result_end);
  } else {
    mov(Operand(scratch, 0), result_end);
  }
}
579
580
// Bump-allocate a fixed-size object in new space. On success `result`
// holds the (optionally tagged) object; on exhaustion control jumps to
// `gc_required`. With --no-inline-new the registers are trashed (debug)
// and allocation always bails to `gc_required`. `result_end` may be
// no_reg, in which case `result` itself tracks the new top and is
// adjusted back down afterwards.
void MacroAssembler::AllocateInNewSpace(int object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (FLAG_debug_code) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      if (result_end.is_valid()) {
        mov(result_end, Immediate(0x7191));
      }
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  Register top_reg = result_end.is_valid() ? result_end : result;

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();

  if (top_reg.is(result)) {
    add(Operand(top_reg), Immediate(object_size));
  } else {
    lea(top_reg, Operand(result, object_size));
  }
  cmp(top_reg, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required, not_taken);

  // Update allocation top.
  UpdateAllocationTopHelper(top_reg, scratch);

  // Tag result if requested.
  if (top_reg.is(result)) {
    // result holds the new top; move it back to the object start,
    // folding in the heap-object tag when requested.
    if ((flags & TAG_OBJECT) != 0) {
      sub(Operand(result), Immediate(object_size - kHeapObjectTag));
    } else {
      sub(Operand(result), Immediate(object_size));
    }
  } else if ((flags & TAG_OBJECT) != 0) {
    add(Operand(result), Immediate(kHeapObjectTag));
  }
}
634
635
// Bump-allocate a variable-size object in new space whose size is
// header_size + element_count << element_size. On success `result`
// holds the (optionally tagged) object and `result_end` the end
// address; on exhaustion control jumps to `gc_required`.
// `element_count` is not modified.
void MacroAssembler::AllocateInNewSpace(int header_size,
                                        ScaleFactor element_size,
                                        Register element_count,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (FLAG_debug_code) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // Register element_count is not modified by the function.
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  lea(result_end, Operand(result, element_count, element_size, header_size));
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}
677
678
// Bump-allocate an object in new space whose byte size is held in the
// `object_size` register. On success `result` holds the (optionally
// tagged) object and `result_end` the end address; on exhaustion
// control jumps to `gc_required`. `object_size` is left unchanged
// unless it aliases `result_end`.
void MacroAssembler::AllocateInNewSpace(Register object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (FLAG_debug_code) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // object_size is left unchanged by this function.
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  if (!object_size.is(result_end)) {
    mov(result_end, object_size);
  }
  add(result_end, Operand(result));
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required, not_taken);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}
721
722
// Roll back the most recent new-space allocation by resetting the
// allocation top to `object` (tag stripped first). Only valid when no
// later allocation has happened; debug builds check `object` is below
// the current top.
void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Make sure the object has no tag before resetting top.
  and_(Operand(object), Immediate(~kHeapObjectTagMask));
#ifdef DEBUG
  cmp(object, Operand::StaticVariable(new_space_allocation_top));
  Check(below, "Undo allocation of non allocated memory");
#endif
  mov(Operand::StaticVariable(new_space_allocation_top), object);
}
735
736
Steve Block3ce2e202009-11-05 08:53:23 +0000737void MacroAssembler::AllocateHeapNumber(Register result,
738 Register scratch1,
739 Register scratch2,
740 Label* gc_required) {
741 // Allocate heap number in new space.
742 AllocateInNewSpace(HeapNumber::kSize,
743 result,
744 scratch1,
745 scratch2,
746 gc_required,
747 TAG_OBJECT);
748
749 // Set the map.
750 mov(FieldOperand(result, HeapObject::kMapOffset),
751 Immediate(Factory::heap_number_map()));
752}
753
754
Steve Blockd0582a62009-12-15 09:54:21 +0000755void MacroAssembler::AllocateTwoByteString(Register result,
756 Register length,
757 Register scratch1,
758 Register scratch2,
759 Register scratch3,
760 Label* gc_required) {
761 // Calculate the number of bytes needed for the characters in the string while
762 // observing object alignment.
763 ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
Steve Blockd0582a62009-12-15 09:54:21 +0000764 ASSERT(kShortSize == 2);
Leon Clarkee46be812010-01-19 14:06:41 +0000765 // scratch1 = length * 2 + kObjectAlignmentMask.
766 lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
Steve Blockd0582a62009-12-15 09:54:21 +0000767 and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));
768
769 // Allocate two byte string in new space.
770 AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
771 times_1,
772 scratch1,
773 result,
774 scratch2,
775 scratch3,
776 gc_required,
777 TAG_OBJECT);
778
779 // Set the map, length and hash field.
780 mov(FieldOperand(result, HeapObject::kMapOffset),
781 Immediate(Factory::string_map()));
Steve Block6ded16b2010-05-10 14:33:55 +0100782 mov(scratch1, length);
783 SmiTag(scratch1);
784 mov(FieldOperand(result, String::kLengthOffset), scratch1);
Steve Blockd0582a62009-12-15 09:54:21 +0000785 mov(FieldOperand(result, String::kHashFieldOffset),
786 Immediate(String::kEmptyHashField));
787}
788
789
// Allocate a sequential ASCII string of `length` characters (character
// data uninitialized). Sets map, smi length and empty hash field. Jumps
// to `gc_required` on allocation failure. `length` is preserved;
// scratch1-3 are clobbered.
void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, length);
  ASSERT(kCharSize == 1);
  add(Operand(scratch1), Immediate(kObjectAlignmentMask));
  and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));

  // Allocate ascii string in new space.
  AllocateInNewSpace(SeqAsciiString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::ascii_string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
823
824
// Allocate a sequential ASCII string of a compile-time-known, positive
// |length|.  On success |result| holds the tagged string; on allocation
// failure control jumps to |gc_required|.  Both scratch registers are
// clobbered.
void MacroAssembler::AllocateAsciiString(Register result,
                                         int length,
                                         Register scratch1,
                                         Register scratch2,
                                         Label* gc_required) {
  ASSERT(length > 0);

  // Allocate ascii string in new space.  SizeFor() already accounts for the
  // header and object alignment, so no runtime size computation is needed.
  AllocateInNewSpace(SeqAsciiString::SizeFor(length),
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::ascii_string_map()));
  mov(FieldOperand(result, String::kLengthOffset),
      Immediate(Smi::FromInt(length)));
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
848
849
// Allocate an (uninitialized except for its map) two-byte ConsString.
// On allocation failure control jumps to |gc_required|.
void MacroAssembler::AllocateConsString(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate the cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::cons_string_map()));
}
866
867
// Allocate an (uninitialized except for its map) ASCII ConsString.
// On allocation failure control jumps to |gc_required|.
void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  // Allocate the cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::cons_ascii_string_map()));
}
884
885
// Virtual-frame variant: branch to |then_target| when |result| is zero and
// |op| is negative, i.e. when an integer multiply/divide should really have
// produced -0 (which cannot be represented as a smi).
void MacroAssembler::NegativeZeroTest(CodeGenerator* cgen,
                                      Register result,
                                      Register op,
                                      JumpTarget* then_target) {
  JumpTarget ok;
  test(result, Operand(result));
  ok.Branch(not_zero, taken);  // Non-zero result: cannot be -0.
  test(op, Operand(op));
  then_target->Branch(sign, not_taken);  // Operand negative: result was -0.
  ok.Bind();
}
897
898
// Branch to |then_label| when |result| is zero and |op| is negative — the
// case where an integer operation actually produced -0.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);  // Non-zero result: cannot be -0.
  test(op, Operand(op));
  j(sign, then_label, not_taken);  // Operand negative: result was -0.
  bind(&ok);
}
909
910
// Two-operand variant: branch to |then_label| when |result| is zero and
// either |op1| or |op2| is negative (sign of op1|op2), i.e. the operation
// produced -0.  Clobbers |scratch|.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);  // Non-zero result: cannot be -0.
  // The sign bit of op1|op2 is set iff at least one operand is negative.
  mov(scratch, Operand(op1));
  or_(scratch, Operand(op2));
  j(sign, then_label, not_taken);
  bind(&ok);
}
924
925
// Load the prototype that instances created by |function| would get into
// |result|.  Jumps to |miss| when |function| is not a JSFunction or has no
// usable prototype yet (hole value).  Clobbers |scratch|.
void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss) {
  // Check that the receiver isn't a smi.
  test(function, Immediate(kSmiTagMask));
  j(zero, miss, not_taken);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss, not_taken);

  // Make sure that the function has an instance prototype.
  Label non_instance;
  movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
  test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance, not_taken);

  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(Operand(result), Immediate(Factory::the_hole_value()));
  j(equal, miss, not_taken);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  mov(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}
971
972
// Emit a call to the (possibly lazily compiled) code for |stub|.
void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  call(stub->GetCode(), RelocInfo::CODE_TARGET);
}
977
978
// Like CallStub, but propagates an allocation failure from code generation
// instead of crashing: returns the failure as a MaybeObject, otherwise the
// stub's code object after emitting the call.
MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Object* result;
  { MaybeObject* maybe_result = stub->TryGetCode();
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }
  call(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
  return result;
}
988
989
// Emit a tail call (jump) to the code for |stub|.
void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
}
994
995
// Like TailCallStub, but propagates an allocation failure from code
// generation as a MaybeObject instead of crashing.
MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Object* result;
  { MaybeObject* maybe_result = stub->TryGetCode();
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }
  jmp(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
  return result;
}
1005
1006
// Return from a stub, popping |argc| - 1 arguments (the receiver slot is
// popped by ret itself).  Only valid while generating a stub.
void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}
1011
1012
// Recovery path for a runtime call with the wrong argument count: drop the
// |num_arguments| stack slots and produce undefined in eax.
void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    add(Operand(esp), Immediate(num_arguments * kPointerSize));
  }
  mov(eax, Immediate(Factory::undefined_value()));
}
1019
1020
// Extract the cached array index from a string hash field in |hash| and
// leave it as a smi in |index| (which may alias |hash|).
void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it does not conflict.
  ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  // We want the smi-tagged index in key.  kArrayIndexValueMask has zeros in
  // the low kHashShift bits.
  and_(hash, String::kArrayIndexValueMask);
  STATIC_ASSERT(String::kHashShift >= kSmiTagSize && kSmiTag == 0);
  // Shift the index down, leaving kSmiTagSize low bits so the value ends up
  // smi-tagged (tag is 0, so no explicit or is needed).
  if (String::kHashShift > kSmiTagSize) {
    shr(hash, String::kHashShift - kSmiTagSize);
  }
  if (!index.is(hash)) {
    mov(index, hash);
  }
}
1038
1039
// Convenience overload: resolve the runtime function by id and call it.
void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}
1043
1044
// Convenience overload: resolve the runtime function by id, propagating any
// code-generation allocation failure to the caller.
MaybeObject* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
                                            int num_arguments) {
  return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
}
1049
1050
// Call runtime function |f| with |num_arguments| arguments already on the
// stack, via the CEntry stub (eax = argc, ebx = function address).
void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f)));
  CEntryStub ces(1);
  CallStub(&ces);
}
1069
1070
// Like CallRuntime(Runtime::Function*, int) but propagates an allocation
// failure while materializing the CEntry stub code.
MaybeObject* MacroAssembler::TryCallRuntime(Runtime::Function* f,
                                            int num_arguments) {
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    // Since we did not call the stub, there was no allocation failure.
    // Return some non-failure object.
    return Heap::undefined_value();
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f)));
  CEntryStub ces(1);
  return TryCallStub(&ces);
}
1089
1090
// Call an arbitrary external (C++) entry point through the CEntry stub.
// eax carries the argument count, ebx the target address.
void MacroAssembler::CallExternalReference(ExternalReference ref,
                                           int num_arguments) {
  mov(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ref));

  CEntryStub stub(1);
  CallStub(&stub);
}
1099
1100
// Tail-call an external entry point: set up the argument count in eax and
// jump (rather than call) through the CEntry stub.  |result_size| is unused
// on ia32 (kept for cross-platform signature parity).
void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  JumpToExternalReference(ext);
}
1111
1112
// Tail-call a runtime function identified by |fid|.
void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid), num_arguments, result_size);
}
1118
1119
// If true, a Handle<T> passed by value is passed and returned by
// using the location_ field directly.  If false, it is passed and
// returned as a pointer to a handle.
#ifdef USING_BSD_ABI
static const bool kPassHandlesDirectly = true;
#else
static const bool kPassHandlesDirectly = false;
#endif


// Stack slot for API call argument |index| inside a frame prepared by
// PrepareCallApiFunction.  When handles are passed indirectly, slot 0 holds
// the pointer to the output cell, so arguments start one slot higher.
Operand ApiParameterOperand(int index) {
  return Operand(esp, (index + (kPassHandlesDirectly ? 0 : 1)) * kPointerSize);
}
1133
1134
// Enter an API exit frame with room for |argc| arguments.  When handles are
// passed indirectly, two extra slots are reserved for the return value cell
// and the pointer to it (see CallApiFunctionAndReturn).
void MacroAssembler::PrepareCallApiFunction(int stack_space, int argc) {
  if (kPassHandlesDirectly) {
    EnterApiExitFrame(stack_space, argc);
    // When handles as passed directly we don't have to allocate extra
    // space for and pass an out parameter.
  } else {
    // We allocate two additional slots: return value and pointer to it.
    EnterApiExitFrame(stack_space, argc + 2);
  }
}
1145
1146
// Call an API (v8 embedder) function set up by PrepareCallApiFunction, then
// unwind the HandleScope that was opened for the call, promote any scheduled
// exception, leave the exit frame and return.  The result ends up in eax
// (undefined if the callback returned an empty handle).
void MacroAssembler::CallApiFunctionAndReturn(ApiFunction* function, int argc) {
  if (!kPassHandlesDirectly) {
    // The argument slots are filled as follows:
    //
    //   n + 1: output cell
    //   n: arg n
    //   ...
    //   1: arg1
    //   0: pointer to the output cell
    //
    // Note that this is one more "argument" than the function expects
    // so the out cell will have to be popped explicitly after returning
    // from the function. The out cell contains Handle.
    lea(eax, Operand(esp, (argc + 1) * kPointerSize));  // pointer to out cell.
    mov(Operand(esp, 0 * kPointerSize), eax);  // output.
    mov(Operand(esp, (argc + 1) * kPointerSize), Immediate(0));  // out cell.
  }

  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address();
  ExternalReference level_address =
      ExternalReference::handle_scope_level_address();

  // Allocate HandleScope in callee-save registers: ebx = next, edi = limit,
  // and bump the nesting level.  These must survive the callback.
  mov(ebx, Operand::StaticVariable(next_address));
  mov(edi, Operand::StaticVariable(limit_address));
  add(Operand::StaticVariable(level_address), Immediate(1));

  // Call the api function!
  call(function->address(), RelocInfo::RUNTIME_ENTRY);

  if (!kPassHandlesDirectly) {
    // The returned value is a pointer to the handle holding the result.
    // Dereference this to get to the location.
    mov(eax, Operand(eax, 0));
  }

  Label empty_handle;
  Label prologue;
  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;

  // Check if the result handle holds 0.
  test(eax, Operand(eax));
  j(zero, &empty_handle, not_taken);
  // It was non-zero.  Dereference to get the result value.
  mov(eax, Operand(eax, 0));
  bind(&prologue);
  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  mov(Operand::StaticVariable(next_address), ebx);
  sub(Operand::StaticVariable(level_address), Immediate(1));
  Assert(above_equal, "Invalid HandleScope level");
  cmp(edi, Operand::StaticVariable(limit_address));
  j(not_equal, &delete_allocated_handles, not_taken);
  bind(&leave_exit_frame);

  // Check if the function scheduled an exception.
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address();
  cmp(Operand::StaticVariable(scheduled_exception_address),
      Immediate(Factory::the_hole_value()));
  j(not_equal, &promote_scheduled_exception, not_taken);
  LeaveExitFrame();
  ret(0);
  bind(&promote_scheduled_exception);
  TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);
  bind(&empty_handle);
  // It was zero; the result is undefined.
  mov(eax, Factory::undefined_value());
  jmp(&prologue);

  // HandleScope limit has changed. Delete allocated extensions.
  bind(&delete_allocated_handles);
  mov(Operand::StaticVariable(limit_address), edi);
  // Preserve the result (eax) across the C call in edi.
  mov(edi, eax);
  mov(eax, Immediate(ExternalReference::delete_handle_scope_extensions()));
  call(Operand(eax));
  mov(eax, edi);
  jmp(&leave_exit_frame);
}
1231
1232
// Jump to the external entry point |ext| through the CEntry runtime stub
// (ebx carries the target address; eax is expected to hold the argc).
void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces(1);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}
1239
1240
// Shared prologue for function invocation: compare expected vs. actual
// argument counts and, on mismatch, route the call through the arguments
// adaptor trampoline.  On the matching path, falls through (binding the
// internal |invoke| label); on the adaptor CALL path, jumps to |done|
// afterwards.  Register conventions: eax = actual count, ebx = expected
// count, edx = code to invoke.
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    const Operand& code_operand,
                                    Label* done,
                                    InvokeFlag flag) {
  bool definitely_matches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(eax, actual.immediate());
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      ASSERT(expected.reg().is(ebx));
      mov(eax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), Operand(actual.reg()));
      j(equal, &invoke);
      ASSERT(actual.reg().is(eax));
      ASSERT(expected.reg().is(ebx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
    if (!code_constant.is_null()) {
      // The adaptor expects the raw entry address in edx, so skip the
      // code object header.
      mov(edx, Immediate(code_constant));
      add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_operand.is_reg(edx)) {
      mov(edx, code_operand);
    }

    if (flag == CALL_FUNCTION) {
      call(adaptor, RelocInfo::CODE_TARGET);
      jmp(done);
    } else {
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
1304
1305
// Invoke code at |code| (an Operand), calling or jumping per |flag| after
// the argument-count adaption prologue.
void MacroAssembler::InvokeCode(const Operand& code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag) {
  Label done;
  InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code);
  }
  bind(&done);
}
1320
1321
// Invoke a known code object |code| with relocation mode |rmode|, calling or
// jumping per |flag| after the argument-count adaption prologue.
void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag) {
  Label done;
  // The operand is unused on this path; InvokePrologue uses the constant.
  Operand dummy(eax);
  InvokePrologue(expected, actual, code, dummy, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code, rmode);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code, rmode);
  }
  bind(&done);
}
1338
1339
// Invoke the JSFunction in edi: load its context into esi, its expected
// parameter count into ebx, and dispatch to its code entry.
void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(fun.is(edi));
  mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  // The parameter count is stored as a smi; the prologue wants a raw int.
  SmiUntag(ebx);

  ParameterCount expected(ebx);
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, actual, flag);
}
1353
1354
// Invoke a compile-time-known, already compiled JSFunction: materialize it
// in edi, set up its context and call its cached code directly.
void MacroAssembler::InvokeFunction(JSFunction* function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(function->is_compiled());
  // Get the function and setup the context.
  mov(edi, Immediate(Handle<JSFunction>(function)));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  // Invoke the cached code.
  Handle<Code> code(function->code());
  ParameterCount expected(function->shared()->formal_parameter_count());
  InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag);
}
1367
1368
// Invoke the JavaScript builtin |id|, loading its function object into edi
// and dispatching through its code entry.
void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments match the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinFunction(edi, id);
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, expected, flag);
}
1381
// Load the JSFunction for the JavaScript builtin |id| into |target|,
// navigating global object -> builtins object -> builtin slot.
void MacroAssembler::GetBuiltinFunction(Register target,
                                        Builtins::JavaScript id) {
  // Load the JavaScript builtin function from the builtins object.
  mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
  mov(target, FieldOperand(target,
                           JSBuiltinsObject::OffsetOfFunctionWithId(id)));
}
Steve Blocka7e24c12009-10-30 11:49:00 +00001390
// Load the code entry point of the JavaScript builtin |id| into |target|.
// Clobbers edi (holds the builtin's function object), so |target| must not
// be edi.
void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(edi));
  // Load the JavaScript builtin function from the builtins object.
  GetBuiltinFunction(edi, id);
  // Load the code entry point from the function into the target register.
  mov(target, FieldOperand(edi, JSFunction::kCodeEntryOffset));
}
1398
1399
// Load into |dst| the function context |context_chain_length| levels up the
// static chain from the current context (esi).  Always resolves through
// FCONTEXT_INDEX because the hop may land on an intermediate (non-function)
// context.
void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    mov(dst, Operand(esi, Context::SlotOffset(Context::CLOSURE_INDEX)));
    // Load the function context (which is the incoming, outer context).
    mov(dst, FieldOperand(dst, JSFunction::kContextOffset));
    for (int i = 1; i < context_chain_length; i++) {
      mov(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
      mov(dst, FieldOperand(dst, JSFunction::kContextOffset));
    }
    // The context may be an intermediate context, not a function context.
    mov(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  } else {  // Slot is in the current function context.
    // The context may be an intermediate context, not a function context.
    mov(dst, Operand(esi, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  }
}
1417
1418
// Load the global-context function at slot |index| into |function|.
void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the global or builtins object from the current context.
  mov(function, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  // Load the global context from the global or builtins object.
  mov(function, FieldOperand(function, GlobalObject::kGlobalContextOffset));
  // Load the function from the global context.
  mov(function, Operand(function, Context::SlotOffset(index)));
}
1427
1428
// Load |function|'s initial map into |map|.  In debug builds, verify that
// the loaded value really is a map (its map is the meta map) and abort
// otherwise.
void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map.  The global functions all have initial maps.
  mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (FLAG_debug_code) {
    Label ok, fail;
    CheckMap(map, Factory::meta_map(), &fail, false);
    jmp(&ok);
    bind(&fail);
    Abort("Global functions must have initial map");
    bind(&ok);
  }
}
1442
Steve Blockd0582a62009-12-15 09:54:21 +00001443
// Plain return with no stack-argument popping.
void MacroAssembler::Ret() {
  ret(0);
}
1447
1448
Leon Clarkee46be812010-01-19 14:06:41 +00001449void MacroAssembler::Drop(int stack_elements) {
1450 if (stack_elements > 0) {
1451 add(Operand(esp), Immediate(stack_elements * kPointerSize));
1452 }
1453}
1454
1455
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001456void MacroAssembler::Move(Register dst, Register src) {
1457 if (!dst.is(src)) {
1458 mov(dst, src);
1459 }
1460}
1461
1462
// Materialize the handle |value| in |dst|.
void MacroAssembler::Move(Register dst, Handle<Object> value) {
  mov(dst, value);
}
1466
1467
Steve Blocka7e24c12009-10-30 11:49:00 +00001468void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
1469 if (FLAG_native_code_counters && counter->Enabled()) {
1470 mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
1471 }
1472}
1473
1474
1475void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
1476 ASSERT(value > 0);
1477 if (FLAG_native_code_counters && counter->Enabled()) {
1478 Operand operand = Operand::StaticVariable(ExternalReference(counter));
1479 if (value == 1) {
1480 inc(operand);
1481 } else {
1482 add(operand, Immediate(value));
1483 }
1484 }
1485}
1486
1487
1488void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
1489 ASSERT(value > 0);
1490 if (FLAG_native_code_counters && counter->Enabled()) {
1491 Operand operand = Operand::StaticVariable(ExternalReference(counter));
1492 if (value == 1) {
1493 dec(operand);
1494 } else {
1495 sub(operand, Immediate(value));
1496 }
1497 }
1498}
1499
1500
// Conditionally bump the stats counter: only when condition |cc| holds at
// runtime.  Flags are preserved around the update via pushfd/popfd.
void MacroAssembler::IncrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    IncrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}
1514
1515
// Conditionally lower the stats counter: only when condition |cc| holds at
// runtime.  Flags are preserved around the update via pushfd/popfd.
void MacroAssembler::DecrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    DecrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}
1529
1530
// Debug-only runtime check: in --debug-code builds, emit code that aborts
// with |msg| when condition |cc| does not hold.  No code in release builds.
void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code) Check(cc, msg);
}
1534
1535
// Debug-only check that |elements| is a fast-elements backing store: its map
// must be the fixed-array or copy-on-write fixed-array map.
void MacroAssembler::AssertFastElements(Register elements) {
  if (FLAG_debug_code) {
    Label ok;
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(Factory::fixed_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(Factory::fixed_cow_array_map()));
    j(equal, &ok);
    Abort("JSObject with fast elements map has slow elements");
    bind(&ok);
  }
}
1549
1550
// Unconditional runtime check: abort with |msg| when condition |cc| does
// not hold (emitted in all build modes, unlike Assert).
void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L, taken);
  Abort(msg);
  // will not return here
  bind(&L);
}
1558
1559
// Emit a runtime check that esp honors the OS activation-frame alignment;
// traps with int3 on misalignment.  No code when the ABI does not require
// alignment beyond the pointer size.
void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    Label alignment_as_expected;
    test(esp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}
1573
1574
// Emit code that aborts execution with |msg| via Runtime::kAbort.  The
// message pointer is smuggled past the GC as two smis (an aligned base plus
// the alignment delta); never returns (trailing int3).
void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  set_allow_stub_calls(true);

  push(eax);
  push(Immediate(p0));
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
  int3();
}
1600
1601
// Jump to |on_not_number| when the non-smi object in |reg| is not a heap
// number.  When static type info already proves it is a number, no check is
// emitted.  |reg| must not be a smi (debug-checked).
void MacroAssembler::JumpIfNotNumber(Register reg,
                                     TypeInfo info,
                                     Label* on_not_number) {
  if (FLAG_debug_code) AbortIfSmi(reg);
  if (!info.IsNumber()) {
    cmp(FieldOperand(reg, HeapObject::kMapOffset),
        Factory::heap_number_map());
    j(not_equal, on_not_number);
  }
}
1612
1613
// Truncate the heap number in |source| to a 32-bit integer in |dst|,
// jumping to |on_not_int32| when the value does not fit (cvttsd2si yields
// 0x80000000 on overflow/NaN).  When |scratch| is no_reg and dst aliases
// source, the original value is temporarily saved on the stack so it can be
// restored on the failure path.
void MacroAssembler::ConvertToInt32(Register dst,
                                    Register source,
                                    Register scratch,
                                    TypeInfo info,
                                    Label* on_not_int32) {
  if (FLAG_debug_code) {
    AbortIfSmi(source);
    AbortIfNotNumber(source);
  }
  if (info.IsInteger32()) {
    // Statically known to fit: convert without the overflow check.
    cvttsd2si(dst, FieldOperand(source, HeapNumber::kValueOffset));
  } else {
    Label done;
    // Need to preserve |source| across a possibly-failing conversion when no
    // scratch register is available and dst would clobber it.
    bool push_pop = (scratch.is(no_reg) && dst.is(source));
    ASSERT(!scratch.is(source));
    if (push_pop) {
      push(dst);
      scratch = dst;
    }
    if (scratch.is(no_reg)) scratch = dst;
    cvttsd2si(scratch, FieldOperand(source, HeapNumber::kValueOffset));
    // 0x80000000 is cvttsd2si's "invalid" result: out of range or NaN.
    cmp(scratch, 0x80000000u);
    if (push_pop) {
      j(not_equal, &done);
      pop(dst);  // Restore the original value before taking the slow path.
      jmp(on_not_int32);
    } else {
      j(equal, on_not_int32);
    }

    bind(&done);
    if (push_pop) {
      add(Operand(esp), Immediate(kPointerSize));  // Pop.
    }
    if (!scratch.is(dst)) {
      mov(dst, scratch);
    }
  }
}
1653
1654
// Load 2^|power| into the XMM register |dst| by building the IEEE-754
// double bit pattern directly: biased exponent in the scratch GPR, then
// shifted into the exponent field (mantissa stays zero).
void MacroAssembler::LoadPowerOf2(XMMRegister dst,
                                  Register scratch,
                                  int power) {
  ASSERT(is_uintn(power + HeapNumber::kExponentBias,
                  HeapNumber::kExponentBits));
  mov(scratch, Immediate(power + HeapNumber::kExponentBias));
  movd(dst, Operand(scratch));
  psllq(dst, HeapNumber::kMantissaBits);
}
1664
1665
// Given a string instance type in |instance_type|, jump to |failure| unless
// it denotes a sequential ASCII string.  |scratch| may alias the input.
void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Register scratch,
    Label* failure) {
  if (!scratch.is(instance_type)) {
    mov(scratch, instance_type);
  }
  // Mask down to the string / representation / encoding bits and compare
  // against the sequential-ASCII pattern in one go.
  and_(scratch,
       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
  cmp(scratch, kStringTag | kSeqStringTag | kAsciiStringTag);
  j(not_equal, failure);
}
1678
1679
// Jump to |failure| unless both |object1| and |object2| are flat (sequential)
// ASCII strings.  Both scratch registers are clobbered.
void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register object1,
                                                         Register object2,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         Label* failure) {
  // Check that both objects are not smis: a smi has tag bit 0, so the AND of
  // two pointers is a smi only if at least one of them is a smi.
  ASSERT_EQ(0, kSmiTag);
  mov(scratch1, Operand(object1));
  and_(scratch1, Operand(object2));
  test(scratch1, Immediate(kSmiTagMask));
  j(zero, failure);

  // Load instance type for both strings.
  mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
  mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
  movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ascii strings.
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
  // Interleave bits from both instance types and compare them in one check.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  and_(scratch1, kFlatAsciiStringMask);
  and_(scratch2, kFlatAsciiStringMask);
  // scratch1 + scratch2 * 8 packs both masked types into one register.
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3));
  j(not_equal, failure);
}
1710
1711
Steve Block6ded16b2010-05-10 14:33:55 +01001712void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
1713 int frameAlignment = OS::ActivationFrameAlignment();
1714 if (frameAlignment != 0) {
1715 // Make stack end at alignment and make room for num_arguments words
1716 // and the original value of esp.
1717 mov(scratch, esp);
1718 sub(Operand(esp), Immediate((num_arguments + 1) * kPointerSize));
1719 ASSERT(IsPowerOf2(frameAlignment));
1720 and_(esp, -frameAlignment);
1721 mov(Operand(esp, num_arguments * kPointerSize), scratch);
1722 } else {
1723 sub(Operand(esp), Immediate(num_arguments * kPointerSize));
1724 }
1725}
1726
1727
// Call the C function identified by the external reference |function|,
// with |num_arguments| arguments already stored in the slots reserved by
// PrepareCallCFunction.
void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Trashing eax is ok as it will be the return value.
  mov(Operand(eax), Immediate(function));
  CallCFunction(eax, num_arguments);
}
1734
1735
// Call the C function whose address is in |function|, then tear down the
// argument area set up by PrepareCallCFunction.
void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  // Check stack alignment.
  if (FLAG_debug_code) {
    CheckStackAlignment();
  }

  call(Operand(function));
  if (OS::ActivationFrameAlignment() != 0) {
    // Restore the original esp that PrepareCallCFunction saved just above
    // the argument slots.
    mov(esp, Operand(esp, num_arguments * kPointerSize));
  } else {
    // No alignment was performed; just pop the argument slots.
    add(Operand(esp), Immediate(num_arguments * sizeof(int32_t)));
  }
}
1750
1751
// Open a patching session on |size| bytes of existing code at |address|.
CodePatcher::CodePatcher(byte* address, int size)
    : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
1759
1760
// Close the patching session: flush the instruction cache over the patched
// range and verify the patch filled the buffer exactly.
CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected: exactly size_ bytes were
  // emitted, and the reloc-info writer is still at the position it was
  // constructed with (see the constructor's identical ASSERT).
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
1769
1770
1771} } // namespace v8::internal
Leon Clarkef7060e22010-06-03 12:02:55 +01001772
1773#endif // V8_TARGET_ARCH_IA32