// Copyright 2006-2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_IA32)

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "debug.h"
#include "runtime.h"
#include "serialize.h"

namespace v8 {
namespace internal {

// -------------------------------------------------------------------------
// MacroAssembler implementation.

MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      code_object_(Heap::undefined_value()) {
}


void MacroAssembler::RecordWriteHelper(Register object,
                                       Register addr,
                                       Register scratch) {
  if (FLAG_debug_code) {
    // Check that the object is not in new space.
    Label not_in_new_space;
    InNewSpace(object, scratch, not_equal, &not_in_new_space);
    Abort("new-space object passed to RecordWriteHelper");
    bind(&not_in_new_space);
  }

  // Compute the page start address from the heap object pointer, and reuse
  // the 'object' register for it.
  and_(object, ~Page::kPageAlignmentMask);

  // Compute the number of the region covering addr; see the
  // Page::GetRegionNumberForAddress method for more details.
  and_(addr, Page::kPageAlignmentMask);
  shr(addr, Page::kRegionSizeLog2);

  // Set the dirty mark for the region.
  bts(Operand(object, Page::kDirtyFlagOffset), addr);
}


void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                Label* branch) {
  ASSERT(cc == equal || cc == not_equal);
  if (Serializer::enabled()) {
    // Can't do arithmetic on external references if it might get serialized.
    mov(scratch, Operand(object));
    // The mask isn't really an address. We load it as an external reference in
    // case the size of the new space is different between the snapshot maker
    // and the running system.
    and_(Operand(scratch), Immediate(ExternalReference::new_space_mask()));
    cmp(Operand(scratch), Immediate(ExternalReference::new_space_start()));
    j(cc, branch);
  } else {
    int32_t new_space_start = reinterpret_cast<int32_t>(
        ExternalReference::new_space_start().address());
    lea(scratch, Operand(object, -new_space_start));
    and_(scratch, Heap::NewSpaceMask());
    j(cc, branch);
  }
}


void MacroAssembler::RecordWrite(Register object,
                                 int offset,
                                 Register value,
                                 Register scratch) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are esi.
  ASSERT(!object.is(esi) && !value.is(esi) && !scratch.is(esi));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  // Skip barrier if writing a smi.
  ASSERT_EQ(0, kSmiTag);
  test(value, Immediate(kSmiTagMask));
  j(zero, &done);

  InNewSpace(object, value, equal, &done);

  // The offset is relative to a tagged or untagged HeapObject pointer,
  // so either offset or offset + kHeapObjectTag must be a
  // multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize) ||
         IsAligned(offset + kHeapObjectTag, kPointerSize));

  Register dst = scratch;
  if (offset != 0) {
    lea(dst, Operand(object, offset));
  } else {
    // Array access: calculate the destination address in the same manner as
    // KeyedStoreIC::GenerateGeneric. Multiply a smi by 2 to get an offset
    // into an array of words.
    ASSERT_EQ(1, kSmiTagSize);
    ASSERT_EQ(0, kSmiTag);
    lea(dst, Operand(object, dst, times_half_pointer_size,
                     FixedArray::kHeaderSize - kHeapObjectTag));
  }
  RecordWriteHelper(object, dst, value);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    mov(object, Immediate(BitCast<int32_t>(kZapValue)));
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
    mov(scratch, Immediate(BitCast<int32_t>(kZapValue)));
  }
}


void MacroAssembler::RecordWrite(Register object,
                                 Register address,
                                 Register value) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are esi.
  ASSERT(!object.is(esi) && !value.is(esi) && !address.is(esi));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  // Skip barrier if writing a smi.
  ASSERT_EQ(0, kSmiTag);
  test(value, Immediate(kSmiTagMask));
  j(zero, &done);

  InNewSpace(object, value, equal, &done);

  RecordWriteHelper(object, address, value);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    mov(object, Immediate(BitCast<int32_t>(kZapValue)));
    mov(address, Immediate(BitCast<int32_t>(kZapValue)));
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
  }
}


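// Jumps to on_stack_overflow if esp has dropped below the current stack
// limit.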
void MacroAssembler::StackLimitCheck(Label* on_stack_overflow) {
  cmp(esp,
      Operand::StaticVariable(ExternalReference::address_of_stack_limit()));
  j(below, on_stack_overflow);
}


#ifdef ENABLE_DEBUGGER_SUPPORT
void MacroAssembler::DebugBreak() {
  Set(eax, Immediate(0));
  mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak)));
  CEntryStub ces(1);
  call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}
#endif


void MacroAssembler::Set(Register dst, const Immediate& x) {
  if (x.is_zero()) {
    xor_(dst, Operand(dst));  // Shorter than mov.
  } else {
    mov(dst, x);
  }
}


void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}


void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}


void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       static_cast<int8_t>(type));
}


void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              bool is_heap_object) {
  if (!is_heap_object) {
    test(obj, Immediate(kSmiTagMask));
    j(zero, fail);
  }
  cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
  j(not_equal, fail);
}


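// Loads the map of heap_object into map and its instance type into
// instance_type, and returns the condition (zero) a caller should branch on
// if the object is a string.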
Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  ASSERT(kNotStringTag != 0);
  test(instance_type, Immediate(kIsNotStringMask));
  return zero;
}


void MacroAssembler::IsObjectJSObjectType(Register heap_object,
                                          Register map,
                                          Register scratch,
                                          Label* fail) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  IsInstanceJSObjectType(map, scratch, fail);
}


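// Biases the instance type by FIRST_JS_OBJECT_TYPE so that one unsigned
// comparison covers the whole [FIRST_JS_OBJECT_TYPE, LAST_JS_OBJECT_TYPE]
// range check.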
void MacroAssembler::IsInstanceJSObjectType(Register map,
                                            Register scratch,
                                            Label* fail) {
  movzx_b(scratch, FieldOperand(map, Map::kInstanceTypeOffset));
  sub(Operand(scratch), Immediate(FIRST_JS_OBJECT_TYPE));
  cmp(scratch, LAST_JS_OBJECT_TYPE - FIRST_JS_OBJECT_TYPE);
  j(above, fail);
}


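// Compares the two values on top of the x87 FPU stack, pops them both, and
// leaves the comparison result in the CPU flags. Uses the faster fucomip
// instruction when the processor supports CMOV (P6 and later); otherwise
// falls back to fucompp plus fnstsw/sahf, preserving eax via push/pop.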
void MacroAssembler::FCmp() {
  if (CpuFeatures::IsSupported(CMOV)) {
    fucomip();
    ffree(0);
    fincstp();
  } else {
    fucompp();
    push(eax);
    fnstsw_ax();
    sahf();
    pop(eax);
  }
}


void MacroAssembler::AbortIfNotNumber(Register object) {
  Label ok;
  test(object, Immediate(kSmiTagMask));
  j(zero, &ok);
  cmp(FieldOperand(object, HeapObject::kMapOffset),
      Factory::heap_number_map());
  Assert(equal, "Operand not a number");
  bind(&ok);
}


void MacroAssembler::AbortIfNotSmi(Register object) {
  test(object, Immediate(kSmiTagMask));
  Assert(equal, "Operand is not a smi");
}


void MacroAssembler::AbortIfNotString(Register object) {
  test(object, Immediate(kSmiTagMask));
  Assert(not_equal, "Operand is not a string");
  push(object);
  mov(object, FieldOperand(object, HeapObject::kMapOffset));
  CmpInstanceType(object, FIRST_NONSTRING_TYPE);
  pop(object);
  Assert(below, "Operand is not a string");
}


void MacroAssembler::AbortIfSmi(Register object) {
  test(object, Immediate(kSmiTagMask));
  Assert(not_equal, "Operand is a smi");
}


void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, Operand(esp));
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(CodeObject()));
  if (FLAG_debug_code) {
    cmp(Operand(esp, 0), Immediate(Factory::undefined_value()));
    Check(not_equal, "code object not properly patched");
  }
}


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (FLAG_debug_code) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, "stack frame types must match");
  }
  leave();
}


void MacroAssembler::EnterExitFramePrologue() {
  // Set up the frame structure on the stack.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(ebp);
  mov(ebp, Operand(esp));

  // Reserve room for the entry stack pointer and push the code object.
  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  push(Immediate(CodeObject()));  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  ExternalReference context_address(Top::k_context_address);
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);
}
Steve Blocka7e24c12009-10-30 11:49:00 +0000365
Steve Blocka7e24c12009-10-30 11:49:00 +0000366
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100367void MacroAssembler::EnterExitFrameEpilogue(int argc) {
Steve Blockd0582a62009-12-15 09:54:21 +0000368 // Reserve space for arguments.
369 sub(Operand(esp), Immediate(argc * kPointerSize));
Steve Blocka7e24c12009-10-30 11:49:00 +0000370
371 // Get the required frame alignment for the OS.
372 static const int kFrameAlignment = OS::ActivationFrameAlignment();
373 if (kFrameAlignment > 0) {
374 ASSERT(IsPowerOf2(kFrameAlignment));
375 and_(esp, -kFrameAlignment);
376 }
377
378 // Patch the saved entry sp.
379 mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
380}


void MacroAssembler::EnterExitFrame() {
  EnterExitFramePrologue();

  // Set up argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, Operand(eax));
  lea(esi, Operand(ebp, eax, times_4, offset));

  EnterExitFrameEpilogue(2);
}


void MacroAssembler::EnterApiExitFrame(int stack_space, int argc) {
  EnterExitFramePrologue();

  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  lea(esi, Operand(ebp, (stack_space * kPointerSize) + offset));

  EnterExitFrameEpilogue(argc);
}


void MacroAssembler::LeaveExitFrame() {
  // Get the return address from the stack and restore the frame pointer.
  mov(ecx, Operand(ebp, 1 * kPointerSize));
  mov(ebp, Operand(ebp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  lea(esp, Operand(esi, 1 * kPointerSize));

  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Top::k_context_address);
  mov(esi, Operand::StaticVariable(context_address));
#ifdef DEBUG
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Push the return address to get ready to return.
  push(ecx);

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}


void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  // Adjust this code if not the case.
  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
  // The pc (return address) is already on TOS.
  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      push(Immediate(StackHandler::TRY_CATCH));
    } else {
      push(Immediate(StackHandler::TRY_FINALLY));
    }
    push(ebp);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The frame pointer does not point to a JS frame so we save NULL
    // for ebp. We expect the code throwing an exception to check ebp
    // before dereferencing it to restore the context.
    push(Immediate(StackHandler::ENTRY));
    push(Immediate(0));  // NULL frame pointer.
  }
  // Save the current handler as the next handler.
  push(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
  // Link this handler as the new current one.
  mov(Operand::StaticVariable(ExternalReference(Top::k_handler_address)), esp);
}


void MacroAssembler::PopTryHandler() {
  ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
  pop(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
  add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize));
}


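// Checks that the current lexical context and the global object in
// holder_reg are either the same context or carry the same security token;
// jumps to miss if access should be denied.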
void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));

  // Load current lexical context from the stack frame.
  mov(scratch, Operand(ebp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (FLAG_debug_code) {
    cmp(Operand(scratch), Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
  }
  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, offset));
  mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check that the context is a global context.
  if (FLAG_debug_code) {
    push(scratch);
    // Read the first word and compare to global_context_map.
    mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
    cmp(scratch, Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(scratch);
  }

  // Check if both contexts are the same.
  cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  j(equal, &same_contexts, taken);

  // Compare security tokens, save holder_reg on the stack so we can use it
  // as a temporary register.
  //
  // TODO(119): avoid push(holder_reg)/pop(holder_reg)
  push(holder_reg);
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));

  // Check that the context is a global context.
  if (FLAG_debug_code) {
    cmp(holder_reg, Factory::null_value());
    Check(not_equal, "JSGlobalProxy::context() should not be null.");

    push(holder_reg);
    // Read the first word and compare to global_context_map.
    mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    cmp(holder_reg, Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);
  }

  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, token_offset));
  cmp(scratch, FieldOperand(holder_reg, token_offset));
  pop(holder_reg);
  j(not_equal, miss, not_taken);

  bind(&same_contexts);
}


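// Loads the current new-space allocation top into result. With
// RESULT_CONTAINS_TOP the top is assumed to already be in result; otherwise
// it is loaded from the external reference, optionally going through scratch
// so the top address can be reused by UpdateAllocationTopHelper.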
void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register result_end,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    ASSERT(scratch.is(no_reg));
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    cmp(result, Operand::StaticVariable(new_space_allocation_top));
    Check(equal, "Unexpected allocation top");
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available.
  if (scratch.is(no_reg)) {
    mov(result, Operand::StaticVariable(new_space_allocation_top));
  } else {
    ASSERT(!scratch.is(result_end));
    mov(Operand(scratch), Immediate(new_space_allocation_top));
    mov(result, Operand(scratch, 0));
  }
}


void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch) {
  if (FLAG_debug_code) {
    test(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, "Unaligned allocation in new space");
  }

  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Update new top. Use scratch if available.
  if (scratch.is(no_reg)) {
    mov(Operand::StaticVariable(new_space_allocation_top), result_end);
  } else {
    mov(Operand(scratch, 0), result_end);
  }
}


void MacroAssembler::AllocateInNewSpace(int object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  Register top_reg = result_end.is_valid() ? result_end : result;

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();

  if (top_reg.is(result)) {
    add(Operand(top_reg), Immediate(object_size));
  } else {
    lea(top_reg, Operand(result, object_size));
  }
  cmp(top_reg, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required, not_taken);

  // Update allocation top.
  UpdateAllocationTopHelper(top_reg, scratch);

  // Tag result if requested.
  if (top_reg.is(result)) {
    if ((flags & TAG_OBJECT) != 0) {
      sub(Operand(result), Immediate(object_size - kHeapObjectTag));
    } else {
      sub(Operand(result), Immediate(object_size));
    }
  } else if ((flags & TAG_OBJECT) != 0) {
    add(Operand(result), Immediate(kHeapObjectTag));
  }
}


void MacroAssembler::AllocateInNewSpace(int header_size,
                                        ScaleFactor element_size,
                                        Register element_count,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  lea(result_end, Operand(result, element_count, element_size, header_size));
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}


void MacroAssembler::AllocateInNewSpace(Register object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  if (!object_size.is(result_end)) {
    mov(result_end, object_size);
  }
  add(result_end, Operand(result));
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required, not_taken);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}


void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Make sure the object has no tag before resetting top.
  and_(Operand(object), Immediate(~kHeapObjectTagMask));
#ifdef DEBUG
  cmp(object, Operand::StaticVariable(new_space_allocation_top));
  Check(below, "Undo allocation of non-allocated memory");
#endif
  mov(Operand::StaticVariable(new_space_allocation_top), object);
}


void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate heap number in new space.
  AllocateInNewSpace(HeapNumber::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::heap_number_map()));
}


void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  ASSERT(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
  and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));

  // Allocate two byte string in new space.
  AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, length);
  ASSERT(kCharSize == 1);
  add(Operand(scratch1), Immediate(kObjectAlignmentMask));
  and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));

  // Allocate ascii string in new space.
  AllocateInNewSpace(SeqAsciiString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::ascii_string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateConsString(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::cons_string_map()));
}


void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  // Allocate ascii cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::cons_ascii_string_map()));
}


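// The NegativeZeroTest helpers below branch to the given target when an
// integer operation produced a zero result while an operand was negative,
// i.e. when the exact result would have been -0, which an untagged integer
// cannot represent.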
void MacroAssembler::NegativeZeroTest(CodeGenerator* cgen,
                                      Register result,
                                      Register op,
                                      JumpTarget* then_target) {
  JumpTarget ok;
  test(result, Operand(result));
  ok.Branch(not_zero, taken);
  test(op, Operand(op));
  then_target->Branch(sign, not_taken);
  ok.Bind();
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);
  test(op, Operand(op));
  j(sign, then_label, not_taken);
  bind(&ok);
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);
  mov(scratch, Operand(op1));
  or_(scratch, Operand(op2));
  j(sign, then_label, not_taken);
  bind(&ok);
}


void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss) {
  // Check that the receiver isn't a smi.
  test(function, Immediate(kSmiTagMask));
  j(zero, miss, not_taken);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss, not_taken);

  // Make sure that the function has an instance prototype.
  Label non_instance;
  movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
  test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance, not_taken);

  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(Operand(result), Immediate(Factory::the_hole_value()));
  j(equal, miss, not_taken);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  mov(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}


void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  call(stub->GetCode(), RelocInfo::CODE_TARGET);
}


Object* MacroAssembler::TryCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Object* result = stub->TryGetCode();
  if (!result->IsFailure()) {
    call(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
  }
  return result;
}


void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
}


Object* MacroAssembler::TryTailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Object* result = stub->TryGetCode();
  if (!result->IsFailure()) {
    jmp(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
  }
  return result;
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    add(Operand(esp), Immediate(num_arguments * kPointerSize));
  }
  mov(eax, Immediate(Factory::undefined_value()));
}


void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it do not conflict.
  ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  // We want the smi-tagged index in the index register. kArrayIndexValueMask
  // has zeros in the low kHashShift bits.
  and_(hash, String::kArrayIndexValueMask);
  STATIC_ASSERT(String::kHashShift >= kSmiTagSize && kSmiTag == 0);
  if (String::kHashShift > kSmiTagSize) {
    shr(hash, String::kHashShift - kSmiTagSize);
  }
  if (!index.is(hash)) {
    mov(index, hash);
  }
}


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


Object* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
                                       int num_arguments) {
  return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f)));
  CEntryStub ces(1);
  CallStub(&ces);
}


Object* MacroAssembler::TryCallRuntime(Runtime::Function* f,
                                       int num_arguments) {
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    // Since we did not call the stub, there was no allocation failure.
    // Return some non-failure object.
    return Heap::undefined_value();
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f)));
  CEntryStub ces(1);
  return TryCallStub(&ces);
}


void MacroAssembler::CallExternalReference(ExternalReference ref,
                                           int num_arguments) {
  mov(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ref));

  CEntryStub stub(1);
  CallStub(&stub);
}


void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  JumpToExternalReference(ext);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid), num_arguments, result_size);
}


void MacroAssembler::PushHandleScope(Register scratch) {
  // Push the number of extensions, smi-tagged so the gc will ignore it.
  ExternalReference extensions_address =
      ExternalReference::handle_scope_extensions_address();
  mov(scratch, Operand::StaticVariable(extensions_address));
  SmiTag(scratch);
  push(scratch);
  mov(Operand::StaticVariable(extensions_address), Immediate(0));
  // Push next and limit pointers which will be wordsize aligned and
  // hence automatically smi tagged.
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  push(Operand::StaticVariable(next_address));
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address();
  push(Operand::StaticVariable(limit_address));
}


Object* MacroAssembler::PopHandleScopeHelper(Register saved,
                                             Register scratch,
                                             bool gc_allowed) {
  Object* result = NULL;
  ExternalReference extensions_address =
      ExternalReference::handle_scope_extensions_address();
  Label write_back;
  mov(scratch, Operand::StaticVariable(extensions_address));
  cmp(Operand(scratch), Immediate(0));
  j(equal, &write_back);
  push(saved);
  if (gc_allowed) {
    CallRuntime(Runtime::kDeleteHandleScopeExtensions, 0);
  } else {
    result = TryCallRuntime(Runtime::kDeleteHandleScopeExtensions, 0);
    if (result->IsFailure()) return result;
  }
  pop(saved);

  bind(&write_back);
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address();
  pop(Operand::StaticVariable(limit_address));
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  pop(Operand::StaticVariable(next_address));
  pop(scratch);
  SmiUntag(scratch);
  mov(Operand::StaticVariable(extensions_address), scratch);

  return result;
}


void MacroAssembler::PopHandleScope(Register saved, Register scratch) {
  PopHandleScopeHelper(saved, scratch, true);
}


Object* MacroAssembler::TryPopHandleScope(Register saved, Register scratch) {
  return PopHandleScopeHelper(saved, scratch, false);
}


void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces(1);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}


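// Shared prologue for the invoke paths: makes sure the actual argument count
// matches the expected one, and otherwise calls (or tail-jumps to) the
// arguments adaptor trampoline before reaching the invoked code.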
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    const Operand& code_operand,
                                    Label* done,
                                    InvokeFlag flag) {
  bool definitely_matches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(eax, actual.immediate());
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaptation code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      ASSERT(expected.reg().is(ebx));
      mov(eax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), Operand(actual.reg()));
      j(equal, &invoke);
      ASSERT(actual.reg().is(eax));
      ASSERT(expected.reg().is(ebx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
    if (!code_constant.is_null()) {
      mov(edx, Immediate(code_constant));
      add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_operand.is_reg(edx)) {
      mov(edx, code_operand);
    }

    if (flag == CALL_FUNCTION) {
      call(adaptor, RelocInfo::CODE_TARGET);
      jmp(done);
    } else {
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}


void MacroAssembler::InvokeCode(const Operand& code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag) {
  Label done;
  InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code);
  }
  bind(&done);
}


void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag) {
  Label done;
  Operand dummy(eax);
  InvokePrologue(expected, actual, code, dummy, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code, rmode);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code, rmode);
  }
  bind(&done);
}


void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(fun.is(edi));
  mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  SmiUntag(ebx);

  ParameterCount expected(ebx);
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, actual, flag);
}


void MacroAssembler::InvokeFunction(JSFunction* function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(function->is_compiled());
  // Get the function and set up the context.
  mov(edi, Immediate(Handle<JSFunction>(function)));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  // Invoke the cached code.
  Handle<Code> code(function->code());
  ParameterCount expected(function->shared()->formal_parameter_count());
  InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments matches the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinFunction(edi, id);
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, expected, flag);
}


void MacroAssembler::GetBuiltinFunction(Register target,
                                        Builtins::JavaScript id) {
  // Load the JavaScript builtin function from the builtins object.
  mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
  mov(target, FieldOperand(target,
                           JSBuiltinsObject::OffsetOfFunctionWithId(id)));
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(edi));
  // Load the JavaScript builtin function from the builtins object.
  GetBuiltinFunction(edi, id);
  // Load the code entry point from the function into the target register.
  mov(target, FieldOperand(edi, JSFunction::kCodeEntryOffset));
}


void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    mov(dst, Operand(esi, Context::SlotOffset(Context::CLOSURE_INDEX)));
    // Load the function context (which is the incoming, outer context).
    mov(dst, FieldOperand(dst, JSFunction::kContextOffset));
    for (int i = 1; i < context_chain_length; i++) {
      mov(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
      mov(dst, FieldOperand(dst, JSFunction::kContextOffset));
    }
    // The context may be an intermediate context, not a function context.
    mov(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  } else {  // Slot is in the current function context.
    // The context may be an intermediate context, not a function context.
    mov(dst, Operand(esi, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  }
}


void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the global or builtins object from the current context.
  mov(function, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  // Load the global context from the global or builtins object.
  mov(function, FieldOperand(function, GlobalObject::kGlobalContextOffset));
  // Load the function from the global context.
  mov(function, Operand(function, Context::SlotOffset(index)));
}


void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map. The global functions all have initial maps.
  mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (FLAG_debug_code) {
    Label ok, fail;
    CheckMap(map, Factory::meta_map(), &fail, false);
    jmp(&ok);
    bind(&fail);
    Abort("Global functions must have initial map");
    bind(&ok);
  }
}


void MacroAssembler::Ret() {
  ret(0);
}


void MacroAssembler::Drop(int stack_elements) {
  if (stack_elements > 0) {
    add(Operand(esp), Immediate(stack_elements * kPointerSize));
  }
}


void MacroAssembler::Move(Register dst, Handle<Object> value) {
  mov(dst, value);
}


void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      inc(operand);
    } else {
      add(operand, Immediate(value));
    }
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      dec(operand);
    } else {
      sub(operand, Immediate(value));
    }
  }
}


void MacroAssembler::IncrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    IncrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}


void MacroAssembler::DecrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    DecrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code) Check(cc, msg);
}


void MacroAssembler::AssertFastElements(Register elements) {
  if (FLAG_debug_code) {
    Label ok;
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(Factory::fixed_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(Factory::fixed_cow_array_map()));
    j(equal, &ok);
    Abort("JSObject with fast elements map has slow elements");
    bind(&ok);
  }
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L, taken);
  Abort(msg);
  // Control will not return here.
  bind(&L);
}


void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    Label alignment_as_expected;
    test(esp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems; however, msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  set_allow_stub_calls(true);

  push(eax);
  push(Immediate(p0));
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
  CallRuntime(Runtime::kAbort, 2);
  // Control will not return here.
  int3();
}


void MacroAssembler::JumpIfNotNumber(Register reg,
                                     TypeInfo info,
                                     Label* on_not_number) {
  if (FLAG_debug_code) AbortIfSmi(reg);
  if (!info.IsNumber()) {
    cmp(FieldOperand(reg, HeapObject::kMapOffset),
        Factory::heap_number_map());
    j(not_equal, on_not_number);
  }
}


void MacroAssembler::ConvertToInt32(Register dst,
                                    Register source,
                                    Register scratch,
                                    TypeInfo info,
                                    Label* on_not_int32) {
  if (FLAG_debug_code) {
    AbortIfSmi(source);
    AbortIfNotNumber(source);
  }
  if (info.IsInteger32()) {
    cvttsd2si(dst, FieldOperand(source, HeapNumber::kValueOffset));
  } else {
    Label done;
    bool push_pop = (scratch.is(no_reg) && dst.is(source));
    ASSERT(!scratch.is(source));
    if (push_pop) {
      push(dst);
      scratch = dst;
    }
    if (scratch.is(no_reg)) scratch = dst;
    cvttsd2si(scratch, FieldOperand(source, HeapNumber::kValueOffset));
    cmp(scratch, 0x80000000u);
    if (push_pop) {
      j(not_equal, &done);
      pop(dst);
      jmp(on_not_int32);
    } else {
      j(equal, on_not_int32);
    }

    bind(&done);
    if (push_pop) {
      add(Operand(esp), Immediate(kPointerSize));  // Pop.
    }
    if (!scratch.is(dst)) {
      mov(dst, scratch);
    }
  }
}


void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Register scratch,
    Label* failure) {
  if (!scratch.is(instance_type)) {
    mov(scratch, instance_type);
  }
  and_(scratch,
       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
  cmp(scratch, kStringTag | kSeqStringTag | kAsciiStringTag);
  j(not_equal, failure);
}


void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register object1,
                                                         Register object2,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         Label* failure) {
  // Check that both objects are not smis.
  ASSERT_EQ(0, kSmiTag);
  mov(scratch1, Operand(object1));
  and_(scratch1, Operand(object2));
  test(scratch1, Immediate(kSmiTagMask));
  j(zero, failure);

  // Load instance type for both strings.
  mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
  mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
  movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ascii strings.
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
  // Interleave bits from both instance types and compare them in one check.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  and_(scratch1, kFlatAsciiStringMask);
  and_(scratch2, kFlatAsciiStringMask);
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3));
  j(not_equal, failure);
}


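// Reserves stack space for num_arguments words and aligns esp to the OS
// frame alignment; the original esp is saved just above the argument area so
// that the matching CallCFunction can restore it after the call.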
void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
  int frame_alignment = OS::ActivationFrameAlignment();
  if (frame_alignment != 0) {
    // Make stack end at alignment and make room for num_arguments words
    // and the original value of esp.
    mov(scratch, esp);
    sub(Operand(esp), Immediate((num_arguments + 1) * kPointerSize));
    ASSERT(IsPowerOf2(frame_alignment));
    and_(esp, -frame_alignment);
    mov(Operand(esp, num_arguments * kPointerSize), scratch);
  } else {
    sub(Operand(esp), Immediate(num_arguments * kPointerSize));
  }
}


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Trashing eax is ok as it will be the return value.
  mov(Operand(eax), Immediate(function));
  CallCFunction(eax, num_arguments);
}


void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  // Check stack alignment.
  if (FLAG_debug_code) {
    CheckStackAlignment();
  }

  call(Operand(function));
  if (OS::ActivationFrameAlignment() != 0) {
    mov(esp, Operand(esp, num_arguments * kPointerSize));
  } else {
    add(Operand(esp), Immediate(num_arguments * sizeof(int32_t)));
  }
}


CodePatcher::CodePatcher(byte* address, int size)
    : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to
  // patch. The size is adjusted with kGap in order for the assembler to
  // generate size bytes of instructions without failing with buffer size
  // constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_IA32