blob: 87e25d73dbd113234e8de6ae848afc8d9034cb29 [file] [log] [blame]
// Copyright 2006-2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
29
Leon Clarkef7060e22010-06-03 12:02:55 +010030#if defined(V8_TARGET_ARCH_IA32)
31
Steve Blocka7e24c12009-10-30 11:49:00 +000032#include "bootstrapper.h"
33#include "codegen-inl.h"
34#include "debug.h"
35#include "runtime.h"
36#include "serialize.h"
37
38namespace v8 {
39namespace internal {
40
41// -------------------------------------------------------------------------
42// MacroAssembler implementation.
43
// Construct a macro assembler emitting into 'buffer' of the given 'size'.
// Stub generation is off and stub calls are allowed by default; the code
// object starts out as the undefined value.
MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      code_object_(Heap::undefined_value()) {
}
50
51
// Mark dirty the page region containing the slot at 'addr' inside 'object'.
// Clobbers 'object' (becomes the page start) and 'addr' (becomes the region
// number); 'scratch' is only used for the debug-mode new-space check.
void MacroAssembler::RecordWriteHelper(Register object,
                                       Register addr,
                                       Register scratch) {
  if (FLAG_debug_code) {
    // Check that the object is not in new space; new-space objects need no
    // write barrier.
    Label not_in_new_space;
    InNewSpace(object, scratch, not_equal, &not_in_new_space);
    Abort("new-space object passed to RecordWriteHelper");
    bind(&not_in_new_space);
  }

  // Compute the page start address from the heap object pointer, and reuse
  // the 'object' register for it.
  and_(object, ~Page::kPageAlignmentMask);

  // Compute the number of the region covering addr.  See the
  // Page::GetRegionNumberForAddress method for more details.
  and_(addr, Page::kPageAlignmentMask);
  shr(addr, Page::kRegionSizeLog2);

  // Set the dirty mark bit for the region.
  bts(Operand(object, Page::kDirtyFlagOffset), addr);
}
75
76
Steve Block6ded16b2010-05-10 14:33:55 +010077void MacroAssembler::InNewSpace(Register object,
78 Register scratch,
79 Condition cc,
80 Label* branch) {
81 ASSERT(cc == equal || cc == not_equal);
82 if (Serializer::enabled()) {
83 // Can't do arithmetic on external references if it might get serialized.
84 mov(scratch, Operand(object));
85 // The mask isn't really an address. We load it as an external reference in
86 // case the size of the new space is different between the snapshot maker
87 // and the running system.
88 and_(Operand(scratch), Immediate(ExternalReference::new_space_mask()));
89 cmp(Operand(scratch), Immediate(ExternalReference::new_space_start()));
90 j(cc, branch);
91 } else {
92 int32_t new_space_start = reinterpret_cast<int32_t>(
93 ExternalReference::new_space_start().address());
94 lea(scratch, Operand(object, -new_space_start));
95 and_(scratch, Heap::NewSpaceMask());
96 j(cc, branch);
Steve Blocka7e24c12009-10-30 11:49:00 +000097 }
Steve Blocka7e24c12009-10-30 11:49:00 +000098}
99
100
// Write barrier for a store of 'value' into 'object' at the given byte
// 'offset'.  When offset == 0, 'scratch' is assumed to hold the smi index
// of the array element being written — TODO(review): confirm against
// callers.  Skips the barrier for smi values and new-space objects.
void MacroAssembler::RecordWrite(Register object,
                                 int offset,
                                 Register value,
                                 Register scratch) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are esi.
  ASSERT(!object.is(esi) && !value.is(esi) && !scratch.is(esi));

  // First, check if a write barrier is even needed.  The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  // Skip barrier if writing a smi.
  ASSERT_EQ(0, kSmiTag);
  test(value, Immediate(kSmiTagMask));
  j(zero, &done);

  InNewSpace(object, value, equal, &done);

  // The offset is relative to a tagged or untagged HeapObject pointer,
  // so either offset or offset + kHeapObjectTag must be a
  // multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize) ||
         IsAligned(offset + kHeapObjectTag, kPointerSize));

  Register dst = scratch;
  if (offset != 0) {
    lea(dst, Operand(object, offset));
  } else {
    // Array access: calculate the destination address in the same manner as
    // KeyedStoreIC::GenerateGeneric.  Multiply a smi by 2 to get an offset
    // into an array of words.
    ASSERT_EQ(1, kSmiTagSize);
    ASSERT_EQ(0, kSmiTag);
    lea(dst, Operand(object, dst, times_half_pointer_size,
                     FixedArray::kHeaderSize - kHeapObjectTag));
  }
  RecordWriteHelper(object, dst, value);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    mov(object, Immediate(BitCast<int32_t>(kZapValue)));
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
    mov(scratch, Immediate(BitCast<int32_t>(kZapValue)));
  }
}
151
152
// Write barrier for a store of 'value' at the precomputed slot 'address'
// inside 'object'.  Skips the barrier for smi values and new-space objects.
void MacroAssembler::RecordWrite(Register object,
                                 Register address,
                                 Register value) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are esi.
  ASSERT(!object.is(esi) && !value.is(esi) && !address.is(esi));

  // First, check if a write barrier is even needed.  The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  // Skip barrier if writing a smi.
  ASSERT_EQ(0, kSmiTag);
  test(value, Immediate(kSmiTagMask));
  j(zero, &done);

  InNewSpace(object, value, equal, &done);

  RecordWriteHelper(object, address, value);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    mov(object, Immediate(BitCast<int32_t>(kZapValue)));
    mov(address, Immediate(BitCast<int32_t>(kZapValue)));
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
  }
}
184
185
// Jump to 'on_stack_overflow' if esp has dropped below the current stack
// limit (i.e. the stack is about to overflow).
void MacroAssembler::StackLimitCheck(Label* on_stack_overflow) {
  cmp(esp,
      Operand::StaticVariable(ExternalReference::address_of_stack_limit()));
  j(below, on_stack_overflow);
}
191
192
#ifdef ENABLE_DEBUGGER_SUPPORT
// Emit a call into the Runtime::kDebugBreak entry through the C entry stub.
void MacroAssembler::DebugBreak() {
  // NOTE(review): eax appears to carry the argument count (0) expected by
  // CEntryStub — confirm against the stub's calling convention.
  Set(eax, Immediate(0));
  mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak)));
  CEntryStub ces(1);
  call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}
#endif
201
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100202
Steve Blocka7e24c12009-10-30 11:49:00 +0000203void MacroAssembler::Set(Register dst, const Immediate& x) {
204 if (x.is_zero()) {
205 xor_(dst, Operand(dst)); // shorter than mov
206 } else {
207 mov(dst, x);
208 }
209}
210
211
// Store the immediate 'x' into the memory operand 'dst'.
void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}
215
216
// Compare the instance type of 'heap_object' against 'type', leaving the
// object's map in 'map'.  Condition flags are set as by cmpb.
void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}
223
224
// Compare the instance-type byte of the given 'map' against 'type'.
void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       static_cast<int8_t>(type));
}
229
230
Andrei Popescu31002712010-02-23 13:46:05 +0000231void MacroAssembler::CheckMap(Register obj,
232 Handle<Map> map,
233 Label* fail,
234 bool is_heap_object) {
235 if (!is_heap_object) {
236 test(obj, Immediate(kSmiTagMask));
237 j(zero, fail);
238 }
239 cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
240 j(not_equal, fail);
241}
242
243
// Test whether 'heap_object' is a string.  Leaves the map in 'map' and the
// instance type in 'instance_type'; returns the condition ('zero') that
// holds when the object is a string.
Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  // Strings are exactly the types with a zero kIsNotStringMask bit.
  ASSERT(kNotStringTag != 0);
  test(instance_type, Immediate(kIsNotStringMask));
  return zero;
}
253
254
// Jump to 'fail' unless 'heap_object' is a JS object.  Leaves the object's
// map in 'map'; clobbers 'scratch'.
void MacroAssembler::IsObjectJSObjectType(Register heap_object,
                                          Register map,
                                          Register scratch,
                                          Label* fail) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  IsInstanceJSObjectType(map, scratch, fail);
}
262
263
// Jump to 'fail' unless the instance type in 'map' is within the JS object
// range [FIRST_JS_OBJECT_TYPE, LAST_JS_OBJECT_TYPE].  Clobbers 'scratch'.
void MacroAssembler::IsInstanceJSObjectType(Register map,
                                            Register scratch,
                                            Label* fail) {
  movzx_b(scratch, FieldOperand(map, Map::kInstanceTypeOffset));
  // Unsigned range check: (type - FIRST) must be <= (LAST - FIRST).
  sub(Operand(scratch), Immediate(FIRST_JS_OBJECT_TYPE));
  cmp(scratch, LAST_JS_OBJECT_TYPE - FIRST_JS_OBJECT_TYPE);
  j(above, fail);
}
272
273
// Compare the two values on top of the FPU stack and set the EFLAGS
// accordingly, popping both operands.
void MacroAssembler::FCmp() {
  if (CpuFeatures::IsSupported(CMOV)) {
    // fucomip compares and sets EFLAGS directly, popping one operand;
    // ffree + fincstp discard the other.
    fucomip();
    ffree(0);
    fincstp();
  } else {
    // Without CMOV support, compare-and-pop twice, then move the FPU status
    // word into EFLAGS via ax (preserving eax across the sequence).
    fucompp();
    push(eax);
    fnstsw_ax();
    sahf();
    pop(eax);
  }
}
287
288
// Debug-mode check: abort unless 'object' is a smi or a heap number.
void MacroAssembler::AbortIfNotNumber(Register object) {
  Label ok;
  test(object, Immediate(kSmiTagMask));
  j(zero, &ok);  // Smis are numbers.
  cmp(FieldOperand(object, HeapObject::kMapOffset),
      Factory::heap_number_map());
  Assert(equal, "Operand not a number");
  bind(&ok);
}
298
299
// Debug-mode check: abort unless 'object' is a smi.
void MacroAssembler::AbortIfNotSmi(Register object) {
  test(object, Immediate(kSmiTagMask));
  Assert(equal, "Operand is not a smi");
}
304
305
// Debug-mode check: abort unless 'object' is a string (not a smi and with
// an instance type below FIRST_NONSTRING_TYPE).  'object' is preserved via
// push/pop around the map load.
void MacroAssembler::AbortIfNotString(Register object) {
  test(object, Immediate(kSmiTagMask));
  Assert(not_equal, "Operand is not a string");
  push(object);
  mov(object, FieldOperand(object, HeapObject::kMapOffset));
  CmpInstanceType(object, FIRST_NONSTRING_TYPE);
  pop(object);
  Assert(below, "Operand is not a string");
}
315
316
// Debug-mode check: abort if 'object' is a smi.
void MacroAssembler::AbortIfSmi(Register object) {
  test(object, Immediate(kSmiTagMask));
  Assert(not_equal, "Operand is a smi");
}
321
322
// Build a stack frame of the given 'type': saved ebp, context (esi), frame
// type marker, and the code object.
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, Operand(esp));
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(CodeObject()));
  if (FLAG_debug_code) {
    // CodeObject() starts out undefined and is patched later; catch frames
    // emitted before the patch happens.
    cmp(Operand(esp, 0), Immediate(Factory::undefined_value()));
    Check(not_equal, "code object not properly patched");
  }
}
334
335
// Tear down a stack frame previously built by EnterFrame, verifying the
// frame type marker in debug mode.
void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (FLAG_debug_code) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, "stack frame types must match");
  }
  leave();
}
344
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100345
// Common first half of exit-frame construction: push ebp, reserve the entry
// sp slot, push the code object, and publish ebp/esi in the Top globals.
void MacroAssembler::EnterExitFramePrologue() {
  // Setup the frame structure on the stack.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(ebp);
  mov(ebp, Operand(esp));

  // Reserve room for entry stack pointer and push the code object.
  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  push(Immediate(CodeObject()));  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  ExternalReference context_address(Top::k_context_address);
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);
}
Steve Blocka7e24c12009-10-30 11:49:00 +0000365
Steve Blocka7e24c12009-10-30 11:49:00 +0000366
// Common second half of exit-frame construction: reserve 'argc' argument
// slots, align esp to the OS frame alignment, and patch the saved entry sp.
void MacroAssembler::EnterExitFrameEpilogue(int argc) {
  // Reserve space for arguments.
  sub(Operand(esp), Immediate(argc * kPointerSize));

  // Get the required frame alignment for the OS.
  static const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    // Aligning after the reservation keeps the argument slots usable while
    // satisfying the platform ABI.
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}
381
382
// Build an exit frame for a runtime call: edi receives the argument count
// (from eax) and esi the address of the first argument.
void MacroAssembler::EnterExitFrame() {
  EnterExitFramePrologue();

  // Setup argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, Operand(eax));
  lea(esi, Operand(ebp, eax, times_4, offset));

  EnterExitFrameEpilogue(2);
}
393
394
// Build an exit frame for an API call with a statically known 'stack_space'
// (esi points past the last argument) and 'argc' reserved argument slots.
void MacroAssembler::EnterApiExitFrame(int stack_space,
                                       int argc) {
  EnterExitFramePrologue();

  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  lea(esi, Operand(ebp, (stack_space * kPointerSize) + offset));

  EnterExitFrameEpilogue(argc);
}
404
405
// Tear down an exit frame: restore ebp/esi, drop the arguments and receiver
// (esi points past the last argument), and clear the Top frame globals.
void MacroAssembler::LeaveExitFrame() {
  // Get the return address from the stack and restore the frame pointer.
  mov(ecx, Operand(ebp, 1 * kPointerSize));
  mov(ebp, Operand(ebp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  lea(esp, Operand(esi, 1 * kPointerSize));

  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Top::k_context_address);
  mov(esi, Operand::StaticVariable(context_address));
#ifdef DEBUG
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Push the return address to get ready to return.
  push(ecx);

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}
428
429
// Push a stack handler (try-catch, try-finally, or JS-entry) onto the stack
// and link it at the head of the handler chain in Top::k_handler_address.
void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  // Adjust this code if not the case.
  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
  // The pc (return address) is already on TOS.
  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      push(Immediate(StackHandler::TRY_CATCH));
    } else {
      push(Immediate(StackHandler::TRY_FINALLY));
    }
    push(ebp);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The frame pointer does not point to a JS frame so we save NULL
    // for ebp.  We expect the code throwing an exception to check ebp
    // before dereferencing it to restore the context.
    push(Immediate(StackHandler::ENTRY));
    push(Immediate(0));  // NULL frame pointer.
  }
  // Save the current handler as the next handler.
  push(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
  // Link this handler as the new current one.
  mov(Operand::StaticVariable(ExternalReference(Top::k_handler_address)), esp);
}
455
456
// Unlink the top stack handler (restoring the next handler into
// Top::k_handler_address) and drop the rest of the handler from the stack.
void MacroAssembler::PopTryHandler() {
  ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
  pop(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
  add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize));
}
462
463
// Security check for accesses through a global proxy: jump to 'miss' unless
// the current lexical context and the holder's context are the same context
// or carry the same security token.  Clobbers 'scratch'; 'holder_reg' is
// preserved via push/pop.
void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));

  // Load current lexical context from the stack frame.
  mov(scratch, Operand(ebp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (FLAG_debug_code) {
    cmp(Operand(scratch), Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
  }
  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, offset));
  mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    push(scratch);
    // Read the first word and compare to global_context_map.
    mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
    cmp(scratch, Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(scratch);
  }

  // Check if both contexts are the same.
  cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  j(equal, &same_contexts, taken);

  // Compare security tokens, save holder_reg on the stack so we can use it
  // as a temporary register.
  //
  // TODO(119): avoid push(holder_reg)/pop(holder_reg)
  push(holder_reg);
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    cmp(holder_reg, Factory::null_value());
    Check(not_equal, "JSGlobalProxy::context() should not be null.");

    push(holder_reg);
    // Read the first word and compare to global_context_map(),
    mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    cmp(holder_reg, Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);
  }

  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, token_offset));
  cmp(scratch, FieldOperand(holder_reg, token_offset));
  pop(holder_reg);
  j(not_equal, miss, not_taken);

  bind(&same_contexts);
}
530
531
// Load the new-space allocation top into 'result'.  If RESULT_CONTAINS_TOP
// is set, 'result' already holds it (verified in debug builds).  When
// 'scratch' is valid it is left holding the address of the top variable so
// UpdateAllocationTopHelper can store through it without reloading.
void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register result_end,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    ASSERT(scratch.is(no_reg));
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    cmp(result, Operand::StaticVariable(new_space_allocation_top));
    Check(equal, "Unexpected allocation top");
#endif
    return;
  }

  // Move address of new object to result.  Use scratch register if available.
  if (scratch.is(no_reg)) {
    mov(result, Operand::StaticVariable(new_space_allocation_top));
  } else {
    ASSERT(!scratch.is(result_end));
    mov(Operand(scratch), Immediate(new_space_allocation_top));
    mov(result, Operand(scratch, 0));
  }
}
560
561
// Store 'result_end' as the new new-space allocation top.  If 'scratch' is
// valid it must already hold the address of the top variable (set up by
// LoadAllocationTopHelper).
void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch) {
  if (FLAG_debug_code) {
    test(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, "Unaligned allocation in new space");
  }

  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Update new top.  Use scratch if available.
  if (scratch.is(no_reg)) {
    mov(Operand::StaticVariable(new_space_allocation_top), result_end);
  } else {
    mov(Operand(scratch, 0), result_end);
  }
}
579
580
// Allocate a fixed-size object of 'object_size' bytes in new space, jumping
// to 'gc_required' on exhaustion.  On success 'result' holds the object
// (tagged if TAG_OBJECT).  When 'result_end' is no_reg the bump and the
// result share one register and the size is subtracted back afterwards.
void MacroAssembler::AllocateInNewSpace(int object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  Register top_reg = result_end.is_valid() ? result_end : result;

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();

  if (top_reg.is(result)) {
    add(Operand(top_reg), Immediate(object_size));
  } else {
    lea(top_reg, Operand(result, object_size));
  }
  cmp(top_reg, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required, not_taken);

  // Update allocation top.
  UpdateAllocationTopHelper(top_reg, scratch);

  // Tag result if requested.
  if (top_reg.is(result)) {
    // result currently holds the new top; subtract the size (adjusted by
    // the tag when tagging) to recover the object start.
    if ((flags & TAG_OBJECT) != 0) {
      sub(Operand(result), Immediate(object_size - kHeapObjectTag));
    } else {
      sub(Operand(result), Immediate(object_size));
    }
  } else if ((flags & TAG_OBJECT) != 0) {
    add(Operand(result), Immediate(kHeapObjectTag));
  }
}
620
621
// Allocate an object of size header_size + element_count << element_size in
// new space, jumping to 'gc_required' on exhaustion.  On success 'result'
// holds the object (tagged if TAG_OBJECT) and 'result_end' the new top.
void MacroAssembler::AllocateInNewSpace(int header_size,
                                        ScaleFactor element_size,
                                        Register element_count,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  lea(result_end, Operand(result, element_count, element_size, header_size));
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}
650
651
// Allocate an object whose size in bytes is held in register 'object_size'
// in new space, jumping to 'gc_required' on exhaustion.  On success
// 'result' holds the object (tagged if TAG_OBJECT) and 'result_end' the
// new top.  'object_size' may alias 'result_end'.
void MacroAssembler::AllocateInNewSpace(Register object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  if (!object_size.is(result_end)) {
    mov(result_end, object_size);
  }
  add(result_end, Operand(result));
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required, not_taken);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}
681
682
// Undo the most recent new-space allocation by resetting the allocation top
// to 'object' (untagged first).  Only valid if no allocation happened in
// between — verified in debug builds by checking object < current top.
void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Make sure the object has no tag before resetting top.
  and_(Operand(object), Immediate(~kHeapObjectTagMask));
#ifdef DEBUG
  cmp(object, Operand::StaticVariable(new_space_allocation_top));
  Check(below, "Undo allocation of non allocated memory");
#endif
  mov(Operand::StaticVariable(new_space_allocation_top), object);
}
695
696
// Allocate a HeapNumber in new space (value field left uninitialized),
// jumping to 'gc_required' on exhaustion.  Clobbers both scratch registers.
void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate heap number in new space.
  AllocateInNewSpace(HeapNumber::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::heap_number_map()));
}
713
714
// Allocate a sequential two-byte string of 'length' characters in new
// space, initializing map, length (as a smi) and hash field.  Character
// contents are left uninitialized.  Clobbers all scratch registers.
void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  ASSERT(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
  and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));

  // Allocate two byte string in new space.
  AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
748
749
// Allocate a sequential ASCII string of 'length' (register) characters in
// new space, initializing map, length (as a smi) and hash field.  Character
// contents are left uninitialized.  Clobbers all scratch registers.
void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, length);
  ASSERT(kCharSize == 1);
  add(Operand(scratch1), Immediate(kObjectAlignmentMask));
  and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));

  // Allocate ascii string in new space.
  AllocateInNewSpace(SeqAsciiString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::ascii_string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
783
784
// Allocate a sequential ASCII string of statically known 'length' (> 0)
// characters in new space, initializing map, length and hash field.
// Character contents are left uninitialized.
void MacroAssembler::AllocateAsciiString(Register result,
                                         int length,
                                         Register scratch1,
                                         Register scratch2,
                                         Label* gc_required) {
  ASSERT(length > 0);

  // Allocate ascii string in new space.
  AllocateInNewSpace(SeqAsciiString::SizeFor(length),
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::ascii_string_map()));
  mov(FieldOperand(result, String::kLengthOffset),
      Immediate(Smi::FromInt(length)));
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
808
809
// Allocate a (two-byte) cons string cell in new space, setting only the
// map; first/second/length/hash fields are left uninitialized.
void MacroAssembler::AllocateConsString(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate the cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map.  The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::cons_string_map()));
}
826
827
// Allocate an ASCII cons string cell in new space, setting only the map;
// first/second/length/hash fields are left uninitialized.
void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  // Allocate the cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map.  The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::cons_ascii_string_map()));
}
844
845
// Virtual-frame (JumpTarget) variant of the negative-zero check: branches
// to |then_target| when |result| is zero and |op| has its sign bit set,
// i.e. when a zero result may stand for floating-point -0.
// NOTE: the |cgen| parameter is not used in this body.
void MacroAssembler::NegativeZeroTest(CodeGenerator* cgen,
                                      Register result,
                                      Register op,
                                      JumpTarget* then_target) {
  JumpTarget ok;
  test(result, Operand(result));
  ok.Branch(not_zero, taken);  // A nonzero result cannot be -0.
  test(op, Operand(op));
  then_target->Branch(sign, not_taken);  // Zero result, negative operand.
  ok.Bind();
}
857
858
// Branches to |then_label| when |result| is zero and |op| has its sign bit
// set, i.e. when a zero result may stand for floating-point -0.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);  // A nonzero result cannot be -0.
  test(op, Operand(op));
  j(sign, then_label, not_taken);  // Zero result, negative operand.
  bind(&ok);
}
869
870
// Two-operand variant: branches to |then_label| when |result| is zero and
// either |op1| or |op2| has its sign bit set (their OR is negative).
// |scratch| is clobbered.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);  // A nonzero result cannot be -0.
  mov(scratch, Operand(op1));
  or_(scratch, Operand(op2));  // Sign bit set iff op1 or op2 is negative.
  j(sign, then_label, not_taken);
  bind(&ok);
}
884
885
// Loads into |result| the prototype used when |function| is invoked as a
// constructor.  Jumps to |miss| when |function| is a smi, not a
// JSFunction, or its prototype/initial-map slot still holds the hole.
// |scratch| is clobbered.
void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss) {
  // Check that the receiver isn't a smi.
  test(function, Immediate(kSmiTagMask));
  j(zero, miss, not_taken);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss, not_taken);

  // Make sure that the function has an instance prototype.
  Label non_instance;
  movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
  test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance, not_taken);

  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(Operand(result), Immediate(Factory::the_hole_value()));
  j(equal, miss, not_taken);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  mov(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}
931
932
// Calls |stub| through its generated code object.
void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  call(stub->GetCode(), RelocInfo::CODE_TARGET);
}
937
938
// Like CallStub, but uses TryGetCode: if obtaining the stub code fails,
// no call is emitted and the Failure object is returned; otherwise the
// call is emitted and the code object is returned.
Object* MacroAssembler::TryCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Object* result = stub->TryGetCode();
  if (!result->IsFailure()) {
    call(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
  }
  return result;
}
947
948
// Tail-calls (jumps to) |stub| through its generated code object.
void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
}
953
954
// Like TailCallStub, but uses TryGetCode: if obtaining the stub code
// fails, no jump is emitted and the Failure object is returned; otherwise
// the jump is emitted and the code object is returned.
Object* MacroAssembler::TryTailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Object* result = stub->TryGetCode();
  if (!result->IsFailure()) {
    jmp(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
  }
  return result;
}
963
964
// Returns from a stub, removing (argc - 1) argument words from the
// caller's stack.  Only valid while generating a stub.
void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}
969
970
// Emits the "illegal operation" fallback: removes |num_arguments| words
// from the stack and leaves undefined in eax as the result value.
void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    add(Operand(esp), Immediate(num_arguments * kPointerSize));
  }
  mov(eax, Immediate(Factory::undefined_value()));
}
977
978
// Extracts the cached array-index payload from the string hash-field value
// in |hash| and leaves it, smi-tagged, in |index|.  |hash| is clobbered.
void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it does not conflict.
  ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  // We want the smi-tagged index in key. kArrayIndexValueMask has zeros in
  // the low kHashShift bits.
  and_(hash, String::kArrayIndexValueMask);
  STATIC_ASSERT(String::kHashShift >= kSmiTagSize && kSmiTag == 0);
  // Shift right so the value ends up shifted left by exactly kSmiTagSize,
  // i.e. already smi-tagged.
  if (String::kHashShift > kSmiTagSize) {
    shr(hash, String::kHashShift - kSmiTagSize);
  }
  if (!index.is(hash)) {
    mov(index, hash);
  }
}
996
997
// Convenience overload: resolves the runtime function for |id| and calls
// it with |num_arguments| arguments.
void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}
1001
1002
// Convenience overload: resolves the runtime function for |id| and
// try-calls it, returning a Failure if stub code could not be obtained.
Object* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
                                       int num_arguments) {
  return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
}
1007
1008
Steve Blocka7e24c12009-10-30 11:49:00 +00001009void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
1010 // If the expected number of arguments of the runtime function is
1011 // constant, we check that the actual number of arguments match the
1012 // expectation.
1013 if (f->nargs >= 0 && f->nargs != num_arguments) {
1014 IllegalOperation(num_arguments);
1015 return;
1016 }
1017
Leon Clarke4515c472010-02-03 11:58:03 +00001018 // TODO(1236192): Most runtime routines don't need the number of
1019 // arguments passed in because it is constant. At some point we
1020 // should remove this need and make the runtime routine entry code
1021 // smarter.
1022 Set(eax, Immediate(num_arguments));
1023 mov(ebx, Immediate(ExternalReference(f)));
1024 CEntryStub ces(1);
1025 CallStub(&ces);
Steve Blocka7e24c12009-10-30 11:49:00 +00001026}
1027
1028
Leon Clarkee46be812010-01-19 14:06:41 +00001029Object* MacroAssembler::TryCallRuntime(Runtime::Function* f,
1030 int num_arguments) {
1031 if (f->nargs >= 0 && f->nargs != num_arguments) {
1032 IllegalOperation(num_arguments);
1033 // Since we did not call the stub, there was no allocation failure.
1034 // Return some non-failure object.
1035 return Heap::undefined_value();
1036 }
1037
Leon Clarke4515c472010-02-03 11:58:03 +00001038 // TODO(1236192): Most runtime routines don't need the number of
1039 // arguments passed in because it is constant. At some point we
1040 // should remove this need and make the runtime routine entry code
1041 // smarter.
1042 Set(eax, Immediate(num_arguments));
1043 mov(ebx, Immediate(ExternalReference(f)));
1044 CEntryStub ces(1);
1045 return TryCallStub(&ces);
Leon Clarkee46be812010-01-19 14:06:41 +00001046}
1047
1048
// Calls the external (runtime) entry |ref| with |num_arguments| arguments
// through the C-entry stub (eax = argument count, ebx = entry reference).
void MacroAssembler::CallExternalReference(ExternalReference ref,
                                           int num_arguments) {
  mov(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ref));

  CEntryStub stub(1);
  CallStub(&stub);
}
1057
1058
// Tail-calls the external (runtime) entry |ext| with |num_arguments|
// arguments via JumpToExternalReference.  |result_size| is unused on ia32.
void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  JumpToExternalReference(ext);
}
1069
1070
// Convenience overload: tail-calls the runtime function identified by
// |fid| through TailCallExternalReference.
void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid), num_arguments, result_size);
}
1076
1077
// Saves the current handle-scope state (extension count, next pointer,
// limit pointer) on the stack and resets the extension count to zero.
// |scratch| is clobbered.  Matched by PopHandleScope/TryPopHandleScope.
void MacroAssembler::PushHandleScope(Register scratch) {
  // Push the number of extensions, smi-tagged so the gc will ignore it.
  ExternalReference extensions_address =
      ExternalReference::handle_scope_extensions_address();
  mov(scratch, Operand::StaticVariable(extensions_address));
  SmiTag(scratch);
  push(scratch);
  mov(Operand::StaticVariable(extensions_address), Immediate(0));
  // Push next and limit pointers which will be wordsize aligned and
  // hence automatically smi tagged.
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  push(Operand::StaticVariable(next_address));
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address();
  push(Operand::StaticVariable(limit_address));
}
1095
1096
// Restores the handle-scope state saved by PushHandleScope.  If the
// current scope grew extensions, they are first released by calling
// Runtime::kDeleteHandleScopeExtensions with |saved| preserved across the
// call.  When |gc_allowed| is false the runtime call goes through
// TryCallRuntime and any Failure is returned immediately (before the
// restore code is emitted); otherwise NULL is returned.  |scratch| is
// clobbered.
Object* MacroAssembler::PopHandleScopeHelper(Register saved,
                                             Register scratch,
                                             bool gc_allowed) {
  Object* result = NULL;
  ExternalReference extensions_address =
      ExternalReference::handle_scope_extensions_address();
  Label write_back;
  mov(scratch, Operand::StaticVariable(extensions_address));
  cmp(Operand(scratch), Immediate(0));
  j(equal, &write_back);  // No extensions: skip the runtime call.
  push(saved);  // Preserve |saved| across the runtime call.
  if (gc_allowed) {
    CallRuntime(Runtime::kDeleteHandleScopeExtensions, 0);
  } else {
    result = TryCallRuntime(Runtime::kDeleteHandleScopeExtensions, 0);
    if (result->IsFailure()) return result;
  }
  pop(saved);

  bind(&write_back);
  // Pop in reverse order of PushHandleScope: limit, next, then the
  // smi-tagged extension count.
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address();
  pop(Operand::StaticVariable(limit_address));
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  pop(Operand::StaticVariable(next_address));
  pop(scratch);
  SmiUntag(scratch);
  mov(Operand::StaticVariable(extensions_address), scratch);

  return result;
}
1129
1130
// Restores handle-scope state; GC is allowed during the cleanup call.
void MacroAssembler::PopHandleScope(Register saved, Register scratch) {
  PopHandleScopeHelper(saved, scratch, true);
}
1134
1135
// Restores handle-scope state without allowing GC; returns a Failure if
// the underlying runtime call could not be set up.
Object* MacroAssembler::TryPopHandleScope(Register saved, Register scratch) {
  return PopHandleScopeHelper(saved, scratch, false);
}
1139
1140
// Tail-calls the runtime entry |ext| by loading it into ebx and jumping
// to the C-entry stub.
void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces(1);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}
1147
1148
// Shared invocation prologue: reconciles the |expected| and |actual|
// argument counts.  On a statically known match, falls through with no
// adaptation code.  Otherwise loads eax (actual) and ebx (expected) as
// the arguments adaptor expects and calls or jumps (per |flag|) to the
// ArgumentsAdaptorTrampoline, with edx holding the code to invoke
// afterwards (from |code_constant| or |code_operand|).  After a CALL-style
// adaptation, control continues at |done|.
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    const Operand& code_operand,
                                    Label* done,
                                    InvokeFlag flag) {
  bool definitely_matches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(eax, actual.immediate());
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      ASSERT(expected.reg().is(ebx));
      mov(eax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), Operand(actual.reg()));
      j(equal, &invoke);
      ASSERT(actual.reg().is(eax));
      ASSERT(expected.reg().is(ebx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
    // Materialize the code to invoke in edx for the adaptor, unless it is
    // already there.
    if (!code_constant.is_null()) {
      mov(edx, Immediate(code_constant));
      add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_operand.is_reg(edx)) {
      mov(edx, code_operand);
    }

    if (flag == CALL_FUNCTION) {
      call(adaptor, RelocInfo::CODE_TARGET);
      jmp(done);
    } else {
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
1212
1213
// Invokes the code at |code| (an Operand) with argument-count adaptation,
// either calling or jumping per |flag|.
void MacroAssembler::InvokeCode(const Operand& code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag) {
  Label done;
  InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code);
  }
  bind(&done);
}
1228
1229
// Invokes the code object |code| (via |rmode| relocation) with
// argument-count adaptation, either calling or jumping per |flag|.
void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag) {
  Label done;
  Operand dummy(eax);  // Unused: the prologue takes the code constant.
  InvokePrologue(expected, actual, code, dummy, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code, rmode);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code, rmode);
  }
  bind(&done);
}
1246
1247
// Invokes the JSFunction in edi with |actual| arguments.  Loads the
// function's context into esi and its expected (formal) parameter count,
// smi-untagged, into ebx, then invokes through the function's code entry.
void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(fun.is(edi));
  mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  SmiUntag(ebx);

  ParameterCount expected(ebx);
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, actual, flag);
}
1261
1262
// Invokes the statically known, already compiled |function| with |actual|
// arguments.  Loads the function into edi, its context into esi, and
// invokes the function's cached code with its formal parameter count.
void MacroAssembler::InvokeFunction(JSFunction* function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(function->is_compiled());
  // Get the function and setup the context.
  mov(edi, Immediate(Handle<JSFunction>(function)));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  // Invoke the cached code.
  Handle<Code> code(function->code());
  ParameterCount expected(function->shared()->formal_parameter_count());
  InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag);
}
1275
1276
// Invokes the JavaScript builtin |id|: loads its function into edi and
// invokes through the code entry.  Uses a fake expected count of 0 so no
// argument-adaptation check is emitted; the callee's own assertions cover
// the argument count.
void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments match the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinFunction(edi, id);
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, expected, flag);
}
1289
// Loads the JSFunction for JavaScript builtin |id| into |target|, reached
// through the global object's builtins object.
void MacroAssembler::GetBuiltinFunction(Register target,
                                        Builtins::JavaScript id) {
  // Load the JavaScript builtin function from the builtins object.
  mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
  mov(target, FieldOperand(target,
                           JSBuiltinsObject::OffsetOfFunctionWithId(id)));
}
Steve Blocka7e24c12009-10-30 11:49:00 +00001298
// Loads the code entry point of JavaScript builtin |id| into |target|.
// Clobbers edi (used to hold the builtin function), so |target| must not
// be edi.
void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(edi));
  // Load the JavaScript builtin function from the builtins object.
  GetBuiltinFunction(edi, id);
  // Load the code entry point from the function into the target register.
  mov(target, FieldOperand(edi, JSFunction::kCodeEntryOffset));
}
1306
1307
// Loads into |dst| the function context |context_chain_length| levels up
// the context chain from the current context (esi).  The final step maps
// any intermediate context back to its function context via FCONTEXT.
void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    mov(dst, Operand(esi, Context::SlotOffset(Context::CLOSURE_INDEX)));
    // Load the function context (which is the incoming, outer context).
    mov(dst, FieldOperand(dst, JSFunction::kContextOffset));
    for (int i = 1; i < context_chain_length; i++) {
      mov(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
      mov(dst, FieldOperand(dst, JSFunction::kContextOffset));
    }
    // The context may be an intermediate context, not a function context.
    mov(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  } else {  // Slot is in the current function context.
    // The context may be an intermediate context, not a function context.
    mov(dst, Operand(esi, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  }
}
1325
1326
// Loads the global function at slot |index| of the global context into
// |function|.
void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the global or builtins object from the current context.
  mov(function, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  // Load the global context from the global or builtins object.
  mov(function, FieldOperand(function, GlobalObject::kGlobalContextOffset));
  // Load the function from the global context.
  mov(function, Operand(function, Context::SlotOffset(index)));
}
1335
1336
// Loads the initial map of the global function |function| into |map|.
// In debug-code mode, verifies the loaded value is really a map (by
// checking against the meta map) and aborts otherwise.
void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map. The global functions all have initial maps.
  mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (FLAG_debug_code) {
    Label ok, fail;
    CheckMap(map, Factory::meta_map(), &fail, false);
    jmp(&ok);
    bind(&fail);
    Abort("Global functions must have initial map");
    bind(&ok);
  }
}
1350
Steve Blockd0582a62009-12-15 09:54:21 +00001351
// Emits a plain return that pops no argument words.
void MacroAssembler::Ret() {
  ret(0);
}
1355
1356
Leon Clarkee46be812010-01-19 14:06:41 +00001357void MacroAssembler::Drop(int stack_elements) {
1358 if (stack_elements > 0) {
1359 add(Operand(esp), Immediate(stack_elements * kPointerSize));
1360 }
1361}
1362
1363
// Moves the object handle |value| into register |dst|.
void MacroAssembler::Move(Register dst, Handle<Object> value) {
  mov(dst, value);
}
1367
1368
// Stores |value| into the stats counter when native-code counters are
// enabled; otherwise emits nothing.
void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
  }
}
1374
1375
// Adds |value| (> 0) to the stats counter when native-code counters are
// enabled; uses a shorter inc instruction for value 1.
void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      inc(operand);
    } else {
      add(operand, Immediate(value));
    }
  }
}
1387
1388
// Subtracts |value| (> 0) from the stats counter when native-code
// counters are enabled; uses a shorter dec instruction for value 1.
void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      dec(operand);
    } else {
      sub(operand, Immediate(value));
    }
  }
}
1400
1401
// Increments the stats counter only when condition |cc| holds.  The EFLAGS
// register is preserved across the update via pushfd/popfd so callers can
// keep branching on the original condition.
void MacroAssembler::IncrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    IncrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}
1415
1416
// Decrements the stats counter only when condition |cc| holds.  The EFLAGS
// register is preserved across the update via pushfd/popfd so callers can
// keep branching on the original condition.
void MacroAssembler::DecrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    DecrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}
1430
1431
Steve Blocka7e24c12009-10-30 11:49:00 +00001432void MacroAssembler::Assert(Condition cc, const char* msg) {
1433 if (FLAG_debug_code) Check(cc, msg);
1434}
1435
1436
// Debug-mode check that |elements| holds a fast-elements backing store:
// its map must be either the fixed-array map or the copy-on-write
// fixed-array map.  Aborts otherwise.
void MacroAssembler::AssertFastElements(Register elements) {
  if (FLAG_debug_code) {
    Label ok;
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(Factory::fixed_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(Factory::fixed_cow_array_map()));
    j(equal, &ok);
    Abort("JSObject with fast elements map has slow elements");
    bind(&ok);
  }
}
1450
1451
// Unconditional runtime check: if condition |cc| does not hold, aborts
// with |msg|; otherwise falls through.
void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L, taken);
  Abort(msg);
  // will not return here
  bind(&L);
}
1459
1460
// Emits a debug check that esp is aligned to the OS activation-frame
// alignment; traps with int3 on misalignment.  No code is emitted when
// the required alignment does not exceed the pointer size.
void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    Label alignment_as_expected;
    test(esp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}
1474
1475
// Aborts execution by calling Runtime::kAbort with the message encoded as
// two smi-safe values (an aligned pointer plus the alignment delta).
// Does not return; ends with int3.
void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  set_allow_stub_calls(true);

  push(eax);
  push(Immediate(p0));
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
  int3();
}
1501
1502
// Jumps to |on_not_number| unless the heap object in |reg| has the heap
// number map.  |reg| must not be a smi (debug-checked).  When static type
// info already proves a number, no check code is emitted at all.
void MacroAssembler::JumpIfNotNumber(Register reg,
                                     TypeInfo info,
                                     Label* on_not_number) {
  if (FLAG_debug_code) AbortIfSmi(reg);
  if (!info.IsNumber()) {
    cmp(FieldOperand(reg, HeapObject::kMapOffset),
        Factory::heap_number_map());
    j(not_equal, on_not_number);
  }
}
1513
1514
// Loads the value of the heap number in |source| into |dst| as an
// untagged int32, jumping to |on_not_int32| when the double does not
// convert exactly.  |source| must be a non-smi number (debug-checked).
// |scratch| may be no_reg; when |dst| also aliases |source|, |dst| is
// push/popped around the probing conversion.
void MacroAssembler::ConvertToInt32(Register dst,
                                    Register source,
                                    Register scratch,
                                    TypeInfo info,
                                    Label* on_not_int32) {
  if (FLAG_debug_code) {
    AbortIfSmi(source);
    AbortIfNotNumber(source);
  }
  if (info.IsInteger32()) {
    // Statically known int32: a plain truncating conversion suffices.
    cvttsd2si(dst, FieldOperand(source, HeapNumber::kValueOffset));
  } else {
    Label done;
    bool push_pop = (scratch.is(no_reg) && dst.is(source));
    ASSERT(!scratch.is(source));
    if (push_pop) {
      push(dst);  // Preserve the original value in case conversion fails.
      scratch = dst;
    }
    if (scratch.is(no_reg)) scratch = dst;
    cvttsd2si(scratch, FieldOperand(source, HeapNumber::kValueOffset));
    // cvttsd2si yields the "integer indefinite" value 0x80000000 for
    // invalid conversions (NaN, infinity, out of int32 range), so that
    // value is used as the failure sentinel.  NOTE(review): the legal
    // input kMinInt (-2^31) also converts to 0x80000000 and is therefore
    // conservatively sent to |on_not_int32| as well.
    cmp(scratch, 0x80000000u);
    if (push_pop) {
      j(not_equal, &done);
      pop(dst);  // Restore the original value on the failure path.
      jmp(on_not_int32);
    } else {
      j(equal, on_not_int32);
    }

    bind(&done);
    if (push_pop) {
      add(Operand(esp), Immediate(kPointerSize));  // Pop.
    }
    if (!scratch.is(dst)) {
      mov(dst, scratch);
    }
  }
}
1554
1555
// Given a string instance type in |instance_type|, jumps to |failure|
// unless it denotes a sequential ASCII string.  |scratch| is clobbered
// (and may alias |instance_type|).
void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Register scratch,
    Label* failure) {
  if (!scratch.is(instance_type)) {
    mov(scratch, instance_type);
  }
  // Keep only the string-ness, representation and encoding bits, then
  // require exactly "string, sequential, ascii".
  and_(scratch,
       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
  cmp(scratch, kStringTag | kSeqStringTag | kAsciiStringTag);
  j(not_equal, failure);
}
1568
1569
// Jumps to |failure| unless |object1| and |object2| are both non-smi,
// sequential (flat) ASCII strings.  |scratch1| and |scratch2| are
// clobbered.  The two instance types are checked with a single compare by
// interleaving their masked bits.
void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register object1,
                                                         Register object2,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         Label* failure) {
  // Check that both objects are not smis.
  ASSERT_EQ(0, kSmiTag);
  mov(scratch1, Operand(object1));
  and_(scratch1, Operand(object2));  // Smi tag clear in the AND => a smi.
  test(scratch1, Immediate(kSmiTagMask));
  j(zero, failure);

  // Load instance type for both strings.
  mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
  mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
  movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ascii strings.
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
  // Interleave bits from both instance types and compare them in one check.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  and_(scratch1, kFlatAsciiStringMask);
  and_(scratch2, kFlatAsciiStringMask);
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3));
  j(not_equal, failure);
}
1600
1601
Steve Block6ded16b2010-05-10 14:33:55 +01001602void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
1603 int frameAlignment = OS::ActivationFrameAlignment();
1604 if (frameAlignment != 0) {
1605 // Make stack end at alignment and make room for num_arguments words
1606 // and the original value of esp.
1607 mov(scratch, esp);
1608 sub(Operand(esp), Immediate((num_arguments + 1) * kPointerSize));
1609 ASSERT(IsPowerOf2(frameAlignment));
1610 and_(esp, -frameAlignment);
1611 mov(Operand(esp, num_arguments * kPointerSize), scratch);
1612 } else {
1613 sub(Operand(esp), Immediate(num_arguments * kPointerSize));
1614 }
1615}
1616
1617
// Calls the C function behind |function| with |num_arguments| stack
// arguments, loading the target address through eax (which is clobbered
// anyway as the return-value register).
void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Trashing eax is ok as it will be the return value.
  mov(Operand(eax), Immediate(function));
  CallCFunction(eax, num_arguments);
}
1624
1625
// Calls the C function whose address is in |function|.  Afterwards, when
// frame alignment is in effect, esp is restored from the saved slot
// written by PrepareCallCFunction; otherwise the argument words are
// simply popped.
void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  // Check stack alignment.
  if (FLAG_debug_code) {
    CheckStackAlignment();
  }

  call(Operand(function));
  if (OS::ActivationFrameAlignment() != 0) {
    // Restore the esp value saved above the arguments by
    // PrepareCallCFunction.
    mov(esp, Operand(esp, num_arguments * kPointerSize));
  } else {
    add(Operand(esp), Immediate(num_arguments * sizeof(int32_t)));
  }
}
1640
1641
// Constructs a patcher that assembles directly over the |size| bytes of
// existing code at |address|.
CodePatcher::CodePatcher(byte* address, int size)
    : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap on order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
1649
1650
// Flushes the instruction cache over the patched region and debug-checks
// that exactly |size_| bytes were emitted.
CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
1659
1660
1661} } // namespace v8::internal
Leon Clarkef7060e22010-06-03 12:02:55 +01001662
1663#endif // V8_TARGET_ARCH_IA32