blob: dff174cb5384ac42a542b012dcef060724a58407 [file] [log] [blame]
Steve Block1e0659c2011-05-24 12:43:12 +01001// Copyright 2011 the V8 project authors. All rights reserved.
Steve Blocka7e24c12009-10-30 11:49:00 +00002// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
29
Leon Clarkef7060e22010-06-03 12:02:55 +010030#if defined(V8_TARGET_ARCH_IA32)
31
Steve Blocka7e24c12009-10-30 11:49:00 +000032#include "bootstrapper.h"
Ben Murdoch8b112d22011-06-08 16:22:53 +010033#include "codegen.h"
Steve Blocka7e24c12009-10-30 11:49:00 +000034#include "debug.h"
35#include "runtime.h"
36#include "serialize.h"
37
38namespace v8 {
39namespace internal {
40
41// -------------------------------------------------------------------------
42// MacroAssembler implementation.
43
Ben Murdoch8b112d22011-06-08 16:22:53 +010044MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
45 : Assembler(arg_isolate, buffer, size),
Steve Blocka7e24c12009-10-30 11:49:00 +000046 generating_stub_(false),
Ben Murdoch8b112d22011-06-08 16:22:53 +010047 allow_stub_calls_(true) {
48 if (isolate() != NULL) {
49 code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
50 isolate());
51 }
Steve Blocka7e24c12009-10-30 11:49:00 +000052}
53
54
Steve Block6ded16b2010-05-10 14:33:55 +010055void MacroAssembler::RecordWriteHelper(Register object,
56 Register addr,
57 Register scratch) {
Steve Block44f0eee2011-05-26 01:26:41 +010058 if (emit_debug_code()) {
Steve Block6ded16b2010-05-10 14:33:55 +010059 // Check that the object is not in new space.
60 Label not_in_new_space;
61 InNewSpace(object, scratch, not_equal, &not_in_new_space);
62 Abort("new-space object passed to RecordWriteHelper");
63 bind(&not_in_new_space);
64 }
65
Steve Blocka7e24c12009-10-30 11:49:00 +000066 // Compute the page start address from the heap object pointer, and reuse
67 // the 'object' register for it.
Steve Block6ded16b2010-05-10 14:33:55 +010068 and_(object, ~Page::kPageAlignmentMask);
Steve Blocka7e24c12009-10-30 11:49:00 +000069
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010070 // Compute number of region covering addr. See Page::GetRegionNumberForAddress
71 // method for more details.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010072 shr(addr, Page::kRegionSizeLog2);
Ben Murdoch69a99ed2011-11-30 16:03:39 +000073 and_(addr, Page::kPageAlignmentMask >> Page::kRegionSizeLog2);
Steve Blocka7e24c12009-10-30 11:49:00 +000074
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010075 // Set dirty mark for region.
Ben Murdoch257744e2011-11-30 15:57:28 +000076 // Bit tests with a memory operand should be avoided on Intel processors,
77 // as they usually have long latency and multiple uops. We load the bit base
78 // operand to a register at first and store it back after bit set.
79 mov(scratch, Operand(object, Page::kDirtyFlagOffset));
80 bts(Operand(scratch), addr);
81 mov(Operand(object, Page::kDirtyFlagOffset), scratch);
82}
83
84
85void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
86 XMMRegister scratch_reg,
87 Register result_reg) {
88 Label done;
89 ExternalReference zero_ref = ExternalReference::address_of_zero();
90 movdbl(scratch_reg, Operand::StaticVariable(zero_ref));
91 Set(result_reg, Immediate(0));
92 ucomisd(input_reg, scratch_reg);
93 j(below, &done, Label::kNear);
94 ExternalReference half_ref = ExternalReference::address_of_one_half();
95 movdbl(scratch_reg, Operand::StaticVariable(half_ref));
96 addsd(scratch_reg, input_reg);
97 cvttsd2si(result_reg, Operand(scratch_reg));
98 test(result_reg, Immediate(0xFFFFFF00));
99 j(zero, &done, Label::kNear);
100 Set(result_reg, Immediate(255));
101 bind(&done);
102}
103
104
105void MacroAssembler::ClampUint8(Register reg) {
106 Label done;
107 test(reg, Immediate(0xFFFFFF00));
108 j(zero, &done, Label::kNear);
109 setcc(negative, reg); // 1 if negative, 0 if positive.
110 dec_b(reg); // 0 if negative, 255 if positive.
111 bind(&done);
112}
113
114
115void MacroAssembler::InNewSpace(Register object,
116 Register scratch,
117 Condition cc,
118 Label* branch,
119 Label::Distance branch_near) {
120 ASSERT(cc == equal || cc == not_equal);
121 if (Serializer::enabled()) {
122 // Can't do arithmetic on external references if it might get serialized.
123 mov(scratch, Operand(object));
124 // The mask isn't really an address. We load it as an external reference in
125 // case the size of the new space is different between the snapshot maker
126 // and the running system.
127 and_(Operand(scratch),
128 Immediate(ExternalReference::new_space_mask(isolate())));
129 cmp(Operand(scratch),
130 Immediate(ExternalReference::new_space_start(isolate())));
131 j(cc, branch, branch_near);
132 } else {
133 int32_t new_space_start = reinterpret_cast<int32_t>(
134 ExternalReference::new_space_start(isolate()).address());
135 lea(scratch, Operand(object, -new_space_start));
136 and_(scratch, isolate()->heap()->NewSpaceMask());
137 j(cc, branch, branch_near);
138 }
Steve Blocka7e24c12009-10-30 11:49:00 +0000139}
140
141
Kristian Monsen50ef84f2010-07-29 15:18:00 +0100142void MacroAssembler::RecordWrite(Register object,
143 int offset,
144 Register value,
145 Register scratch) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100146 // First, check if a write barrier is even needed. The tests below
147 // catch stores of Smis and stores into young gen.
Ben Murdoch257744e2011-11-30 15:57:28 +0000148 Label done;
Steve Blocka7e24c12009-10-30 11:49:00 +0000149
150 // Skip barrier if writing a smi.
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000151 STATIC_ASSERT(kSmiTag == 0);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000152 JumpIfSmi(value, &done, Label::kNear);
Steve Blocka7e24c12009-10-30 11:49:00 +0000153
Ben Murdoch257744e2011-11-30 15:57:28 +0000154 InNewSpace(object, value, equal, &done, Label::kNear);
Steve Blocka7e24c12009-10-30 11:49:00 +0000155
Steve Block6ded16b2010-05-10 14:33:55 +0100156 // The offset is relative to a tagged or untagged HeapObject pointer,
157 // so either offset or offset + kHeapObjectTag must be a
158 // multiple of kPointerSize.
159 ASSERT(IsAligned(offset, kPointerSize) ||
160 IsAligned(offset + kHeapObjectTag, kPointerSize));
161
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100162 Register dst = scratch;
163 if (offset != 0) {
164 lea(dst, Operand(object, offset));
Steve Blocka7e24c12009-10-30 11:49:00 +0000165 } else {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100166 // Array access: calculate the destination address in the same manner as
167 // KeyedStoreIC::GenerateGeneric. Multiply a smi by 2 to get an offset
168 // into an array of words.
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000169 STATIC_ASSERT(kSmiTagSize == 1);
170 STATIC_ASSERT(kSmiTag == 0);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100171 lea(dst, Operand(object, dst, times_half_pointer_size,
172 FixedArray::kHeaderSize - kHeapObjectTag));
Steve Blocka7e24c12009-10-30 11:49:00 +0000173 }
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100174 RecordWriteHelper(object, dst, value);
Steve Blocka7e24c12009-10-30 11:49:00 +0000175
176 bind(&done);
Leon Clarke4515c472010-02-03 11:58:03 +0000177
178 // Clobber all input registers when running with the debug-code flag
179 // turned on to provoke errors.
Steve Block44f0eee2011-05-26 01:26:41 +0100180 if (emit_debug_code()) {
Steve Block6ded16b2010-05-10 14:33:55 +0100181 mov(object, Immediate(BitCast<int32_t>(kZapValue)));
182 mov(value, Immediate(BitCast<int32_t>(kZapValue)));
183 mov(scratch, Immediate(BitCast<int32_t>(kZapValue)));
Leon Clarke4515c472010-02-03 11:58:03 +0000184 }
Steve Blocka7e24c12009-10-30 11:49:00 +0000185}
186
187
Steve Block8defd9f2010-07-08 12:39:36 +0100188void MacroAssembler::RecordWrite(Register object,
189 Register address,
190 Register value) {
Steve Block8defd9f2010-07-08 12:39:36 +0100191 // First, check if a write barrier is even needed. The tests below
192 // catch stores of Smis and stores into young gen.
193 Label done;
194
195 // Skip barrier if writing a smi.
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000196 STATIC_ASSERT(kSmiTag == 0);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000197 JumpIfSmi(value, &done, Label::kNear);
Steve Block8defd9f2010-07-08 12:39:36 +0100198
199 InNewSpace(object, value, equal, &done);
200
201 RecordWriteHelper(object, address, value);
202
203 bind(&done);
204
205 // Clobber all input registers when running with the debug-code flag
206 // turned on to provoke errors.
Steve Block44f0eee2011-05-26 01:26:41 +0100207 if (emit_debug_code()) {
Steve Block8defd9f2010-07-08 12:39:36 +0100208 mov(object, Immediate(BitCast<int32_t>(kZapValue)));
209 mov(address, Immediate(BitCast<int32_t>(kZapValue)));
210 mov(value, Immediate(BitCast<int32_t>(kZapValue)));
211 }
212}
213
214
Steve Blocka7e24c12009-10-30 11:49:00 +0000215#ifdef ENABLE_DEBUGGER_SUPPORT
Andrei Popescu402d9372010-02-26 13:31:12 +0000216void MacroAssembler::DebugBreak() {
217 Set(eax, Immediate(0));
Steve Block44f0eee2011-05-26 01:26:41 +0100218 mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak, isolate())));
Andrei Popescu402d9372010-02-26 13:31:12 +0000219 CEntryStub ces(1);
220 call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
221}
Steve Blocka7e24c12009-10-30 11:49:00 +0000222#endif
223
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100224
Steve Blocka7e24c12009-10-30 11:49:00 +0000225void MacroAssembler::Set(Register dst, const Immediate& x) {
226 if (x.is_zero()) {
Steve Block053d10c2011-06-13 19:13:29 +0100227 xor_(dst, Operand(dst)); // Shorter than mov.
Steve Blocka7e24c12009-10-30 11:49:00 +0000228 } else {
229 mov(dst, x);
230 }
231}
232
233
234void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
235 mov(dst, x);
236}
237
238
Steve Block053d10c2011-06-13 19:13:29 +0100239bool MacroAssembler::IsUnsafeImmediate(const Immediate& x) {
240 static const int kMaxImmediateBits = 17;
241 if (x.rmode_ != RelocInfo::NONE) return false;
242 return !is_intn(x.x_, kMaxImmediateBits);
243}
244
245
246void MacroAssembler::SafeSet(Register dst, const Immediate& x) {
247 if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
248 Set(dst, Immediate(x.x_ ^ jit_cookie()));
249 xor_(dst, jit_cookie());
250 } else {
251 Set(dst, x);
252 }
253}
254
255
256void MacroAssembler::SafePush(const Immediate& x) {
257 if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
258 push(Immediate(x.x_ ^ jit_cookie()));
259 xor_(Operand(esp, 0), Immediate(jit_cookie()));
260 } else {
261 push(x);
262 }
263}
264
265
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000266void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
267 // see ROOT_ACCESSOR macro in factory.h
268 Handle<Object> value(&isolate()->heap()->roots_address()[index]);
269 cmp(with, value);
270}
271
272
Steve Blocka7e24c12009-10-30 11:49:00 +0000273void MacroAssembler::CmpObjectType(Register heap_object,
274 InstanceType type,
275 Register map) {
276 mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
277 CmpInstanceType(map, type);
278}
279
280
281void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
282 cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
283 static_cast<int8_t>(type));
284}
285
286
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000287void MacroAssembler::CheckFastElements(Register map,
288 Label* fail,
289 Label::Distance distance) {
290 STATIC_ASSERT(JSObject::FAST_ELEMENTS == 0);
291 cmpb(FieldOperand(map, Map::kBitField2Offset),
292 Map::kMaximumBitField2FastElementValue);
293 j(above, fail, distance);
294}
295
296
Andrei Popescu31002712010-02-23 13:46:05 +0000297void MacroAssembler::CheckMap(Register obj,
298 Handle<Map> map,
299 Label* fail,
Ben Murdoch257744e2011-11-30 15:57:28 +0000300 SmiCheckType smi_check_type) {
301 if (smi_check_type == DO_SMI_CHECK) {
302 JumpIfSmi(obj, fail);
Andrei Popescu31002712010-02-23 13:46:05 +0000303 }
304 cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
305 j(not_equal, fail);
306}
307
308
Ben Murdoch257744e2011-11-30 15:57:28 +0000309void MacroAssembler::DispatchMap(Register obj,
310 Handle<Map> map,
311 Handle<Code> success,
312 SmiCheckType smi_check_type) {
313 Label fail;
314 if (smi_check_type == DO_SMI_CHECK) {
315 JumpIfSmi(obj, &fail);
316 }
317 cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
318 j(equal, success);
319
320 bind(&fail);
321}
322
323
Leon Clarkee46be812010-01-19 14:06:41 +0000324Condition MacroAssembler::IsObjectStringType(Register heap_object,
325 Register map,
326 Register instance_type) {
327 mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
328 movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000329 STATIC_ASSERT(kNotStringTag != 0);
Leon Clarkee46be812010-01-19 14:06:41 +0000330 test(instance_type, Immediate(kIsNotStringMask));
331 return zero;
332}
333
334
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100335void MacroAssembler::IsObjectJSObjectType(Register heap_object,
336 Register map,
337 Register scratch,
338 Label* fail) {
339 mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
340 IsInstanceJSObjectType(map, scratch, fail);
341}
342
343
344void MacroAssembler::IsInstanceJSObjectType(Register map,
345 Register scratch,
346 Label* fail) {
347 movzx_b(scratch, FieldOperand(map, Map::kInstanceTypeOffset));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000348 sub(Operand(scratch), Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
349 cmp(scratch,
350 LAST_NONCALLABLE_SPEC_OBJECT_TYPE - FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100351 j(above, fail);
352}
353
354
Steve Blocka7e24c12009-10-30 11:49:00 +0000355void MacroAssembler::FCmp() {
Ben Murdoch8b112d22011-06-08 16:22:53 +0100356 if (CpuFeatures::IsSupported(CMOV)) {
Steve Block3ce2e202009-11-05 08:53:23 +0000357 fucomip();
358 ffree(0);
359 fincstp();
360 } else {
361 fucompp();
362 push(eax);
363 fnstsw_ax();
364 sahf();
365 pop(eax);
366 }
Steve Blocka7e24c12009-10-30 11:49:00 +0000367}
368
369
Steve Block6ded16b2010-05-10 14:33:55 +0100370void MacroAssembler::AbortIfNotNumber(Register object) {
Andrei Popescu402d9372010-02-26 13:31:12 +0000371 Label ok;
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000372 JumpIfSmi(object, &ok);
Andrei Popescu402d9372010-02-26 13:31:12 +0000373 cmp(FieldOperand(object, HeapObject::kMapOffset),
Steve Block44f0eee2011-05-26 01:26:41 +0100374 isolate()->factory()->heap_number_map());
Steve Block6ded16b2010-05-10 14:33:55 +0100375 Assert(equal, "Operand not a number");
Andrei Popescu402d9372010-02-26 13:31:12 +0000376 bind(&ok);
377}
378
379
Steve Block6ded16b2010-05-10 14:33:55 +0100380void MacroAssembler::AbortIfNotSmi(Register object) {
381 test(object, Immediate(kSmiTagMask));
Iain Merrick75681382010-08-19 15:07:18 +0100382 Assert(equal, "Operand is not a smi");
383}
384
385
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100386void MacroAssembler::AbortIfNotString(Register object) {
387 test(object, Immediate(kSmiTagMask));
388 Assert(not_equal, "Operand is not a string");
389 push(object);
390 mov(object, FieldOperand(object, HeapObject::kMapOffset));
391 CmpInstanceType(object, FIRST_NONSTRING_TYPE);
392 pop(object);
393 Assert(below, "Operand is not a string");
394}
395
396
Iain Merrick75681382010-08-19 15:07:18 +0100397void MacroAssembler::AbortIfSmi(Register object) {
398 test(object, Immediate(kSmiTagMask));
399 Assert(not_equal, "Operand is a smi");
Steve Block6ded16b2010-05-10 14:33:55 +0100400}
401
402
Steve Blocka7e24c12009-10-30 11:49:00 +0000403void MacroAssembler::EnterFrame(StackFrame::Type type) {
404 push(ebp);
405 mov(ebp, Operand(esp));
406 push(esi);
407 push(Immediate(Smi::FromInt(type)));
408 push(Immediate(CodeObject()));
Steve Block44f0eee2011-05-26 01:26:41 +0100409 if (emit_debug_code()) {
410 cmp(Operand(esp, 0), Immediate(isolate()->factory()->undefined_value()));
Steve Blocka7e24c12009-10-30 11:49:00 +0000411 Check(not_equal, "code object not properly patched");
412 }
413}
414
415
416void MacroAssembler::LeaveFrame(StackFrame::Type type) {
Steve Block44f0eee2011-05-26 01:26:41 +0100417 if (emit_debug_code()) {
Steve Blocka7e24c12009-10-30 11:49:00 +0000418 cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
419 Immediate(Smi::FromInt(type)));
420 Check(equal, "stack frame types must match");
421 }
422 leave();
423}
424
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100425
426void MacroAssembler::EnterExitFramePrologue() {
Steve Blocka7e24c12009-10-30 11:49:00 +0000427 // Setup the frame structure on the stack.
428 ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
429 ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
430 ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
431 push(ebp);
432 mov(ebp, Operand(esp));
433
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100434 // Reserve room for entry stack pointer and push the code object.
Steve Blocka7e24c12009-10-30 11:49:00 +0000435 ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
Andrei Popescu402d9372010-02-26 13:31:12 +0000436 push(Immediate(0)); // Saved entry sp, patched before call.
437 push(Immediate(CodeObject())); // Accessed from ExitFrame::code_slot.
Steve Blocka7e24c12009-10-30 11:49:00 +0000438
439 // Save the frame pointer and the context in top.
Steve Block44f0eee2011-05-26 01:26:41 +0100440 ExternalReference c_entry_fp_address(Isolate::k_c_entry_fp_address,
441 isolate());
442 ExternalReference context_address(Isolate::k_context_address,
443 isolate());
Steve Blocka7e24c12009-10-30 11:49:00 +0000444 mov(Operand::StaticVariable(c_entry_fp_address), ebp);
445 mov(Operand::StaticVariable(context_address), esi);
Steve Blockd0582a62009-12-15 09:54:21 +0000446}
Steve Blocka7e24c12009-10-30 11:49:00 +0000447
Steve Blocka7e24c12009-10-30 11:49:00 +0000448
Ben Murdochb0fe1622011-05-05 13:52:32 +0100449void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) {
450 // Optionally save all XMM registers.
451 if (save_doubles) {
452 CpuFeatures::Scope scope(SSE2);
453 int space = XMMRegister::kNumRegisters * kDoubleSize + argc * kPointerSize;
454 sub(Operand(esp), Immediate(space));
Steve Block1e0659c2011-05-24 12:43:12 +0100455 const int offset = -2 * kPointerSize;
Ben Murdochb0fe1622011-05-05 13:52:32 +0100456 for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
457 XMMRegister reg = XMMRegister::from_code(i);
458 movdbl(Operand(ebp, offset - ((i + 1) * kDoubleSize)), reg);
459 }
460 } else {
461 sub(Operand(esp), Immediate(argc * kPointerSize));
462 }
Steve Blocka7e24c12009-10-30 11:49:00 +0000463
464 // Get the required frame alignment for the OS.
Steve Block44f0eee2011-05-26 01:26:41 +0100465 const int kFrameAlignment = OS::ActivationFrameAlignment();
Steve Blocka7e24c12009-10-30 11:49:00 +0000466 if (kFrameAlignment > 0) {
467 ASSERT(IsPowerOf2(kFrameAlignment));
468 and_(esp, -kFrameAlignment);
469 }
470
471 // Patch the saved entry sp.
472 mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
473}
474
475
Ben Murdochb0fe1622011-05-05 13:52:32 +0100476void MacroAssembler::EnterExitFrame(bool save_doubles) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100477 EnterExitFramePrologue();
Steve Blockd0582a62009-12-15 09:54:21 +0000478
479 // Setup argc and argv in callee-saved registers.
480 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
481 mov(edi, Operand(eax));
482 lea(esi, Operand(ebp, eax, times_4, offset));
483
Steve Block44f0eee2011-05-26 01:26:41 +0100484 // Reserve space for argc, argv and isolate.
485 EnterExitFrameEpilogue(3, save_doubles);
Steve Blockd0582a62009-12-15 09:54:21 +0000486}
487
488
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -0800489void MacroAssembler::EnterApiExitFrame(int argc) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100490 EnterExitFramePrologue();
Ben Murdochb0fe1622011-05-05 13:52:32 +0100491 EnterExitFrameEpilogue(argc, false);
Steve Blockd0582a62009-12-15 09:54:21 +0000492}
493
494
Ben Murdochb0fe1622011-05-05 13:52:32 +0100495void MacroAssembler::LeaveExitFrame(bool save_doubles) {
496 // Optionally restore all XMM registers.
497 if (save_doubles) {
498 CpuFeatures::Scope scope(SSE2);
Steve Block1e0659c2011-05-24 12:43:12 +0100499 const int offset = -2 * kPointerSize;
Ben Murdochb0fe1622011-05-05 13:52:32 +0100500 for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
501 XMMRegister reg = XMMRegister::from_code(i);
502 movdbl(reg, Operand(ebp, offset - ((i + 1) * kDoubleSize)));
503 }
504 }
505
Steve Blocka7e24c12009-10-30 11:49:00 +0000506 // Get the return address from the stack and restore the frame pointer.
507 mov(ecx, Operand(ebp, 1 * kPointerSize));
508 mov(ebp, Operand(ebp, 0 * kPointerSize));
509
510 // Pop the arguments and the receiver from the caller stack.
511 lea(esp, Operand(esi, 1 * kPointerSize));
512
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -0800513 // Push the return address to get ready to return.
514 push(ecx);
515
516 LeaveExitFrameEpilogue();
517}
518
519void MacroAssembler::LeaveExitFrameEpilogue() {
Steve Blocka7e24c12009-10-30 11:49:00 +0000520 // Restore current context from top and clear it in debug mode.
Steve Block44f0eee2011-05-26 01:26:41 +0100521 ExternalReference context_address(Isolate::k_context_address, isolate());
Steve Blocka7e24c12009-10-30 11:49:00 +0000522 mov(esi, Operand::StaticVariable(context_address));
523#ifdef DEBUG
524 mov(Operand::StaticVariable(context_address), Immediate(0));
525#endif
526
Steve Blocka7e24c12009-10-30 11:49:00 +0000527 // Clear the top frame.
Steve Block44f0eee2011-05-26 01:26:41 +0100528 ExternalReference c_entry_fp_address(Isolate::k_c_entry_fp_address,
529 isolate());
Steve Blocka7e24c12009-10-30 11:49:00 +0000530 mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
531}
532
533
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -0800534void MacroAssembler::LeaveApiExitFrame() {
535 mov(esp, Operand(ebp));
536 pop(ebp);
537
538 LeaveExitFrameEpilogue();
539}
540
541
Steve Blocka7e24c12009-10-30 11:49:00 +0000542void MacroAssembler::PushTryHandler(CodeLocation try_location,
543 HandlerType type) {
544 // Adjust this code if not the case.
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000545 STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
546 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
547 STATIC_ASSERT(StackHandlerConstants::kContextOffset == 1 * kPointerSize);
548 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize);
549 STATIC_ASSERT(StackHandlerConstants::kStateOffset == 3 * kPointerSize);
550 STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize);
Steve Blocka7e24c12009-10-30 11:49:00 +0000551 // The pc (return address) is already on TOS.
552 if (try_location == IN_JAVASCRIPT) {
553 if (type == TRY_CATCH_HANDLER) {
554 push(Immediate(StackHandler::TRY_CATCH));
555 } else {
556 push(Immediate(StackHandler::TRY_FINALLY));
557 }
558 push(ebp);
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000559 push(esi);
Steve Blocka7e24c12009-10-30 11:49:00 +0000560 } else {
561 ASSERT(try_location == IN_JS_ENTRY);
562 // The frame pointer does not point to a JS frame so we save NULL
563 // for ebp. We expect the code throwing an exception to check ebp
564 // before dereferencing it to restore the context.
565 push(Immediate(StackHandler::ENTRY));
566 push(Immediate(0)); // NULL frame pointer.
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000567 push(Immediate(Smi::FromInt(0))); // No context.
Steve Blocka7e24c12009-10-30 11:49:00 +0000568 }
569 // Save the current handler as the next handler.
Steve Block44f0eee2011-05-26 01:26:41 +0100570 push(Operand::StaticVariable(ExternalReference(Isolate::k_handler_address,
571 isolate())));
Steve Blocka7e24c12009-10-30 11:49:00 +0000572 // Link this handler as the new current one.
Steve Block44f0eee2011-05-26 01:26:41 +0100573 mov(Operand::StaticVariable(ExternalReference(Isolate::k_handler_address,
574 isolate())),
575 esp);
Steve Blocka7e24c12009-10-30 11:49:00 +0000576}
577
578
Leon Clarkee46be812010-01-19 14:06:41 +0000579void MacroAssembler::PopTryHandler() {
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000580 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
Steve Block44f0eee2011-05-26 01:26:41 +0100581 pop(Operand::StaticVariable(ExternalReference(Isolate::k_handler_address,
582 isolate())));
Leon Clarkee46be812010-01-19 14:06:41 +0000583 add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize));
584}
585
586
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100587void MacroAssembler::Throw(Register value) {
588 // Adjust this code if not the case.
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000589 STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
590 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
591 STATIC_ASSERT(StackHandlerConstants::kContextOffset == 1 * kPointerSize);
592 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize);
593 STATIC_ASSERT(StackHandlerConstants::kStateOffset == 3 * kPointerSize);
594 STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize);
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100595 // eax must hold the exception.
596 if (!value.is(eax)) {
597 mov(eax, value);
598 }
599
600 // Drop the sp to the top of the handler.
Steve Block44f0eee2011-05-26 01:26:41 +0100601 ExternalReference handler_address(Isolate::k_handler_address,
602 isolate());
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100603 mov(esp, Operand::StaticVariable(handler_address));
604
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000605 // Restore next handler, context, and frame pointer; discard handler state.
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100606 pop(Operand::StaticVariable(handler_address));
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000607 pop(esi); // Context.
608 pop(ebp); // Frame pointer.
609 pop(edx); // State.
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100610
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000611 // If the handler is a JS frame, restore the context to the frame.
612 // (edx == ENTRY) == (ebp == 0) == (esi == 0), so we could test any
613 // of them.
Ben Murdoch257744e2011-11-30 15:57:28 +0000614 Label skip;
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000615 cmp(Operand(edx), Immediate(StackHandler::ENTRY));
Ben Murdoch257744e2011-11-30 15:57:28 +0000616 j(equal, &skip, Label::kNear);
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000617 mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100618 bind(&skip);
619
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100620 ret(0);
621}
622
623
624void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
625 Register value) {
626 // Adjust this code if not the case.
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000627 STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
628 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
629 STATIC_ASSERT(StackHandlerConstants::kContextOffset == 1 * kPointerSize);
630 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize);
631 STATIC_ASSERT(StackHandlerConstants::kStateOffset == 3 * kPointerSize);
632 STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize);
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100633
634 // eax must hold the exception.
635 if (!value.is(eax)) {
636 mov(eax, value);
637 }
638
639 // Drop sp to the top stack handler.
Steve Block44f0eee2011-05-26 01:26:41 +0100640 ExternalReference handler_address(Isolate::k_handler_address,
641 isolate());
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100642 mov(esp, Operand::StaticVariable(handler_address));
643
644 // Unwind the handlers until the ENTRY handler is found.
Ben Murdoch257744e2011-11-30 15:57:28 +0000645 Label loop, done;
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100646 bind(&loop);
647 // Load the type of the current stack handler.
648 const int kStateOffset = StackHandlerConstants::kStateOffset;
649 cmp(Operand(esp, kStateOffset), Immediate(StackHandler::ENTRY));
Ben Murdoch257744e2011-11-30 15:57:28 +0000650 j(equal, &done, Label::kNear);
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100651 // Fetch the next handler in the list.
652 const int kNextOffset = StackHandlerConstants::kNextOffset;
653 mov(esp, Operand(esp, kNextOffset));
654 jmp(&loop);
655 bind(&done);
656
657 // Set the top handler address to next handler past the current ENTRY handler.
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100658 pop(Operand::StaticVariable(handler_address));
659
660 if (type == OUT_OF_MEMORY) {
661 // Set external caught exception to false.
Steve Block44f0eee2011-05-26 01:26:41 +0100662 ExternalReference external_caught(
663 Isolate::k_external_caught_exception_address,
664 isolate());
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100665 mov(eax, false);
666 mov(Operand::StaticVariable(external_caught), eax);
667
668 // Set pending exception and eax to out of memory exception.
Steve Block44f0eee2011-05-26 01:26:41 +0100669 ExternalReference pending_exception(Isolate::k_pending_exception_address,
670 isolate());
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100671 mov(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException()));
672 mov(Operand::StaticVariable(pending_exception), eax);
673 }
674
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000675 // Discard the context saved in the handler and clear the context pointer.
676 pop(edx);
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100677 Set(esi, Immediate(0));
678
679 // Restore fp from handler and discard handler state.
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100680 pop(ebp);
681 pop(edx); // State.
682
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100683 ret(0);
684}
685
686
// Emits a security check for an access through a JSGlobalProxy in
// |holder_reg|: falls through if the current lexical context may access
// the holder's global context (identical contexts, or matching security
// tokens), and jumps to |miss| otherwise.
// |scratch| is clobbered; |holder_reg| is preserved (it is saved on the
// stack while used as a temporary).
void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));

  // Load current lexical context from the stack frame.
  mov(scratch, Operand(ebp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (emit_debug_code()) {
    cmp(Operand(scratch), Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
  }
  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, offset));
  mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check the context is a global context (debug-mode sanity check only;
  // no code is emitted for release builds).
  if (emit_debug_code()) {
    push(scratch);
    // Read the first word and compare to global_context_map.
    mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
    cmp(scratch, isolate()->factory()->global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(scratch);
  }

  // Check if both contexts are the same; if so no token check is needed.
  cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  j(equal, &same_contexts);

  // Compare security tokens, save holder_reg on the stack so we can use it
  // as a temporary register.
  //
  // TODO(119): avoid push(holder_reg)/pop(holder_reg)
  push(holder_reg);
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));

  // Check the context is a global context.
  if (emit_debug_code()) {
    cmp(holder_reg, isolate()->factory()->null_value());
    Check(not_equal, "JSGlobalProxy::context() should not be null.");

    push(holder_reg);
    // Read the first word and compare to global_context_map(),
    mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    cmp(holder_reg, isolate()->factory()->global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);
  }

  // Both security tokens live at the same slot index in their contexts.
  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, token_offset));
  cmp(scratch, FieldOperand(holder_reg, token_offset));
  pop(holder_reg);
  j(not_equal, miss);

  bind(&same_contexts);
}
753
754
// Emits a probe of a NumberDictionary (slow-case elements backing store)
// for the smi |key|. On success falls through with the looked-up value in
// |result|; jumps to |miss| if the key is absent after kProbes attempts or
// if the found property is not a NORMAL data property.
void MacroAssembler::LoadFromNumberDictionary(Label* miss,
                                              Register elements,
                                              Register key,
                                              Register r0,
                                              Register r1,
                                              Register r2,
                                              Register result) {
  // Register use:
  //
  // elements - holds the slow-case elements of the receiver and is unchanged.
  //
  // key - holds the smi key on entry and is unchanged.
  //
  // Scratch registers:
  //
  // r0 - holds the untagged key on entry and holds the hash once computed.
  //
  // r1 - used to hold the capacity mask of the dictionary
  //
  // r2 - used for the index into the dictionary.
  //
  // result - holds the result on exit if the load succeeds and we fall through.

  Label done;

  // Compute the hash code from the untagged key. This must be kept in sync
  // with ComputeIntegerHash in utils.h.
  //
  // hash = ~hash + (hash << 15);
  mov(r1, r0);
  not_(r0);
  shl(r1, 15);
  add(r0, Operand(r1));
  // hash = hash ^ (hash >> 12);
  mov(r1, r0);
  shr(r1, 12);
  xor_(r0, Operand(r1));
  // hash = hash + (hash << 2);
  lea(r0, Operand(r0, r0, times_4, 0));
  // hash = hash ^ (hash >> 4);
  mov(r1, r0);
  shr(r1, 4);
  xor_(r0, Operand(r1));
  // hash = hash * 2057;
  imul(r0, r0, 2057);
  // hash = hash ^ (hash >> 16);
  mov(r1, r0);
  shr(r1, 16);
  xor_(r0, Operand(r1));

  // Compute capacity mask (capacity is always a power of two, so
  // capacity - 1 masks a hash into a valid bucket index).
  mov(r1, FieldOperand(elements, NumberDictionary::kCapacityOffset));
  shr(r1, kSmiTagSize);  // convert smi to int
  dec(r1);

  // Generate an unrolled loop that performs a few probes before giving up.
  const int kProbes = 4;
  for (int i = 0; i < kProbes; i++) {
    // Use r2 for index calculations and keep the hash intact in r0.
    mov(r2, r0);
    // Compute the masked index: (hash + i + i * i) & mask.
    if (i > 0) {
      add(Operand(r2), Immediate(NumberDictionary::GetProbeOffset(i)));
    }
    and_(r2, Operand(r1));

    // Scale the index by multiplying by the entry size.
    ASSERT(NumberDictionary::kEntrySize == 3);
    lea(r2, Operand(r2, r2, times_2, 0));  // r2 = r2 * 3

    // Check if the key matches.
    cmp(key, FieldOperand(elements,
                          r2,
                          times_pointer_size,
                          NumberDictionary::kElementsStartOffset));
    if (i != (kProbes - 1)) {
      j(equal, &done);
    } else {
      j(not_equal, miss);
    }
  }

  bind(&done);
  // Check that the value is a normal property.
  const int kDetailsOffset =
      NumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  ASSERT_EQ(NORMAL, 0);
  test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
       Immediate(PropertyDetails::TypeField::mask() << kSmiTagSize));
  j(not_zero, miss);

  // Get the value at the masked, scaled index.
  const int kValueOffset =
      NumberDictionary::kElementsStartOffset + kPointerSize;
  mov(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
}
851
852
Steve Blocka7e24c12009-10-30 11:49:00 +0000853void MacroAssembler::LoadAllocationTopHelper(Register result,
Steve Blocka7e24c12009-10-30 11:49:00 +0000854 Register scratch,
855 AllocationFlags flags) {
856 ExternalReference new_space_allocation_top =
Steve Block44f0eee2011-05-26 01:26:41 +0100857 ExternalReference::new_space_allocation_top_address(isolate());
Steve Blocka7e24c12009-10-30 11:49:00 +0000858
859 // Just return if allocation top is already known.
860 if ((flags & RESULT_CONTAINS_TOP) != 0) {
861 // No use of scratch if allocation top is provided.
862 ASSERT(scratch.is(no_reg));
863#ifdef DEBUG
864 // Assert that result actually contains top on entry.
865 cmp(result, Operand::StaticVariable(new_space_allocation_top));
866 Check(equal, "Unexpected allocation top");
867#endif
868 return;
869 }
870
871 // Move address of new object to result. Use scratch register if available.
872 if (scratch.is(no_reg)) {
873 mov(result, Operand::StaticVariable(new_space_allocation_top));
874 } else {
Steve Blocka7e24c12009-10-30 11:49:00 +0000875 mov(Operand(scratch), Immediate(new_space_allocation_top));
876 mov(result, Operand(scratch, 0));
877 }
878}
879
880
881void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
882 Register scratch) {
Steve Block44f0eee2011-05-26 01:26:41 +0100883 if (emit_debug_code()) {
Steve Blockd0582a62009-12-15 09:54:21 +0000884 test(result_end, Immediate(kObjectAlignmentMask));
885 Check(zero, "Unaligned allocation in new space");
886 }
887
Steve Blocka7e24c12009-10-30 11:49:00 +0000888 ExternalReference new_space_allocation_top =
Steve Block44f0eee2011-05-26 01:26:41 +0100889 ExternalReference::new_space_allocation_top_address(isolate());
Steve Blocka7e24c12009-10-30 11:49:00 +0000890
891 // Update new top. Use scratch if available.
892 if (scratch.is(no_reg)) {
893 mov(Operand::StaticVariable(new_space_allocation_top), result_end);
894 } else {
895 mov(Operand(scratch, 0), result_end);
896 }
897}
898
899
// Allocates |object_size| bytes in new space via inline bump-pointer
// allocation. On success falls through with the object in |result|
// (tagged if TAG_OBJECT is set); jumps to |gc_required| when new space is
// exhausted or inline allocation is disabled.
// |result_end| and |scratch| are clobbered (either may be no_reg).
void MacroAssembler::AllocateInNewSpace(int object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      if (result_end.is_valid()) {
        mov(result_end, Immediate(0x7191));
      }
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
    }
    // Inline allocation disabled: always take the runtime path.
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Without a valid result_end, the bump is done in result itself and
  // undone below via sub().
  Register top_reg = result_end.is_valid() ? result_end : result;

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate());

  if (!top_reg.is(result)) {
    mov(top_reg, result);
  }
  add(Operand(top_reg), Immediate(object_size));
  j(carry, gc_required);  // address arithmetic overflowed
  cmp(top_reg, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(top_reg, scratch);

  // Tag result if requested.
  if (top_reg.is(result)) {
    // result currently holds the new top; rewind it to the object start,
    // folding in the heap-object tag when requested.
    if ((flags & TAG_OBJECT) != 0) {
      sub(Operand(result), Immediate(object_size - kHeapObjectTag));
    } else {
      sub(Operand(result), Immediate(object_size));
    }
  } else if ((flags & TAG_OBJECT) != 0) {
    add(Operand(result), Immediate(kHeapObjectTag));
  }
}
953
954
// Allocates a variable-sized object of
// header_size + element_count * element_size bytes in new space.
// On success falls through with the object in |result| (tagged if
// TAG_OBJECT); jumps to |gc_required| otherwise. |result_end| receives
// the new allocation top; |scratch| may be no_reg.
void MacroAssembler::AllocateInNewSpace(int header_size,
                                        ScaleFactor element_size,
                                        Register element_count,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // Register element_count is not modified by the function.
    }
    // Inline allocation disabled: always take the runtime path.
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate());

  // We assume that element_count*element_size + header_size does not
  // overflow.
  lea(result_end, Operand(element_count, element_size, header_size));
  add(result_end, Operand(result));
  j(carry, gc_required);
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}
1001
1002
// Allocates |object_size| (a register holding a byte count) in new space.
// On success falls through with the object in |result| (tagged if
// TAG_OBJECT); jumps to |gc_required| otherwise. |result_end| receives
// the new allocation top; |object_size| is left unchanged unless it
// aliases |result_end|.
void MacroAssembler::AllocateInNewSpace(Register object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // object_size is left unchanged by this function.
    }
    // Inline allocation disabled: always take the runtime path.
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate());
  if (!object_size.is(result_end)) {
    mov(result_end, object_size);
  }
  add(result_end, Operand(result));
  j(carry, gc_required);
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}
1046
1047
// Undoes the most recent new-space allocation by resetting the allocation
// top to the start of |object|. Only valid when |object| was the last
// object allocated and no GC has happened since.
void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Make sure the object has no tag before resetting top.
  and_(Operand(object), Immediate(~kHeapObjectTagMask));
#ifdef DEBUG
  // The object must lie below the current top, or it was not the last
  // allocation.
  cmp(object, Operand::StaticVariable(new_space_allocation_top));
  Check(below, "Undo allocation of non allocated memory");
#endif
  mov(Operand::StaticVariable(new_space_allocation_top), object);
}
1060
1061
// Allocates an (uninitialized-value) HeapNumber in new space and installs
// its map. Falls through with the tagged object in |result|; jumps to
// |gc_required| if allocation fails. |scratch1|/|scratch2| are clobbered.
void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate heap number in new space.
  AllocateInNewSpace(HeapNumber::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The value field is left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->heap_number_map()));
}
1078
1079
// Allocates a SeqTwoByteString of |length| (untagged) characters in new
// space, setting its map, length and (empty) hash field; character
// contents are left uninitialized. Jumps to |gc_required| on failure.
// All three scratch registers are clobbered.
void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  ASSERT(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
  and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));

  // Allocate two byte string in new space.
  AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);  // length is stored as a smi
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
1113
1114
// Allocates a SeqAsciiString of |length| (untagged) characters in new
// space, setting its map, length and (empty) hash field; character
// contents are left uninitialized. Jumps to |gc_required| on failure.
// All three scratch registers are clobbered.
void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, length);
  ASSERT(kCharSize == 1);
  // Round the byte count up to the object alignment boundary.
  add(Operand(scratch1), Immediate(kObjectAlignmentMask));
  and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));

  // Allocate ascii string in new space.
  AllocateInNewSpace(SeqAsciiString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->ascii_string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);  // length is stored as a smi
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
1148
1149
// Allocates a SeqAsciiString of compile-time-constant |length| characters
// in new space, setting its map, length and (empty) hash field; character
// contents are left uninitialized. Jumps to |gc_required| on failure.
void MacroAssembler::AllocateAsciiString(Register result,
                                         int length,
                                         Register scratch1,
                                         Register scratch2,
                                         Label* gc_required) {
  ASSERT(length > 0);

  // Allocate ascii string in new space; size is known statically.
  AllocateInNewSpace(SeqAsciiString::SizeFor(length),
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->ascii_string_map()));
  mov(FieldOperand(result, String::kLengthOffset),
      Immediate(Smi::FromInt(length)));
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
1173
1174
// Allocates a two-byte ConsString in new space and installs its map.
// Falls through with the tagged object in |result|; jumps to
// |gc_required| on failure. |scratch1|/|scratch2| are clobbered.
void MacroAssembler::AllocateConsString(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate the cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->cons_string_map()));
}
1191
1192
// Allocates an ASCII ConsString in new space and installs its map.
// Falls through with the tagged object in |result|; jumps to
// |gc_required| on failure. |scratch1|/|scratch2| are clobbered.
void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  // Allocate the cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->cons_ascii_string_map()));
}
1209
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08001210
// Allocates a two-byte SlicedString in new space and installs its map.
// Falls through with the tagged object in |result|; jumps to
// |gc_required| on failure. |scratch1|/|scratch2| are clobbered.
void MacroAssembler::AllocateSlicedString(Register result,
                                          Register scratch1,
                                          Register scratch2,
                                          Label* gc_required) {
  // Allocate the sliced string object in new space.
  AllocateInNewSpace(SlicedString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->sliced_string_map()));
}
1227
1228
// Allocates an ASCII SlicedString in new space and installs its map.
// Falls through with the tagged object in |result|; jumps to
// |gc_required| on failure. |scratch1|/|scratch2| are clobbered.
void MacroAssembler::AllocateAsciiSlicedString(Register result,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  // Allocate the sliced string object in new space.
  AllocateInNewSpace(SlicedString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->sliced_ascii_string_map()));
}
1245
1246
Ben Murdochb8e0da22011-05-16 14:20:40 +01001247// Copy memory, byte-by-byte, from source to destination. Not optimized for
1248// long or aligned copies. The contents of scratch and length are destroyed.
1249// Source and destination are incremented by length.
1250// Many variants of movsb, loop unrolling, word moves, and indexed operands
1251// have been tried here already, and this is fastest.
1252// A simpler loop is faster on small copies, but 30% slower on large ones.
1253// The cld() instruction must have been emitted, to set the direction flag(),
1254// before calling this function.
1255void MacroAssembler::CopyBytes(Register source,
1256 Register destination,
1257 Register length,
1258 Register scratch) {
1259 Label loop, done, short_string, short_loop;
1260 // Experimentation shows that the short string loop is faster if length < 10.
1261 cmp(Operand(length), Immediate(10));
1262 j(less_equal, &short_string);
1263
1264 ASSERT(source.is(esi));
1265 ASSERT(destination.is(edi));
1266 ASSERT(length.is(ecx));
1267
1268 // Because source is 4-byte aligned in our uses of this function,
1269 // we keep source aligned for the rep_movs call by copying the odd bytes
1270 // at the end of the ranges.
1271 mov(scratch, Operand(source, length, times_1, -4));
1272 mov(Operand(destination, length, times_1, -4), scratch);
1273 mov(scratch, ecx);
1274 shr(ecx, 2);
1275 rep_movs();
1276 and_(Operand(scratch), Immediate(0x3));
1277 add(destination, Operand(scratch));
1278 jmp(&done);
1279
1280 bind(&short_string);
1281 test(length, Operand(length));
1282 j(zero, &done);
1283
1284 bind(&short_loop);
1285 mov_b(scratch, Operand(source, 0));
1286 mov_b(Operand(destination, 0), scratch);
1287 inc(source);
1288 inc(destination);
1289 dec(length);
1290 j(not_zero, &short_loop);
1291
1292 bind(&done);
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08001293}
1294
Steve Blockd0582a62009-12-15 09:54:21 +00001295
// Emits a check for the -0.0 result of an integer operation: if |result|
// is zero and |op| (the original operand) is negative, jumps to
// |then_label|; otherwise falls through.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok);  // non-zero result can never be -0
  test(op, Operand(op));
  j(sign, then_label);  // zero result from a negative operand => -0
  bind(&ok);
}
1306
1307
// Emits a check for the -0.0 result of a two-operand integer operation:
// if |result| is zero and either |op1| or |op2| is negative, jumps to
// |then_label|; otherwise falls through. |scratch| is clobbered.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok);  // non-zero result can never be -0
  // The or of the operands has its sign bit set iff either is negative.
  mov(scratch, Operand(op1));
  or_(scratch, Operand(op2));
  j(sign, then_label);
  bind(&ok);
}
1321
1322
// Emits code that loads the prototype of the JSFunction in |function| into
// |result|, resolving the initial map when present. Jumps to |miss| when
// |function| is a smi, not a function, or has a hole prototype.
// |scratch| is clobbered.
void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss) {
  // Check that the receiver isn't a smi.
  JumpIfSmi(function, miss);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss);

  // Make sure that the function has an instance prototype.
  Label non_instance;
  movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
  test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance);

  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(Operand(result), Immediate(isolate()->factory()->the_hole_value()));
  j(equal, miss);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  mov(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}
1367
1368
// Emits a call to |stub|'s generated code, recording |ast_id| with the
// call site's relocation info.
void MacroAssembler::CallStub(CodeStub* stub, unsigned ast_id) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
}
1373
1374
// Like CallStub, but stub-code generation itself may fail (e.g. due to
// allocation failure); the failure is propagated to the caller instead of
// being handled here.
MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Object* result;
  { MaybeObject* maybe_result = stub->TryGetCode();
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }
  call(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
  return result;
}
1384
1385
// Emits a tail call (jump) to |stub|'s generated code.
void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
}
1390
1391
// Like TailCallStub, but stub-code generation itself may fail; the failure
// is propagated to the caller instead of being handled here.
MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Object* result;
  { MaybeObject* maybe_result = stub->TryGetCode();
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }
  jmp(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
  return result;
}
1401
1402
// Emits a stub return, popping |argc| - 1 extra arguments off the caller's
// stack (the first argument is consumed by the return itself).
void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}
1407
1408
// Emits the bail-out path for a runtime call with a mismatched argument
// count: drops |num_arguments| stack slots and leaves undefined in eax.
void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    add(Operand(esp), Immediate(num_arguments * kPointerSize));
  }
  mov(eax, Immediate(isolate()->factory()->undefined_value()));
}
1415
1416
// Emits code that extracts the cached array index from the string hash
// field in |hash| and leaves it, smi-tagged, in |index|. |hash| is
// clobbered when it is not the same register as |index|.
void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it does not conflict.
  ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  // We want the smi-tagged index in key. kArrayIndexValueMask has zeros in
  // the low kHashShift bits.
  and_(hash, String::kArrayIndexValueMask);
  STATIC_ASSERT(String::kHashShift >= kSmiTagSize && kSmiTag == 0);
  if (String::kHashShift > kSmiTagSize) {
    // Shift down so that exactly kSmiTagSize tag bits remain below the value.
    shr(hash, String::kHashShift - kSmiTagSize);
  }
  if (!index.is(hash)) {
    mov(index, hash);
  }
}
1434
1435
Steve Blocka7e24c12009-10-30 11:49:00 +00001436void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
1437 CallRuntime(Runtime::FunctionForId(id), num_arguments);
1438}
1439
1440
// Calls the runtime function identified by |id| through a CEntryStub that
// saves double (XMM/FPU) registers across the call. The argument count is
// taken from the function descriptor.
void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
  const Runtime::Function* function = Runtime::FunctionForId(id);
  // CEntryStub calling convention: eax = argument count, ebx = entry point.
  Set(eax, Immediate(function->nargs));
  mov(ebx, Immediate(ExternalReference(function, isolate())));
  CEntryStub ces(1);
  ces.SaveDoubles();
  CallStub(&ces);
}
1449
1450
John Reck59135872010-11-02 12:39:01 -07001451MaybeObject* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
1452 int num_arguments) {
Leon Clarkee46be812010-01-19 14:06:41 +00001453 return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
1454}
1455
1456
// Calls the runtime function |f| with |num_arguments| arguments via
// CEntryStub. Emits the illegal-operation bail-out when the declared and
// actual argument counts disagree.
void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  // CEntryStub calling convention: eax = argument count, ebx = entry point.
  Set(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f, isolate())));
  CEntryStub ces(1);
  CallStub(&ces);
}
1476
1477
// Like CallRuntime(Function*, ...) but propagates stub-code allocation
// failures to the caller instead of handling them here.
MaybeObject* MacroAssembler::TryCallRuntime(const Runtime::Function* f,
                                            int num_arguments) {
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    // Since we did not call the stub, there was no allocation failure.
    // Return some non-failure object.
    return isolate()->heap()->undefined_value();
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  // CEntryStub calling convention: eax = argument count, ebx = entry point.
  Set(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f, isolate())));
  CEntryStub ces(1);
  return TryCallStub(&ces);
}
1496
1497
Ben Murdochbb769b22010-08-11 14:56:33 +01001498void MacroAssembler::CallExternalReference(ExternalReference ref,
1499 int num_arguments) {
1500 mov(eax, Immediate(num_arguments));
1501 mov(ebx, Immediate(ref));
1502
1503 CEntryStub stub(1);
1504 CallStub(&stub);
1505}
1506
1507
// Tail-calls the external runtime entry |ext|: loads the argument count
// into eax and jumps (never returns here).  |result_size| is not used in
// this body; it is kept for signature parity with other platforms.
void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  JumpToExternalReference(ext);
}
1518
1519
// Failure-propagating variant of TailCallExternalReference: returns the
// MaybeObject produced while materializing the stub code.  |result_size|
// is unused in this body (kept for cross-platform signature parity).
MaybeObject* MacroAssembler::TryTailCallExternalReference(
    const ExternalReference& ext, int num_arguments, int result_size) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  return TryJumpToExternalReference(ext);
}
1529
1530
// Tail-calls the runtime function identified by |fid| by resolving it to
// an ExternalReference for the current isolate.
void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid, isolate()),
                            num_arguments,
                            result_size);
}
1538
1539
// Failure-propagating variant of TailCallRuntime.
MaybeObject* MacroAssembler::TryTailCallRuntime(Runtime::FunctionId fid,
                                                int num_arguments,
                                                int result_size) {
  return TryTailCallExternalReference(
      ExternalReference(fid, isolate()), num_arguments, result_size);
}
1546
1547
// If true, a Handle<T> returned by value from a function with cdecl
// calling convention is returned directly as the value of its location_
// field, in register eax.
// If false, it is returned as a pointer to a memory region preallocated
// by the caller.  A pointer to this region must be passed to the function
// as an implicit first argument (the usual cdecl struct-return scheme).
#if defined(USING_BSD_ABI) || defined(__MINGW32__) || defined(__CYGWIN__)
static const bool kReturnHandlesDirectly = true;
#else
static const bool kReturnHandlesDirectly = false;
#endif
1559
1560
1561Operand ApiParameterOperand(int index) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001562 return Operand(
1563 esp, (index + (kReturnHandlesDirectly ? 0 : 1)) * kPointerSize);
John Reck59135872010-11-02 12:39:01 -07001564}
1565
1566
// Sets up an API exit frame for a callback into the embedder with |argc|
// stack arguments.  When the ABI returns handles indirectly, two extra
// slots are reserved (output slot + pointer to it) and esi is left
// pointing at the output slot for TryCallApiFunctionAndReturn.
void MacroAssembler::PrepareCallApiFunction(int argc) {
  if (kReturnHandlesDirectly) {
    EnterApiExitFrame(argc);
    // When handles are returned directly we don't have to allocate extra
    // space for and pass an out parameter.
    if (emit_debug_code()) {
      // Zap esi in debug mode so an accidental use of the (unset) output
      // slot pointer is easy to spot.
      mov(esi, Immediate(BitCast<int32_t>(kZapValue)));
    }
  } else {
    // We allocate two additional slots: return value and pointer to it.
    EnterApiExitFrame(argc + 2);

    // The argument slots are filled as follows:
    //
    // n + 1: output slot
    // n: arg n
    // ...
    // 1: arg1
    // 0: pointer to the output slot

    lea(esi, Operand(esp, (argc + 1) * kPointerSize));
    mov(Operand(esp, 0 * kPointerSize), esi);
    if (emit_debug_code()) {
      // Clear the output slot so stale data is never mistaken for a
      // returned handle.
      mov(Operand(esi, 0), Immediate(0));
    }
  }
}
1594
1595
// Calls an API |function|, then unwinds the HandleScope bookkeeping,
// dereferences the returned handle (undefined for an empty handle),
// deletes handle-scope extensions if the limit moved, promotes a
// scheduled exception if one was set, and finally returns to JS dropping
// |stack_space| pointer-sized slots.  ebx and edi cache the handle-scope
// next/limit values across the call; esi is expected to hold the output
// slot pointer when handles are returned indirectly (see
// PrepareCallApiFunction).  Returns a Failure only if the tail-call
// machinery for promoting a scheduled exception could not be set up.
MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn(ApiFunction* function,
                                                         int stack_space) {
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address();
  ExternalReference level_address =
      ExternalReference::handle_scope_level_address();

  // Allocate HandleScope in callee-save registers.
  mov(ebx, Operand::StaticVariable(next_address));
  mov(edi, Operand::StaticVariable(limit_address));
  add(Operand::StaticVariable(level_address), Immediate(1));

  // Call the api function!
  call(function->address(), RelocInfo::RUNTIME_ENTRY);

  if (!kReturnHandlesDirectly) {
    // PrepareCallApiFunction saved pointer to the output slot into
    // callee-save register esi.
    mov(eax, Operand(esi, 0));
  }

  Label empty_handle;
  Label prologue;
  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;

  // Check if the result handle holds 0.
  test(eax, Operand(eax));
  j(zero, &empty_handle);
  // It was non-zero.  Dereference to get the result value.
  mov(eax, Operand(eax, 0));
  bind(&prologue);
  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  mov(Operand::StaticVariable(next_address), ebx);
  sub(Operand::StaticVariable(level_address), Immediate(1));
  Assert(above_equal, "Invalid HandleScope level");
  cmp(edi, Operand::StaticVariable(limit_address));
  j(not_equal, &delete_allocated_handles);
  bind(&leave_exit_frame);

  // Check if the function scheduled an exception.
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate());
  cmp(Operand::StaticVariable(scheduled_exception_address),
      Immediate(isolate()->factory()->the_hole_value()));
  j(not_equal, &promote_scheduled_exception);
  LeaveApiExitFrame();
  ret(stack_space * kPointerSize);
  bind(&promote_scheduled_exception);
  MaybeObject* result =
      TryTailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);
  if (result->IsFailure()) {
    return result;
  }
  bind(&empty_handle);
  // It was zero; the result is undefined.
  mov(eax, isolate()->factory()->undefined_value());
  jmp(&prologue);

  // HandleScope limit has changed. Delete allocated extensions.
  ExternalReference delete_extensions =
      ExternalReference::delete_handle_scope_extensions(isolate());
  bind(&delete_allocated_handles);
  mov(Operand::StaticVariable(limit_address), edi);
  // Preserve the result value (eax) in edi across the extensions call.
  mov(edi, eax);
  mov(Operand(esp, 0), Immediate(ExternalReference::isolate_address()));
  mov(eax, Immediate(delete_extensions));
  call(Operand(eax));
  mov(eax, edi);
  jmp(&leave_exit_frame);

  return result;
}
1673
1674
Steve Block6ded16b2010-05-10 14:33:55 +01001675void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001676 // Set the entry point and jump to the C entry runtime stub.
1677 mov(ebx, Immediate(ext));
1678 CEntryStub ces(1);
1679 jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
1680}
1681
1682
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08001683MaybeObject* MacroAssembler::TryJumpToExternalReference(
1684 const ExternalReference& ext) {
1685 // Set the entry point and jump to the C entry runtime stub.
1686 mov(ebx, Immediate(ext));
1687 CEntryStub ces(1);
1688 return TryTailCallStub(&ces);
1689}
1690
1691
Ben Murdoch257744e2011-11-30 15:57:28 +00001692void MacroAssembler::SetCallKind(Register dst, CallKind call_kind) {
1693 // This macro takes the dst register to make the code more readable
1694 // at the call sites. However, the dst register has to be ecx to
1695 // follow the calling convention which requires the call type to be
1696 // in ecx.
1697 ASSERT(dst.is(ecx));
1698 if (call_kind == CALL_AS_FUNCTION) {
1699 // Set to some non-zero smi by updating the least significant
1700 // byte.
1701 mov_b(Operand(dst), 1 << kSmiTagSize);
1702 } else {
1703 // Set to smi zero by clearing the register.
1704 xor_(dst, Operand(dst));
1705 }
1706}
1707
1708
// Emits the argument-count check that precedes a JS function invocation.
// When expected and actual counts provably match, execution simply falls
// through.  Otherwise eax is loaded with the actual and ebx with the
// expected count (the adaptor's register protocol) and the arguments
// adaptor trampoline is called (then jumping to |done|) or tail-called,
// depending on |flag|.  |code_constant|/|code_operand| identify the code
// to invoke after adaptation; it ends up in edx.
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    const Operand& code_operand,
                                    Label* done,
                                    InvokeFlag flag,
                                    Label::Distance done_near,
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  bool definitely_matches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(eax, actual.immediate());
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      ASSERT(expected.reg().is(ebx));
      mov(eax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), Operand(actual.reg()));
      j(equal, &invoke);
      ASSERT(actual.reg().is(eax));
      ASSERT(expected.reg().is(ebx));
    }
  }

  if (!definitely_matches) {
    // Counts may differ at runtime: go through the arguments adaptor.
    Handle<Code> adaptor =
        isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (!code_constant.is_null()) {
      // Load the code to invoke into edx (skipping the Code header).
      mov(edx, Immediate(code_constant));
      add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_operand.is_reg(edx)) {
      mov(edx, code_operand);
    }

    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET));
      SetCallKind(ecx, call_kind);
      call(adaptor, RelocInfo::CODE_TARGET);
      call_wrapper.AfterCall();
      jmp(done, done_near);
    } else {
      SetCallKind(ecx, call_kind);
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
1779
1780
// Invokes (calls or jumps to, per |flag|) the code at |code| after the
// argument-count prologue.  The call kind is materialized in ecx just
// before the transfer.
void MacroAssembler::InvokeCode(const Operand& code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag,
                                const CallWrapper& call_wrapper,
                                CallKind call_kind) {
  Label done;
  InvokePrologue(expected, actual, Handle<Code>::null(), code,
                 &done, flag, Label::kNear, call_wrapper,
                 call_kind);
  if (flag == CALL_FUNCTION) {
    call_wrapper.BeforeCall(CallSize(code));
    SetCallKind(ecx, call_kind);
    call(code);
    call_wrapper.AfterCall();
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    SetCallKind(ecx, call_kind);
    jmp(code);
  }
  bind(&done);
}
1803
1804
// Handle<Code> overload of InvokeCode: invokes |code| with relocation
// mode |rmode| after the argument-count prologue.
void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag,
                                const CallWrapper& call_wrapper,
                                CallKind call_kind) {
  Label done;
  // The code is a constant here, so the operand argument is unused.
  Operand dummy(eax);
  InvokePrologue(expected, actual, code, dummy, &done, flag, Label::kNear,
                 call_wrapper, call_kind);
  if (flag == CALL_FUNCTION) {
    call_wrapper.BeforeCall(CallSize(code, rmode));
    SetCallKind(ecx, call_kind);
    call(code, rmode);
    call_wrapper.AfterCall();
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    SetCallKind(ecx, call_kind);
    jmp(code, rmode);
  }
  bind(&done);
}
1828
1829
// Invokes the JSFunction held in |fun| (which must be edi): loads its
// context into esi, its untagged formal parameter count into ebx, and
// invokes its code entry.
void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  ASSERT(fun.is(edi));
  mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  // The count is stored as a smi; untag it for the prologue check.
  SmiUntag(ebx);

  ParameterCount expected(ebx);
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, actual, flag, call_wrapper, call_kind);
}
1845
1846
// Invokes a known, already-compiled |function|: materializes it in edi,
// loads its context into esi, and invokes either through the function's
// code field (Crankshaft, so recompilation is picked up) or the code
// object directly.
void MacroAssembler::InvokeFunction(JSFunction* function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  ASSERT(function->is_compiled());
  // Get the function and setup the context.
  mov(edi, Immediate(Handle<JSFunction>(function)));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  ParameterCount expected(function->shared()->formal_parameter_count());
  if (V8::UseCrankshaft()) {
    // TODO(kasperl): For now, we always call indirectly through the
    // code field in the function to allow recompilation to take effect
    // without changing any of the call sites.
    InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
               expected, actual, flag, call_wrapper, call_kind);
  } else {
    Handle<Code> code(function->code());
    InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET,
               flag, call_wrapper, call_kind);
  }
}
1870
1871
// Invokes the JavaScript builtin |id| as a method.  The builtin function
// is loaded into edi and invoked through its code entry with a faked
// zero parameter count (builtins do their own argument checking).
void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                   InvokeFlag flag,
                                   const CallWrapper& call_wrapper) {
  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments match the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinFunction(edi, id);
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, expected, flag, call_wrapper, CALL_AS_METHOD);
}
1886
// Loads the JSFunction for builtin |id| into |target| by walking
// global object -> builtins object -> function slot.
void MacroAssembler::GetBuiltinFunction(Register target,
                                        Builtins::JavaScript id) {
  // Load the JavaScript builtin function from the builtins object.
  mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
  mov(target, FieldOperand(target,
                           JSBuiltinsObject::OffsetOfFunctionWithId(id)));
}
Steve Blocka7e24c12009-10-30 11:49:00 +00001895
// Loads the code entry point of builtin |id| into |target|.  Clobbers
// edi (used to hold the builtin function), so |target| must differ.
void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(edi));
  // Load the JavaScript builtin function from the builtins object.
  GetBuiltinFunction(edi, id);
  // Load the code entry point from the function into the target register.
  mov(target, FieldOperand(edi, JSFunction::kCodeEntryOffset));
}
1903
1904
// Loads into |dst| the context |context_chain_length| hops up the
// context chain from the current context (esi); zero hops copies esi.
void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    mov(dst, Operand(esi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    for (int i = 1; i < context_chain_length; i++) {
      mov(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    }
  } else {
    // Slot is in the current function context. Move it into the
    // destination register in case we store into it (the write barrier
    // cannot be allowed to destroy the context in esi).
    mov(dst, esi);
  }

  // We should not have found a with context by walking the context chain
  // (i.e., the static scope chain and runtime context chain do not agree).
  // A variable occurring in such a scope should have slot type LOOKUP and
  // not CONTEXT.
  if (emit_debug_code()) {
    cmp(FieldOperand(dst, HeapObject::kMapOffset),
        isolate()->factory()->with_context_map());
    Check(not_equal, "Variable resolved to with context.");
  }
}
1929
1930
// Loads the global-context function at slot |index| into |function|.
void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the global or builtins object from the current context.
  mov(function, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  // Load the global context from the global or builtins object.
  mov(function, FieldOperand(function, GlobalObject::kGlobalContextOffset));
  // Load the function from the global context.
  mov(function, Operand(function, Context::SlotOffset(index)));
}
1939
1940
// Loads |function|'s initial map into |map|; in debug builds verifies
// that the loaded value actually is a map (its map is the meta map).
void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map. The global functions all have initial maps.
  mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
    jmp(&ok);
    bind(&fail);
    Abort("Global functions must have initial map");
    bind(&ok);
  }
}
1954
Steve Blockd0582a62009-12-15 09:54:21 +00001955
// Store the value in register src in the safepoint register stack
// slot for register dst.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
  mov(SafepointRegisterSlot(dst), src);
}
1961
1962
// Store the immediate |src| in the safepoint register stack slot for
// register |dst|.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Immediate src) {
  mov(SafepointRegisterSlot(dst), src);
}
1966
1967
// Load into |dst| the value saved in the safepoint register stack slot
// for register |src|.
void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  mov(dst, SafepointRegisterSlot(src));
}
1971
1972
// Returns the stack operand of the safepoint slot for |reg|, relative to
// esp (see SafepointRegisterStackIndex for the layout).
Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return Operand(esp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}
1976
1977
Ben Murdochb0fe1622011-05-05 13:52:32 +01001978int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
1979 // The registers are pushed starting with the lowest encoding,
1980 // which means that lowest encodings are furthest away from
1981 // the stack pointer.
1982 ASSERT(reg_code >= 0 && reg_code < kNumSafepointRegisters);
1983 return kNumSafepointRegisters - reg_code - 1;
1984}
1985
1986
// Return without popping any arguments off the caller's stack.
void MacroAssembler::Ret() {
  ret(0);
}
1990
1991
Steve Block1e0659c2011-05-24 12:43:12 +01001992void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
1993 if (is_uint16(bytes_dropped)) {
1994 ret(bytes_dropped);
1995 } else {
1996 pop(scratch);
1997 add(Operand(esp), Immediate(bytes_dropped));
1998 push(scratch);
1999 ret(0);
2000 }
2001}
2002
2003
2004
2005
Leon Clarkee46be812010-01-19 14:06:41 +00002006void MacroAssembler::Drop(int stack_elements) {
2007 if (stack_elements > 0) {
2008 add(Operand(esp), Immediate(stack_elements * kPointerSize));
2009 }
2010}
2011
2012
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002013void MacroAssembler::Move(Register dst, Register src) {
2014 if (!dst.is(src)) {
2015 mov(dst, src);
2016 }
2017}
2018
2019
// Loads the handle |value| into |dst|.
void MacroAssembler::Move(Register dst, Handle<Object> value) {
  mov(dst, value);
}
2023
2024
Steve Blocka7e24c12009-10-30 11:49:00 +00002025void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
2026 if (FLAG_native_code_counters && counter->Enabled()) {
2027 mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
2028 }
2029}
2030
2031
2032void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
2033 ASSERT(value > 0);
2034 if (FLAG_native_code_counters && counter->Enabled()) {
2035 Operand operand = Operand::StaticVariable(ExternalReference(counter));
2036 if (value == 1) {
2037 inc(operand);
2038 } else {
2039 add(operand, Immediate(value));
2040 }
2041 }
2042}
2043
2044
2045void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
2046 ASSERT(value > 0);
2047 if (FLAG_native_code_counters && counter->Enabled()) {
2048 Operand operand = Operand::StaticVariable(ExternalReference(counter));
2049 if (value == 1) {
2050 dec(operand);
2051 } else {
2052 sub(operand, Immediate(value));
2053 }
2054 }
2055}
2056
2057
Leon Clarked91b9f72010-01-27 17:25:45 +00002058void MacroAssembler::IncrementCounter(Condition cc,
2059 StatsCounter* counter,
2060 int value) {
2061 ASSERT(value > 0);
2062 if (FLAG_native_code_counters && counter->Enabled()) {
2063 Label skip;
2064 j(NegateCondition(cc), &skip);
2065 pushfd();
2066 IncrementCounter(counter, value);
2067 popfd();
2068 bind(&skip);
2069 }
2070}
2071
2072
2073void MacroAssembler::DecrementCounter(Condition cc,
2074 StatsCounter* counter,
2075 int value) {
2076 ASSERT(value > 0);
2077 if (FLAG_native_code_counters && counter->Enabled()) {
2078 Label skip;
2079 j(NegateCondition(cc), &skip);
2080 pushfd();
2081 DecrementCounter(counter, value);
2082 popfd();
2083 bind(&skip);
2084 }
2085}
2086
2087
// Debug-build-only Check: verifies |cc| holds, aborting with |msg|
// otherwise.  Emits nothing when debug code is off.
void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (emit_debug_code()) Check(cc, msg);
}
2091
2092
// Debug check that |elements| is a fast elements backing store: its map
// must be one of fixed array, fixed double array, or fixed COW array.
// Aborts otherwise.  Emits nothing when debug code is off.
void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    Factory* factory = isolate()->factory();
    Label ok;
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_double_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_cow_array_map()));
    j(equal, &ok);
    Abort("JSObject with fast elements map has slow elements");
    bind(&ok);
  }
}
2110
2111
// Emits a runtime check: jumps past the abort when |cc| holds, otherwise
// falls into Abort(msg).
void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L);
  Abort(msg);
  // will not return here
  bind(&L);
}
2119
2120
// Emits a check that esp satisfies the OS activation-frame alignment;
// traps with int3 on misalignment.  Emits nothing when the required
// alignment is no stricter than the natural pointer size.
void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    Label alignment_as_expected;
    test(esp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}
2134
2135
// Aborts execution with |msg| by calling Runtime::kAbort, then traps.
// The message pointer is smuggled to the runtime as two smis (an aligned
// base plus the alignment remainder) so the GC never sees a raw pointer.
void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  AllowStubCallsScope allow_scope(this, true);

  push(eax);
  push(Immediate(p0));
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
  int3();
}
2161
2162
// Loads |map|'s instance descriptors into |descriptors|.  The field it
// reads doubles as bit field 3 (a smi) when the map has no descriptors;
// in that case the empty descriptor array is substituted.
void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  mov(descriptors,
      FieldOperand(map, Map::kInstanceDescriptorsOrBitField3Offset));
  Label not_smi;
  JumpIfNotSmi(descriptors, &not_smi);
  mov(descriptors, isolate()->factory()->empty_descriptor_array());
  bind(&not_smi);
}
2172
2173
// Loads 2^|power| into XMM register |dst| by building the IEEE 754
// double directly: place the biased exponent in |scratch| and shift it
// into the exponent field (mantissa stays zero).  Clobbers |scratch|.
void MacroAssembler::LoadPowerOf2(XMMRegister dst,
                                  Register scratch,
                                  int power) {
  ASSERT(is_uintn(power + HeapNumber::kExponentBias,
                  HeapNumber::kExponentBits));
  mov(scratch, Immediate(power + HeapNumber::kExponentBias));
  movd(dst, Operand(scratch));
  psllq(dst, HeapNumber::kMantissaBits);
}
2183
2184
// Jumps to |failure| unless |instance_type| describes a sequential ASCII
// string.  |scratch| may alias |instance_type|; it is clobbered either way.
void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Register scratch,
    Label* failure) {
  if (!scratch.is(instance_type)) {
    mov(scratch, instance_type);
  }
  // Mask down to the string / representation / encoding bits and compare
  // against the sequential-ASCII pattern in one check.
  and_(scratch,
       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
  cmp(scratch, kStringTag | kSeqStringTag | kAsciiStringTag);
  j(not_equal, failure);
}
2197
2198
Leon Clarked91b9f72010-01-27 17:25:45 +00002199void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register object1,
2200 Register object2,
2201 Register scratch1,
2202 Register scratch2,
2203 Label* failure) {
2204 // Check that both objects are not smis.
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002205 STATIC_ASSERT(kSmiTag == 0);
Leon Clarked91b9f72010-01-27 17:25:45 +00002206 mov(scratch1, Operand(object1));
2207 and_(scratch1, Operand(object2));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002208 JumpIfSmi(scratch1, failure);
Leon Clarked91b9f72010-01-27 17:25:45 +00002209
2210 // Load instance type for both strings.
2211 mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
2212 mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
2213 movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
2214 movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));
2215
2216 // Check that both are flat ascii strings.
2217 const int kFlatAsciiStringMask =
2218 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
2219 const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
2220 // Interleave bits from both instance types and compare them in one check.
2221 ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
2222 and_(scratch1, kFlatAsciiStringMask);
2223 and_(scratch2, kFlatAsciiStringMask);
2224 lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
2225 cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3));
2226 j(not_equal, failure);
2227}
2228
2229
Steve Block6ded16b2010-05-10 14:33:55 +01002230void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
Ben Murdoch8b112d22011-06-08 16:22:53 +01002231 int frame_alignment = OS::ActivationFrameAlignment();
2232 if (frame_alignment != 0) {
Steve Block6ded16b2010-05-10 14:33:55 +01002233 // Make stack end at alignment and make room for num_arguments words
2234 // and the original value of esp.
2235 mov(scratch, esp);
2236 sub(Operand(esp), Immediate((num_arguments + 1) * kPointerSize));
Ben Murdoch8b112d22011-06-08 16:22:53 +01002237 ASSERT(IsPowerOf2(frame_alignment));
2238 and_(esp, -frame_alignment);
Steve Block6ded16b2010-05-10 14:33:55 +01002239 mov(Operand(esp, num_arguments * kPointerSize), scratch);
2240 } else {
2241 sub(Operand(esp), Immediate(num_arguments * kPointerSize));
2242 }
2243}
2244
2245
// Calls the C function at external reference |function|; the
// |num_arguments| arguments must already have been set up by
// PrepareCallCFunction.
void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Trashing eax is ok as it will be the return value.
  mov(Operand(eax), Immediate(function));
  CallCFunction(eax, num_arguments);
}
2252
2253
// Calls the C function whose address is in |function| and tears down
// the argument area set up by PrepareCallCFunction. If the OS required
// frame alignment, the original esp saved above the argument slots is
// reloaded; otherwise the argument slots are simply popped.
void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(Operand(function));
  if (OS::ActivationFrameAlignment() != 0) {
    // Restore the esp saved by PrepareCallCFunction.
    mov(esp, Operand(esp, num_arguments * kPointerSize));
  } else {
    add(Operand(esp), Immediate(num_arguments * kPointerSize));
  }
}
2268
2269
// Constructs a patcher that assembles directly into the existing code
// at |address|, allowing |size| bytes to be rewritten in place.
CodePatcher::CodePatcher(byte* address, int size)
    : address_(address),
      size_(size),
      masm_(Isolate::Current(), address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap on order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
2279
2280
// Flushes the instruction cache over the patched region and verifies
// that exactly |size_| bytes of code were emitted.
CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
2289
2290
2291} } // namespace v8::internal
Leon Clarkef7060e22010-06-03 12:02:55 +01002292
2293#endif // V8_TARGET_ARCH_IA32