blob: ce6d6a6ea0aeaa527b31d4afbeb170b49b1951f9 [file] [log] [blame]
Steve Block1e0659c2011-05-24 12:43:12 +01001// Copyright 2011 the V8 project authors. All rights reserved.
Steve Blocka7e24c12009-10-30 11:49:00 +00002// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
29
Leon Clarkef7060e22010-06-03 12:02:55 +010030#if defined(V8_TARGET_ARCH_IA32)
31
Steve Blocka7e24c12009-10-30 11:49:00 +000032#include "bootstrapper.h"
Ben Murdoch8b112d22011-06-08 16:22:53 +010033#include "codegen.h"
Steve Blocka7e24c12009-10-30 11:49:00 +000034#include "debug.h"
35#include "runtime.h"
36#include "serialize.h"
37
38namespace v8 {
39namespace internal {
40
41// -------------------------------------------------------------------------
42// MacroAssembler implementation.
43
// Constructs a MacroAssembler emitting into the given buffer.
// arg_isolate may be NULL (e.g. at snapshot/bootstrap time); in that case
// code_object_ is left unset because no heap is available to read from.
MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true) {
  if (isolate() != NULL) {
    // Placeholder; patched to the real code object when the code is created.
    code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
                                  isolate());
  }
}
53
54
// Marks the card-table region covering 'addr' dirty on the page containing
// 'object'. Caller must have already excluded new-space objects and smis.
// Clobbers 'object' (becomes the page start) and 'addr' (becomes the region
// number); 'scratch' is used for the dirty-mark word.
void MacroAssembler::RecordWriteHelper(Register object,
                                       Register addr,
                                       Register scratch) {
  if (emit_debug_code()) {
    // Check that the object is not in new space.
    Label not_in_new_space;
    InNewSpace(object, scratch, not_equal, &not_in_new_space);
    Abort("new-space object passed to RecordWriteHelper");
    bind(&not_in_new_space);
  }

  // Compute the page start address from the heap object pointer, and reuse
  // the 'object' register for it.
  and_(object, ~Page::kPageAlignmentMask);

  // Compute number of region covering addr. See Page::GetRegionNumberForAddress
  // method for more details.
  shr(addr, Page::kRegionSizeLog2);
  and_(addr, Page::kPageAlignmentMask >> Page::kRegionSizeLog2);

  // Set dirty mark for region.
  // Bit tests with a memory operand should be avoided on Intel processors,
  // as they usually have long latency and multiple uops. We load the bit base
  // operand to a register at first and store it back after bit set.
  mov(scratch, Operand(object, Page::kDirtyFlagOffset));
  bts(Operand(scratch), addr);
  mov(Operand(object, Page::kDirtyFlagOffset), scratch);
}
83
84
85void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
86 XMMRegister scratch_reg,
87 Register result_reg) {
88 Label done;
89 ExternalReference zero_ref = ExternalReference::address_of_zero();
90 movdbl(scratch_reg, Operand::StaticVariable(zero_ref));
91 Set(result_reg, Immediate(0));
92 ucomisd(input_reg, scratch_reg);
93 j(below, &done, Label::kNear);
94 ExternalReference half_ref = ExternalReference::address_of_one_half();
95 movdbl(scratch_reg, Operand::StaticVariable(half_ref));
96 addsd(scratch_reg, input_reg);
97 cvttsd2si(result_reg, Operand(scratch_reg));
98 test(result_reg, Immediate(0xFFFFFF00));
99 j(zero, &done, Label::kNear);
100 Set(result_reg, Immediate(255));
101 bind(&done);
102}
103
104
105void MacroAssembler::ClampUint8(Register reg) {
106 Label done;
107 test(reg, Immediate(0xFFFFFF00));
108 j(zero, &done, Label::kNear);
109 setcc(negative, reg); // 1 if negative, 0 if positive.
110 dec_b(reg); // 0 if negative, 255 if positive.
111 bind(&done);
112}
113
114
115void MacroAssembler::InNewSpace(Register object,
116 Register scratch,
117 Condition cc,
118 Label* branch,
119 Label::Distance branch_near) {
120 ASSERT(cc == equal || cc == not_equal);
121 if (Serializer::enabled()) {
122 // Can't do arithmetic on external references if it might get serialized.
123 mov(scratch, Operand(object));
124 // The mask isn't really an address. We load it as an external reference in
125 // case the size of the new space is different between the snapshot maker
126 // and the running system.
127 and_(Operand(scratch),
128 Immediate(ExternalReference::new_space_mask(isolate())));
129 cmp(Operand(scratch),
130 Immediate(ExternalReference::new_space_start(isolate())));
131 j(cc, branch, branch_near);
132 } else {
133 int32_t new_space_start = reinterpret_cast<int32_t>(
134 ExternalReference::new_space_start(isolate()).address());
135 lea(scratch, Operand(object, -new_space_start));
136 and_(scratch, isolate()->heap()->NewSpaceMask());
137 j(cc, branch, branch_near);
138 }
Steve Blocka7e24c12009-10-30 11:49:00 +0000139}
140
141
// Record-write barrier for a store of 'value' into 'object' at the given
// byte offset (offset == 0 means 'scratch' holds a smi array index instead).
// Skips the barrier for smi values and for objects in new space. With
// debug code enabled all input registers are zapped afterwards, so callers
// must not rely on their contents.
void MacroAssembler::RecordWrite(Register object,
                                 int offset,
                                 Register value,
                                 Register scratch) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  // Skip barrier if writing a smi.
  STATIC_ASSERT(kSmiTag == 0);
  JumpIfSmi(value, &done, Label::kNear);

  InNewSpace(object, value, equal, &done, Label::kNear);

  // The offset is relative to a tagged or untagged HeapObject pointer,
  // so either offset or offset + kHeapObjectTag must be a
  // multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize) ||
         IsAligned(offset + kHeapObjectTag, kPointerSize));

  Register dst = scratch;
  if (offset != 0) {
    lea(dst, Operand(object, offset));
  } else {
    // Array access: calculate the destination address in the same manner as
    // KeyedStoreIC::GenerateGeneric. Multiply a smi by 2 to get an offset
    // into an array of words.
    STATIC_ASSERT(kSmiTagSize == 1);
    STATIC_ASSERT(kSmiTag == 0);
    lea(dst, Operand(object, dst, times_half_pointer_size,
                     FixedArray::kHeaderSize - kHeapObjectTag));
  }
  RecordWriteHelper(object, dst, value);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(object, Immediate(BitCast<int32_t>(kZapValue)));
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
    mov(scratch, Immediate(BitCast<int32_t>(kZapValue)));
  }
}
186
187
// Record-write barrier variant where the exact destination slot address is
// already in 'address'. Skips the barrier for smi values and new-space
// objects; with debug code enabled all input registers are zapped.
void MacroAssembler::RecordWrite(Register object,
                                 Register address,
                                 Register value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  // Skip barrier if writing a smi.
  STATIC_ASSERT(kSmiTag == 0);
  JumpIfSmi(value, &done, Label::kNear);

  InNewSpace(object, value, equal, &done);

  RecordWriteHelper(object, address, value);

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(object, Immediate(BitCast<int32_t>(kZapValue)));
    mov(address, Immediate(BitCast<int32_t>(kZapValue)));
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
  }
}
213
214
#ifdef ENABLE_DEBUGGER_SUPPORT
// Emits a call into the runtime's DebugBreak handler via CEntryStub.
// eax carries the argument count (0), ebx the runtime function reference.
void MacroAssembler::DebugBreak() {
  Set(eax, Immediate(0));
  mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak, isolate())));
  CEntryStub ces(1);
  call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}
#endif
223
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100224
Steve Blocka7e24c12009-10-30 11:49:00 +0000225void MacroAssembler::Set(Register dst, const Immediate& x) {
226 if (x.is_zero()) {
Steve Block053d10c2011-06-13 19:13:29 +0100227 xor_(dst, Operand(dst)); // Shorter than mov.
Steve Blocka7e24c12009-10-30 11:49:00 +0000228 } else {
229 mov(dst, x);
230 }
231}
232
233
234void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
235 mov(dst, x);
236}
237
238
Steve Block053d10c2011-06-13 19:13:29 +0100239bool MacroAssembler::IsUnsafeImmediate(const Immediate& x) {
240 static const int kMaxImmediateBits = 17;
241 if (x.rmode_ != RelocInfo::NONE) return false;
242 return !is_intn(x.x_, kMaxImmediateBits);
243}
244
245
// Like Set, but masks unsafe immediates with the JIT cookie so the literal
// value never appears verbatim in the instruction stream (JIT-spray defense).
// The second xor restores the intended value in 'dst' at runtime.
void MacroAssembler::SafeSet(Register dst, const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    Set(dst, Immediate(x.x_ ^ jit_cookie()));
    xor_(dst, jit_cookie());
  } else {
    Set(dst, x);
  }
}
254
255
// Like push(Immediate), but masks unsafe immediates with the JIT cookie
// (see SafeSet); the pushed slot is un-masked in place on the stack.
void MacroAssembler::SafePush(const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    push(Immediate(x.x_ ^ jit_cookie()));
    xor_(Operand(esp, 0), Immediate(jit_cookie()));
  } else {
    push(x);
  }
}
264
265
// Compares 'with' against the root-list entry at 'index', setting flags.
void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  // see ROOT_ACCESSOR macro in factory.h
  Handle<Object> value(&isolate()->heap()->roots_address()[index]);
  cmp(with, value);
}
271
272
// Compares the instance type of 'heap_object' against 'type', setting flags.
// Loads the object's map into 'map' as a side effect (callers may reuse it).
// 'heap_object' must not be a smi.
void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}
279
280
281void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
282 cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
283 static_cast<int8_t>(type));
284}
285
286
// Jumps to 'fail' unless the map's elements kind (encoded in bit field 2)
// indicates a fast-elements backing store.
void MacroAssembler::CheckFastElements(Register map,
                                       Label* fail,
                                       Label::Distance distance) {
  // The fast-elements check relies on FAST_ELEMENTS being the lowest kind.
  STATIC_ASSERT(FAST_ELEMENTS == 0);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastElementValue);
  j(above, fail, distance);
}
295
296
// Jumps to 'fail' unless 'obj' is a heap object whose map equals 'map'.
// The smi check is emitted only when smi_check_type == DO_SMI_CHECK;
// with DONT_DO_SMI_CHECK the caller guarantees 'obj' is not a smi.
void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }
  cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
  j(not_equal, fail);
}
307
308
// Tail-jumps to code object 'success' when 'obj' has map 'map'; otherwise
// falls through (inverse of CheckMap: match branches, mismatch continues).
void MacroAssembler::DispatchMap(Register obj,
                                 Handle<Map> map,
                                 Handle<Code> success,
                                 SmiCheckType smi_check_type) {
  Label fail;
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, &fail);
  }
  cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
  j(equal, success);

  bind(&fail);
}
322
323
// Loads the map and instance type of 'heap_object' (which must not be a smi)
// and returns the condition that holds when the object is a string, so the
// caller can branch on it. Flags are set by the 'test' below.
Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  // String types have the kNotStringTag bit clear, so 'zero' means string.
  STATIC_ASSERT(kNotStringTag != 0);
  test(instance_type, Immediate(kIsNotStringMask));
  return zero;
}
333
334
// Jumps to 'fail' unless 'heap_object' (not a smi) is a non-callable
// JS object. Loads the object's map into 'map' as a side effect.
void MacroAssembler::IsObjectJSObjectType(Register heap_object,
                                          Register map,
                                          Register scratch,
                                          Label* fail) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  IsInstanceJSObjectType(map, scratch, fail);
}
342
343
// Jumps to 'fail' unless the instance type in 'map' lies in the
// non-callable spec-object range. Clobbers 'scratch'.
void MacroAssembler::IsInstanceJSObjectType(Register map,
                                            Register scratch,
                                            Label* fail) {
  movzx_b(scratch, FieldOperand(map, Map::kInstanceTypeOffset));
  // Bias the type so a single unsigned compare checks both range ends.
  sub(Operand(scratch), Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
  cmp(scratch,
      LAST_NONCALLABLE_SPEC_OBJECT_TYPE - FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
  j(above, fail);
}
353
354
// Compares the two values on top of the x87 FPU stack, pops both, and
// leaves the result in the CPU flags for a subsequent conditional jump.
void MacroAssembler::FCmp() {
  if (CpuFeatures::IsSupported(CMOV)) {
    // fucomip sets EFLAGS directly and pops once; free and pop the
    // remaining stack slot by hand.
    fucomip();
    ffree(0);
    fincstp();
  } else {
    // Pre-P6 path: compare-and-pop twice, then copy the FPU status word
    // into EFLAGS via ax/sahf, preserving eax around the transfer.
    fucompp();
    push(eax);
    fnstsw_ax();
    sahf();
    pop(eax);
  }
}
368
369
// Debug-mode check: aborts unless 'object' is a smi or a heap number.
// Emits nothing useful in release builds (Assert compiles away).
void MacroAssembler::AbortIfNotNumber(Register object) {
  Label ok;
  // A smi is a number; only heap objects need the map check.
  JumpIfSmi(object, &ok);
  cmp(FieldOperand(object, HeapObject::kMapOffset),
      isolate()->factory()->heap_number_map());
  Assert(equal, "Operand not a number");
  bind(&ok);
}
378
379
// Debug-mode check: aborts unless 'object' carries the smi tag.
void MacroAssembler::AbortIfNotSmi(Register object) {
  test(object, Immediate(kSmiTagMask));
  Assert(equal, "Operand is not a smi");
}
384
385
// Debug-mode check: aborts unless 'object' is a string (not a smi and
// instance type below FIRST_NONSTRING_TYPE). Preserves 'object' by
// saving it around the map load.
void MacroAssembler::AbortIfNotString(Register object) {
  test(object, Immediate(kSmiTagMask));
  Assert(not_equal, "Operand is not a string");
  push(object);
  mov(object, FieldOperand(object, HeapObject::kMapOffset));
  CmpInstanceType(object, FIRST_NONSTRING_TYPE);
  pop(object);
  Assert(below, "Operand is not a string");
}
395
396
// Debug-mode check: aborts if 'object' carries the smi tag.
void MacroAssembler::AbortIfSmi(Register object) {
  test(object, Immediate(kSmiTagMask));
  Assert(not_equal, "Operand is a smi");
}
401
402
// Builds a standard stack frame of the given type: saved ebp, context (esi),
// frame-type marker, and the code object, then points ebp at the new frame.
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, Operand(esp));
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(CodeObject()));
  if (emit_debug_code()) {
    // The code-object slot must have been patched from the undefined
    // placeholder set up in the constructor.
    cmp(Operand(esp, 0), Immediate(isolate()->factory()->undefined_value()));
    Check(not_equal, "code object not properly patched");
  }
}
414
415
// Tears down a frame built by EnterFrame. In debug mode verifies the frame
// marker matches the expected type before leaving.
void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (emit_debug_code()) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, "stack frame types must match");
  }
  leave();
}
424
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100425
// First half of exit-frame construction: links ebp, reserves the entry-sp
// slot, pushes the code object, and publishes ebp/esi to the per-isolate
// c_entry_fp/context top slots so the stack can be walked from C++.
void MacroAssembler::EnterExitFramePrologue() {
  // Setup the frame structure on the stack.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(ebp);
  mov(ebp, Operand(esp));

  // Reserve room for entry stack pointer and push the code object.
  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  push(Immediate(CodeObject()));  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
                                       isolate());
  ExternalReference context_address(Isolate::kContextAddress,
                                    isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);
}
Steve Blocka7e24c12009-10-30 11:49:00 +0000447
Steve Blocka7e24c12009-10-30 11:49:00 +0000448
// Second half of exit-frame construction: reserves 'argc' argument slots
// (plus space for all XMM registers when save_doubles), aligns esp to the
// OS activation-frame alignment, and patches the saved entry sp slot.
void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) {
  // Optionally save all XMM registers.
  if (save_doubles) {
    CpuFeatures::Scope scope(SSE2);
    int space = XMMRegister::kNumRegisters * kDoubleSize + argc * kPointerSize;
    sub(Operand(esp), Immediate(space));
    // XMM saves live below the code-object slot, hence the -2 word base.
    const int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movdbl(Operand(ebp, offset - ((i + 1) * kDoubleSize)), reg);
    }
  } else {
    sub(Operand(esp), Immediate(argc * kPointerSize));
  }

  // Get the required frame alignment for the OS.
  const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}
474
475
// Enters an exit frame for a runtime call. On entry eax holds the argument
// count; edi receives argc and esi the argv pointer (both callee-saved in
// the C calling convention), then three outgoing slots are reserved.
void MacroAssembler::EnterExitFrame(bool save_doubles) {
  EnterExitFramePrologue();

  // Setup argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, Operand(eax));
  lea(esi, Operand(ebp, eax, times_4, offset));

  // Reserve space for argc, argv and isolate.
  EnterExitFrameEpilogue(3, save_doubles);
}
487
488
// Enters an exit frame for an API callback with 'argc' outgoing stack
// slots; XMM registers are never saved on this path.
void MacroAssembler::EnterApiExitFrame(int argc) {
  EnterExitFramePrologue();
  EnterExitFrameEpilogue(argc, false);
}
493
494
// Tears down an exit frame built by EnterExitFrame, restoring XMM registers
// when save_doubles matches the entry, and drops the JS arguments plus
// receiver using the argv pointer still held in esi.
void MacroAssembler::LeaveExitFrame(bool save_doubles) {
  // Optionally restore all XMM registers.
  if (save_doubles) {
    CpuFeatures::Scope scope(SSE2);
    // Must mirror the save layout in EnterExitFrameEpilogue.
    const int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movdbl(reg, Operand(ebp, offset - ((i + 1) * kDoubleSize)));
    }
  }

  // Get the return address from the stack and restore the frame pointer.
  mov(ecx, Operand(ebp, 1 * kPointerSize));
  mov(ebp, Operand(ebp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  lea(esp, Operand(esi, 1 * kPointerSize));

  // Push the return address to get ready to return.
  push(ecx);

  LeaveExitFrameEpilogue();
}
518
// Shared exit-frame teardown tail: restores esi from the per-isolate
// context top slot and clears the c_entry_fp top slot.
void MacroAssembler::LeaveExitFrameEpilogue() {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  mov(esi, Operand::StaticVariable(context_address));
#ifdef DEBUG
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
                                       isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}
532
533
// Tears down an API exit frame. Unlike LeaveExitFrame this does not pop
// caller arguments — the API calling convention leaves them to the caller.
void MacroAssembler::LeaveApiExitFrame() {
  mov(esp, Operand(ebp));
  pop(ebp);

  LeaveExitFrameEpilogue();
}
540
541
// Pushes a new stack handler (state, fp, context, next-handler link; the pc
// is the return address already on top of stack) and installs it as the
// isolate's current handler. Layout must match StackHandlerConstants.
void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize);
  // The pc (return address) is already on TOS.
  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      push(Immediate(StackHandler::TRY_CATCH));
    } else {
      push(Immediate(StackHandler::TRY_FINALLY));
    }
    push(ebp);
    push(esi);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The frame pointer does not point to a JS frame so we save NULL
    // for ebp. We expect the code throwing an exception to check ebp
    // before dereferencing it to restore the context.
    push(Immediate(StackHandler::ENTRY));
    push(Immediate(0));  // NULL frame pointer.
    push(Immediate(Smi::FromInt(0)));  // No context.
  }
  // Save the current handler as the next handler.
  push(Operand::StaticVariable(ExternalReference(Isolate::kHandlerAddress,
                                                 isolate())));
  // Link this handler as the new current one.
  mov(Operand::StaticVariable(ExternalReference(Isolate::kHandlerAddress,
                                                isolate())),
      esp);
}
577
578
// Unlinks the current stack handler: restores the saved next-handler link
// into the isolate's handler slot and drops the rest of the handler record
// (everything except the return address still owned by the caller).
void MacroAssembler::PopTryHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  pop(Operand::StaticVariable(ExternalReference(Isolate::kHandlerAddress,
                                                isolate())));
  add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize));
}
585
586
// Throws the exception in 'value' to the current (innermost) stack handler:
// unwinds esp to the handler, relinks the next handler, restores context and
// frame pointer, and returns into the handler's saved pc with the exception
// in eax.
void MacroAssembler::Throw(Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize);
  // eax must hold the exception.
  if (!value.is(eax)) {
    mov(eax, value);
  }

  // Drop the sp to the top of the handler.
  ExternalReference handler_address(Isolate::kHandlerAddress,
                                    isolate());
  mov(esp, Operand::StaticVariable(handler_address));

  // Restore next handler, context, and frame pointer; discard handler state.
  pop(Operand::StaticVariable(handler_address));
  pop(esi);  // Context.
  pop(ebp);  // Frame pointer.
  pop(edx);  // State.

  // If the handler is a JS frame, restore the context to the frame.
  // (edx == ENTRY) == (ebp == 0) == (esi == 0), so we could test any
  // of them.
  Label skip;
  cmp(Operand(edx), Immediate(StackHandler::ENTRY));
  j(equal, &skip, Label::kNear);
  mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
  bind(&skip);

  // 'Return' into the handler's saved pc (the fifth handler slot).
  ret(0);
}
622
623
// Throws an uncatchable exception (termination or out-of-memory) in 'value':
// unwinds every handler until the innermost ENTRY handler, optionally records
// an out-of-memory failure in the isolate, then returns into that handler
// with the exception in eax and the context cleared.
void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
                                      Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize);

  // eax must hold the exception.
  if (!value.is(eax)) {
    mov(eax, value);
  }

  // Drop sp to the top stack handler.
  ExternalReference handler_address(Isolate::kHandlerAddress,
                                    isolate());
  mov(esp, Operand::StaticVariable(handler_address));

  // Unwind the handlers until the ENTRY handler is found.
  Label loop, done;
  bind(&loop);
  // Load the type of the current stack handler.
  const int kStateOffset = StackHandlerConstants::kStateOffset;
  cmp(Operand(esp, kStateOffset), Immediate(StackHandler::ENTRY));
  j(equal, &done, Label::kNear);
  // Fetch the next handler in the list.
  const int kNextOffset = StackHandlerConstants::kNextOffset;
  mov(esp, Operand(esp, kNextOffset));
  jmp(&loop);
  bind(&done);

  // Set the top handler address to next handler past the current ENTRY handler.
  pop(Operand::StaticVariable(handler_address));

  if (type == OUT_OF_MEMORY) {
    // Set external caught exception to false.
    ExternalReference external_caught(
        Isolate::kExternalCaughtExceptionAddress,
        isolate());
    mov(eax, false);
    mov(Operand::StaticVariable(external_caught), eax);

    // Set pending exception and eax to out of memory exception.
    ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
                                        isolate());
    mov(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException()));
    mov(Operand::StaticVariable(pending_exception), eax);
  }

  // Discard the context saved in the handler and clear the context pointer.
  pop(edx);
  Set(esi, Immediate(0));

  // Restore fp from handler and discard handler state.
  pop(ebp);
  pop(edx);  // State.

  // 'Return' into the ENTRY handler's saved pc.
  ret(0);
}
685
686
// Emits a security check that the calling context may access the global
// proxy in |holder_reg|.  Jumps to |miss| if access is denied (differing
// security tokens in different global contexts); falls through when access
// is allowed.  |scratch| is clobbered; |holder_reg| is preserved.
void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));

  // Load current lexical context from the stack frame.
  mov(scratch, Operand(ebp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (emit_debug_code()) {
    cmp(Operand(scratch), Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
  }
  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, offset));
  mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check the context is a global context.
  if (emit_debug_code()) {
    // Preserve scratch around the map check.
    push(scratch);
    // Read the first word and compare to global_context_map.
    mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
    cmp(scratch, isolate()->factory()->global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(scratch);
  }

  // Check if both contexts are the same; identical contexts trivially have
  // the same security token, so no further checking is needed.
  cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  j(equal, &same_contexts);

  // Compare security tokens, save holder_reg on the stack so we can use it
  // as a temporary register.
  //
  // TODO(119): avoid push(holder_reg)/pop(holder_reg)
  push(holder_reg);
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));

  // Check the context is a global context.
  if (emit_debug_code()) {
    cmp(holder_reg, isolate()->factory()->null_value());
    Check(not_equal, "JSGlobalProxy::context() should not be null.");

    push(holder_reg);
    // Read the first word and compare to global_context_map(),
    mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    cmp(holder_reg, isolate()->factory()->global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);
  }

  // Compare the security tokens of the two contexts; mismatch means the
  // access check fails.
  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, token_offset));
  cmp(scratch, FieldOperand(holder_reg, token_offset));
  pop(holder_reg);
  j(not_equal, miss);

  bind(&same_contexts);
}
753
754
// Compute the hash code from the untagged key. This must be kept in sync
// with ComputeIntegerHash in utils.h.
//
// Note: r0 will contain hash code
void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
  // Xor original key with a seed.
  if (Serializer::enabled()) {
    // When serializing, the seed is not a compile-time constant: load it
    // from the roots array at runtime and untag it.
    ExternalReference roots_address =
        ExternalReference::roots_address(isolate());
    mov(scratch, Immediate(Heap::kHashSeedRootIndex));
    mov(scratch, Operand::StaticArray(scratch,
                                      times_pointer_size,
                                      roots_address));
    SmiUntag(scratch);
    xor_(r0, Operand(scratch));
  } else {
    // Otherwise the heap's hash seed can be baked in as an immediate.
    int32_t seed = isolate()->heap()->HashSeed();
    xor_(r0, seed);
  }

  // The shift/add/xor sequence below mirrors ComputeIntegerHash step by
  // step; each commented line is the C expression being emitted.
  // hash = ~hash + (hash << 15);
  mov(scratch, r0);
  not_(r0);
  shl(scratch, 15);
  add(r0, Operand(scratch));
  // hash = hash ^ (hash >> 12);
  mov(scratch, r0);
  shr(scratch, 12);
  xor_(r0, Operand(scratch));
  // hash = hash + (hash << 2);
  lea(r0, Operand(r0, r0, times_4, 0));
  // hash = hash ^ (hash >> 4);
  mov(scratch, r0);
  shr(scratch, 4);
  xor_(r0, Operand(scratch));
  // hash = hash * 2057;
  imul(r0, r0, 2057);
  // hash = hash ^ (hash >> 16);
  mov(scratch, r0);
  shr(scratch, 16);
  xor_(r0, Operand(scratch));
}
797
798
799
// Looks up |key| in a SeededNumberDictionary using open addressing with
// quadratic probing (unrolled to kProbes attempts).  Falls through with the
// value in |result| on success; jumps to |miss| if the key is absent, the
// probes are exhausted, or the found property is not NORMAL.
void MacroAssembler::LoadFromNumberDictionary(Label* miss,
                                              Register elements,
                                              Register key,
                                              Register r0,
                                              Register r1,
                                              Register r2,
                                              Register result) {
  // Register use:
  //
  // elements - holds the slow-case elements of the receiver and is unchanged.
  //
  // key - holds the smi key on entry and is unchanged.
  //
  // Scratch registers:
  //
  // r0 - holds the untagged key on entry and holds the hash once computed.
  //
  // r1 - used to hold the capacity mask of the dictionary
  //
  // r2 - used for the index into the dictionary.
  //
  // result - holds the result on exit if the load succeeds and we fall through.

  Label done;

  GetNumberHash(r0, r1);

  // Compute capacity mask (capacity is a power of two, so mask = cap - 1).
  mov(r1, FieldOperand(elements, SeededNumberDictionary::kCapacityOffset));
  shr(r1, kSmiTagSize);  // convert smi to int
  dec(r1);

  // Generate an unrolled loop that performs a few probes before giving up.
  const int kProbes = 4;
  for (int i = 0; i < kProbes; i++) {
    // Use r2 for index calculations and keep the hash intact in r0.
    mov(r2, r0);
    // Compute the masked index: (hash + i + i * i) & mask.
    if (i > 0) {
      add(Operand(r2), Immediate(SeededNumberDictionary::GetProbeOffset(i)));
    }
    and_(r2, Operand(r1));

    // Scale the index by multiplying by the entry size.
    ASSERT(SeededNumberDictionary::kEntrySize == 3);
    lea(r2, Operand(r2, r2, times_2, 0));  // r2 = r2 * 3

    // Check if the key matches.
    cmp(key, FieldOperand(elements,
                          r2,
                          times_pointer_size,
                          SeededNumberDictionary::kElementsStartOffset));
    if (i != (kProbes - 1)) {
      j(equal, &done);
    } else {
      // Last probe: a mismatch means the key is definitely not present.
      j(not_equal, miss);
    }
  }

  bind(&done);
  // Check that the value is a normal property.
  const int kDetailsOffset =
      SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  ASSERT_EQ(NORMAL, 0);
  test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
       Immediate(PropertyDetails::TypeField::kMask << kSmiTagSize));
  j(not_zero, miss);

  // Get the value at the masked, scaled index.
  const int kValueOffset =
      SeededNumberDictionary::kElementsStartOffset + kPointerSize;
  mov(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
}
873
874
// Loads the new-space allocation top into |result|.  With
// RESULT_CONTAINS_TOP the caller promises |result| already holds it (only
// verified in debug code); otherwise it is loaded, via |scratch| when one
// is provided.
void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    ASSERT(scratch.is(no_reg));
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    cmp(result, Operand::StaticVariable(new_space_allocation_top));
    Check(equal, "Unexpected allocation top");
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available.
  if (scratch.is(no_reg)) {
    mov(result, Operand::StaticVariable(new_space_allocation_top));
  } else {
    // Materialize the top address in scratch first so callers can reuse it
    // for the matching UpdateAllocationTopHelper store.
    mov(Operand(scratch), Immediate(new_space_allocation_top));
    mov(result, Operand(scratch, 0));
  }
}
901
902
// Stores |result_end| as the new new-space allocation top.  If |scratch| is
// valid it must already hold the allocation-top address (as set up by
// LoadAllocationTopHelper).
void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch) {
  if (emit_debug_code()) {
    // New-space objects must be kObjectAlignment-aligned.
    test(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, "Unaligned allocation in new space");
  }

  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Update new top. Use scratch if available.
  if (scratch.is(no_reg)) {
    mov(Operand::StaticVariable(new_space_allocation_top), result_end);
  } else {
    mov(Operand(scratch, 0), result_end);
  }
}
920
921
// Allocates |object_size| bytes (a compile-time constant) in new space.
// On success falls through with the (optionally tagged) object in |result|;
// jumps to |gc_required| if new space is exhausted.  |result_end| and
// |scratch| may be no_reg; see the flag handling below.
void MacroAssembler::AllocateInNewSpace(int object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    // Inline allocation disabled: always take the GC/runtime path.
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      if (result_end.is_valid()) {
        mov(result_end, Immediate(0x7191));
      }
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // If no result_end was supplied, do the top arithmetic in result itself
  // and subtract the size back out at the end.
  Register top_reg = result_end.is_valid() ? result_end : result;

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate());

  if (!top_reg.is(result)) {
    mov(top_reg, result);
  }
  add(Operand(top_reg), Immediate(object_size));
  j(carry, gc_required);  // Address-space wrap-around.
  cmp(top_reg, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(top_reg, scratch);

  // Tag result if requested.
  if (top_reg.is(result)) {
    // result currently holds the new top; recover the object start.
    if ((flags & TAG_OBJECT) != 0) {
      sub(Operand(result), Immediate(object_size - kHeapObjectTag));
    } else {
      sub(Operand(result), Immediate(object_size));
    }
  } else if ((flags & TAG_OBJECT) != 0) {
    add(Operand(result), Immediate(kHeapObjectTag));
  }
}
975
976
// Allocates header_size + element_count * element_size bytes in new space.
// On success falls through with the object in |result| and the new top in
// |result_end|; jumps to |gc_required| on exhaustion.  |element_count| is
// left unmodified.
void MacroAssembler::AllocateInNewSpace(int header_size,
                                        ScaleFactor element_size,
                                        Register element_count,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    // Inline allocation disabled: always take the GC/runtime path.
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // Register element_count is not modified by the function.
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate());

  // We assume that element_count*element_size + header_size does not
  // overflow.
  lea(result_end, Operand(element_count, element_size, header_size));
  add(result_end, Operand(result));
  j(carry, gc_required);  // Address-space wrap-around.
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}
1023
1024
// Allocates |object_size| bytes (a runtime value in a register) in new
// space.  On success falls through with the object in |result| and the new
// top in |result_end|; jumps to |gc_required| on exhaustion.
void MacroAssembler::AllocateInNewSpace(Register object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    // Inline allocation disabled: always take the GC/runtime path.
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // object_size is left unchanged by this function.
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate());
  if (!object_size.is(result_end)) {
    mov(result_end, object_size);
  }
  add(result_end, Operand(result));
  j(carry, gc_required);  // Address-space wrap-around.
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}
1068
1069
// Reverses the most recent new-space allocation by resetting the allocation
// top to |object|'s (untagged) address.  Only valid if no allocation
// happened after |object| was allocated.
void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Make sure the object has no tag before resetting top.
  and_(Operand(object), Immediate(~kHeapObjectTagMask));
#ifdef DEBUG
  // The object must lie below the current top, i.e. inside already
  // allocated new space.
  cmp(object, Operand::StaticVariable(new_space_allocation_top));
  Check(below, "Undo allocation of non allocated memory");
#endif
  mov(Operand::StaticVariable(new_space_allocation_top), object);
}
1082
1083
// Allocates a HeapNumber in new space and installs its map.  The value
// field is left uninitialized.  Jumps to |gc_required| on failure.
void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate heap number in new space.
  AllocateInNewSpace(HeapNumber::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->heap_number_map()));
}
1100
1101
// Allocates a SeqTwoByteString of |length| characters in new space and
// initializes its map, length and hash field.  Jumps to |gc_required| on
// failure.  The character payload is left uninitialized.
void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  ASSERT(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
  and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));

  // Allocate two byte string in new space.
  AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);  // Length is stored as a smi.
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
1135
1136
// Allocates a SeqAsciiString of |length| (register) characters in new space
// and initializes its map, length and hash field.  Jumps to |gc_required|
// on failure.  The character payload is left uninitialized.
void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, length);
  ASSERT(kCharSize == 1);
  // Round the byte count up to the object alignment.
  add(Operand(scratch1), Immediate(kObjectAlignmentMask));
  and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));

  // Allocate ascii string in new space.
  AllocateInNewSpace(SeqAsciiString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->ascii_string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);  // Length is stored as a smi.
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
1170
1171
// Allocates a SeqAsciiString of compile-time-constant |length| characters
// in new space and initializes its map, length and hash field.  Jumps to
// |gc_required| on failure.
void MacroAssembler::AllocateAsciiString(Register result,
                                         int length,
                                         Register scratch1,
                                         Register scratch2,
                                         Label* gc_required) {
  ASSERT(length > 0);

  // Allocate ascii string in new space.
  AllocateInNewSpace(SeqAsciiString::SizeFor(length),
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->ascii_string_map()));
  mov(FieldOperand(result, String::kLengthOffset),
      Immediate(Smi::FromInt(length)));
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
1195
1196
// Allocates a two-byte ConsString in new space and installs its map.
// The first/second/length/hash fields are left uninitialized.  Jumps to
// |gc_required| on failure.
void MacroAssembler::AllocateTwoByteConsString(Register result,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  // Allocate cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->cons_string_map()));
}
1213
1214
// Allocates an ascii ConsString in new space and installs its map.
// The first/second/length/hash fields are left uninitialized.  Jumps to
// |gc_required| on failure.
void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  // Allocate cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->cons_ascii_string_map()));
}
1231
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08001232
// Allocates a two-byte SlicedString in new space and installs its map.
// The parent/offset/length/hash fields are left uninitialized.  Jumps to
// |gc_required| on failure.
void MacroAssembler::AllocateTwoByteSlicedString(Register result,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  // Allocate sliced string object in new space.
  AllocateInNewSpace(SlicedString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->sliced_string_map()));
}
1249
1250
// Allocates an ascii SlicedString in new space and installs its map.
// The parent/offset/length/hash fields are left uninitialized.  Jumps to
// |gc_required| on failure.
void MacroAssembler::AllocateAsciiSlicedString(Register result,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  // Allocate sliced string object in new space.
  AllocateInNewSpace(SlicedString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->sliced_ascii_string_map()));
}
1267
1268
Ben Murdochb8e0da22011-05-16 14:20:40 +01001269// Copy memory, byte-by-byte, from source to destination. Not optimized for
1270// long or aligned copies. The contents of scratch and length are destroyed.
1271// Source and destination are incremented by length.
1272// Many variants of movsb, loop unrolling, word moves, and indexed operands
1273// have been tried here already, and this is fastest.
1274// A simpler loop is faster on small copies, but 30% slower on large ones.
1275// The cld() instruction must have been emitted, to set the direction flag(),
1276// before calling this function.
1277void MacroAssembler::CopyBytes(Register source,
1278 Register destination,
1279 Register length,
1280 Register scratch) {
1281 Label loop, done, short_string, short_loop;
1282 // Experimentation shows that the short string loop is faster if length < 10.
1283 cmp(Operand(length), Immediate(10));
1284 j(less_equal, &short_string);
1285
1286 ASSERT(source.is(esi));
1287 ASSERT(destination.is(edi));
1288 ASSERT(length.is(ecx));
1289
1290 // Because source is 4-byte aligned in our uses of this function,
1291 // we keep source aligned for the rep_movs call by copying the odd bytes
1292 // at the end of the ranges.
1293 mov(scratch, Operand(source, length, times_1, -4));
1294 mov(Operand(destination, length, times_1, -4), scratch);
1295 mov(scratch, ecx);
1296 shr(ecx, 2);
1297 rep_movs();
1298 and_(Operand(scratch), Immediate(0x3));
1299 add(destination, Operand(scratch));
1300 jmp(&done);
1301
1302 bind(&short_string);
1303 test(length, Operand(length));
1304 j(zero, &done);
1305
1306 bind(&short_loop);
1307 mov_b(scratch, Operand(source, 0));
1308 mov_b(Operand(destination, 0), scratch);
1309 inc(source);
1310 inc(destination);
1311 dec(length);
1312 j(not_zero, &short_loop);
1313
1314 bind(&done);
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08001315}
1316
Steve Blockd0582a62009-12-15 09:54:21 +00001317
// Jumps to |then_label| if the multiplication that produced |result| may
// have yielded negative zero: result == 0 while the (single) operand |op|
// is negative.  Falls through otherwise.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok);  // Non-zero result cannot be negative zero.
  test(op, Operand(op));
  j(sign, then_label);  // Zero result with a negative operand: -0.
  bind(&ok);
}
1328
1329
// Jumps to |then_label| if the operation that produced |result| may have
// yielded negative zero: result == 0 while either operand is negative
// (checked via the sign bit of op1 | op2).  Falls through otherwise.
// |scratch| is clobbered.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok);  // Non-zero result cannot be negative zero.
  // The or of the operands is negative iff at least one operand is.
  mov(scratch, Operand(op1));
  or_(scratch, Operand(op2));
  j(sign, then_label);
  bind(&ok);
}
1343
1344
// Loads the prototype that |function| would give to instances into
// |result|.  Jumps to |miss| if |function| is not a JSFunction or its
// prototype is still the hole.  |scratch| is clobbered.
void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss) {
  // Check that the receiver isn't a smi.
  JumpIfSmi(function, miss);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss);

  // Make sure that the function has an instance prototype.
  Label non_instance;
  movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
  test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance);

  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(Operand(result), Immediate(isolate()->factory()->the_hole_value()));
  j(equal, miss);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  mov(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}
1389
1390
// Emits a call to |stub|'s generated code, recording |ast_id| with the
// reloc info for deoptimization bookkeeping.
void MacroAssembler::CallStub(CodeStub* stub, unsigned ast_id) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
}
1395
1396
// Like CallStub, but propagates an allocation failure from code generation
// instead of crashing: returns the failure if the stub's code could not be
// materialized, otherwise emits the call and returns the code object.
MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Object* result;
  { MaybeObject* maybe_result = stub->TryGetCode();
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }
  call(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
  return result;
}
1406
1407
// Emits a tail call (jump) to |stub|'s generated code.
void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
}
1412
1413
// Like TailCallStub, but propagates an allocation failure from code
// generation: returns the failure if the stub's code could not be
// materialized, otherwise emits the jump and returns the code object.
MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Object* result;
  { MaybeObject* maybe_result = stub->TryGetCode();
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }
  jmp(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
  return result;
}
1423
1424
Steve Blocka7e24c12009-10-30 11:49:00 +00001425void MacroAssembler::StubReturn(int argc) {
1426 ASSERT(argc >= 1 && generating_stub());
1427 ret((argc - 1) * kPointerSize);
1428}
1429
1430
1431void MacroAssembler::IllegalOperation(int num_arguments) {
1432 if (num_arguments > 0) {
1433 add(Operand(esp), Immediate(num_arguments * kPointerSize));
1434 }
Steve Block44f0eee2011-05-26 01:26:41 +01001435 mov(eax, Immediate(isolate()->factory()->undefined_value()));
Steve Blocka7e24c12009-10-30 11:49:00 +00001436}
1437
1438
// Extracts the array index cached in a string's hash field (|hash|) and
// leaves it as a smi in |index|.  |hash| must contain a hash field with a
// cached array index.
void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it does not conflict.
  ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  // We want the smi-tagged index in key. kArrayIndexValueMask has zeros in
  // the low kHashShift bits.
  and_(hash, String::kArrayIndexValueMask);
  STATIC_ASSERT(String::kHashShift >= kSmiTagSize && kSmiTag == 0);
  if (String::kHashShift > kSmiTagSize) {
    // Shift down so that exactly kSmiTagSize low bits remain: the result
    // is the index already smi-tagged.
    shr(hash, String::kHashShift - kSmiTagSize);
  }
  if (!index.is(hash)) {
    mov(index, hash);
  }
}
1456
1457
Steve Blocka7e24c12009-10-30 11:49:00 +00001458void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
1459 CallRuntime(Runtime::FunctionForId(id), num_arguments);
1460}
1461
1462
// Calls the runtime function |id| through a CEntryStub that saves double
// registers across the call.  Argument count is taken from the function's
// declared nargs; eax/ebx are set up as the CEntryStub calling convention
// expects.
void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
  const Runtime::Function* function = Runtime::FunctionForId(id);
  Set(eax, Immediate(function->nargs));
  mov(ebx, Immediate(ExternalReference(function, isolate())));
  CEntryStub ces(1);
  ces.SaveDoubles();  // Request double-register preservation.
  CallStub(&ces);
}
1471
1472
// Non-allocating variant of CallRuntime(id): returns a MaybeObject so
// the caller can observe an allocation failure instead of triggering GC.
MaybeObject* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
                                            int num_arguments) {
  return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
}
1477
1478
// Call runtime function |f| with |num_arguments| arguments already on
// the stack.  Sets up eax (argc) and ebx (entry) per the CEntryStub
// calling convention.
void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f, isolate())));
  CEntryStub ces(1);
  CallStub(&ces);
}
1498
1499
// Like CallRuntime(f, num_arguments), but calls the stub through
// TryCallStub so a stub-allocation failure is reported to the caller as
// a MaybeObject instead of forcing a GC.
MaybeObject* MacroAssembler::TryCallRuntime(const Runtime::Function* f,
                                            int num_arguments) {
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    // Since we did not call the stub, there was no allocation failure.
    // Return some non-failure object.
    return isolate()->heap()->undefined_value();
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f, isolate())));
  CEntryStub ces(1);
  return TryCallStub(&ces);
}
1518
1519
// Call an arbitrary external (C++) entry point through the CEntryStub.
// eax: argument count, ebx: entry address — same convention as
// CallRuntime.
void MacroAssembler::CallExternalReference(ExternalReference ref,
                                           int num_arguments) {
  mov(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ref));

  CEntryStub stub(1);
  CallStub(&stub);
}
1528
1529
// Tail-call an external reference: set up the argument count and jump
// (not call) to the C entry stub.  |result_size| is unused on ia32 —
// presumably kept for signature parity with other architectures; TODO
// confirm.
void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  JumpToExternalReference(ext);
}
1540
1541
// Non-allocating variant of TailCallExternalReference; propagates a
// possible stub-allocation failure to the caller as a MaybeObject.
MaybeObject* MacroAssembler::TryTailCallExternalReference(
    const ExternalReference& ext, int num_arguments, int result_size) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  return TryJumpToExternalReference(ext);
}
1551
1552
// Tail-call the runtime function identified by |fid| via its external
// reference.
void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid, isolate()),
                            num_arguments,
                            result_size);
}
1560
1561
// Non-allocating variant of TailCallRuntime; returns a MaybeObject so
// the caller can detect allocation failure while emitting the stub.
MaybeObject* MacroAssembler::TryTailCallRuntime(Runtime::FunctionId fid,
                                                int num_arguments,
                                                int result_size) {
  return TryTailCallExternalReference(
      ExternalReference(fid, isolate()), num_arguments, result_size);
}
1568
1569
// If true, a Handle<T> returned by value from a function with cdecl calling
// convention will be returned directly as a value of location_ field in a
// register eax.
// If false, it is returned as a pointer to a preallocated by caller memory
// region. Pointer to this region should be passed to a function as an
// implicit first argument.
// NOTE: which ABIs return small structs in eax varies by platform; this
// list covers BSD-style ABIs plus MinGW/Cygwin.
#if defined(USING_BSD_ABI) || defined(__MINGW32__) || defined(__CYGWIN__)
static const bool kReturnHandlesDirectly = true;
#else
static const bool kReturnHandlesDirectly = false;
#endif
1581
1582
// Stack operand for API-call parameter |index|.  When handles are not
// returned directly, slot 0 holds the hidden out-parameter pointer, so
// visible parameters are shifted up by one (see PrepareCallApiFunction).
Operand ApiParameterOperand(int index) {
  return Operand(
      esp, (index + (kReturnHandlesDirectly ? 0 : 1)) * kPointerSize);
}
1587
1588
// Enter an API exit frame with room for |argc| arguments, plus — when
// the ABI returns handles through memory — a return-value slot and the
// hidden pointer to it.  esi is used as a callee-save scratch: zapped in
// the direct case, pointing at the output slot otherwise (consumed by
// TryCallApiFunctionAndReturn).
void MacroAssembler::PrepareCallApiFunction(int argc) {
  if (kReturnHandlesDirectly) {
    EnterApiExitFrame(argc);
    // When handles are returned directly we don't have to allocate extra
    // space for and pass an out parameter.
    if (emit_debug_code()) {
      // Zap esi so accidental use of the (absent) output pointer is
      // caught in debug builds.
      mov(esi, Immediate(BitCast<int32_t>(kZapValue)));
    }
  } else {
    // We allocate two additional slots: return value and pointer to it.
    EnterApiExitFrame(argc + 2);

    // The argument slots are filled as follows:
    //
    // n + 1: output slot
    // n: arg n
    // ...
    // 1: arg1
    // 0: pointer to the output slot

    lea(esi, Operand(esp, (argc + 1) * kPointerSize));
    mov(Operand(esp, 0 * kPointerSize), esi);
    if (emit_debug_code()) {
      // Zero the output slot so stale data is never mistaken for a
      // result in debug builds.
      mov(Operand(esi, 0), Immediate(0));
    }
  }
}
1616
1617
// Call an API (embedder) function set up by PrepareCallApiFunction,
// unwrap the returned handle into eax, tear down the HandleScope, and
// return to the JS caller dropping |stack_space| slots.  Handles:
//  - empty handle  -> undefined result,
//  - scheduled exception -> tail-call Runtime::kPromoteScheduledException,
//  - grown HandleScope   -> delete extensions before leaving the frame.
// Returns a failure MaybeObject if emitting the tail call fails.
MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn(ApiFunction* function,
                                                         int stack_space) {
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address();
  ExternalReference level_address =
      ExternalReference::handle_scope_level_address();

  // Allocate HandleScope in callee-save registers.
  // ebx: previous next-pointer, edi: previous limit (both restored below).
  mov(ebx, Operand::StaticVariable(next_address));
  mov(edi, Operand::StaticVariable(limit_address));
  add(Operand::StaticVariable(level_address), Immediate(1));

  // Call the api function!
  call(function->address(), RelocInfo::RUNTIME_ENTRY);

  if (!kReturnHandlesDirectly) {
    // PrepareCallApiFunction saved pointer to the output slot into
    // callee-save register esi.
    mov(eax, Operand(esi, 0));
  }

  Label empty_handle;
  Label prologue;
  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;

  // Check if the result handle holds 0.
  test(eax, Operand(eax));
  j(zero, &empty_handle);
  // It was non-zero.  Dereference to get the result value.
  mov(eax, Operand(eax, 0));
  bind(&prologue);
  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  mov(Operand::StaticVariable(next_address), ebx);
  sub(Operand::StaticVariable(level_address), Immediate(1));
  Assert(above_equal, "Invalid HandleScope level");
  cmp(edi, Operand::StaticVariable(limit_address));
  j(not_equal, &delete_allocated_handles);
  bind(&leave_exit_frame);

  // Check if the function scheduled an exception.
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate());
  cmp(Operand::StaticVariable(scheduled_exception_address),
      Immediate(isolate()->factory()->the_hole_value()));
  j(not_equal, &promote_scheduled_exception);
  LeaveApiExitFrame();
  ret(stack_space * kPointerSize);
  bind(&promote_scheduled_exception);
  MaybeObject* result =
      TryTailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);
  if (result->IsFailure()) {
    return result;
  }
  bind(&empty_handle);
  // It was zero; the result is undefined.
  mov(eax, isolate()->factory()->undefined_value());
  jmp(&prologue);

  // HandleScope limit has changed. Delete allocated extensions.
  ExternalReference delete_extensions =
      ExternalReference::delete_handle_scope_extensions(isolate());
  bind(&delete_allocated_handles);
  mov(Operand::StaticVariable(limit_address), edi);
  // Preserve the result value in edi while calling out to C++.
  mov(edi, eax);
  mov(Operand(esp, 0), Immediate(ExternalReference::isolate_address()));
  mov(eax, Immediate(delete_extensions));
  call(Operand(eax));
  mov(eax, edi);
  jmp(&leave_exit_frame);

  return result;
}
1695
1696
// Tail-jump into the C entry stub with |ext| as the target entry point
// (ebx per the CEntryStub convention).
void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces(1);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}
1703
1704
// Non-allocating variant of JumpToExternalReference: stub-code
// allocation failure is returned as a MaybeObject instead of forcing GC.
MaybeObject* MacroAssembler::TryJumpToExternalReference(
    const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces(1);
  return TryTailCallStub(&ces);
}
1712
1713
// Put the call kind into ecx as a smi: non-zero smi for
// CALL_AS_FUNCTION, smi zero otherwise.
void MacroAssembler::SetCallKind(Register dst, CallKind call_kind) {
  // This macro takes the dst register to make the code more readable
  // at the call sites. However, the dst register has to be ecx to
  // follow the calling convention which requires the call type to be
  // in ecx.
  ASSERT(dst.is(ecx));
  if (call_kind == CALL_AS_FUNCTION) {
    // Set to some non-zero smi by updating the least significant
    // byte.
    mov_b(Operand(dst), 1 << kSmiTagSize);
  } else {
    // Set to smi zero by clearing the register.
    xor_(dst, Operand(dst));
  }
}
1729
1730
// Emit the argument-count check shared by all Invoke* helpers.  If
// expected and actual counts provably match, falls through.  Otherwise
// routes through the ArgumentsAdaptorTrampoline: for CALL_FUNCTION the
// adaptor is called and control jumps to |done|; for a jump flag the
// adaptor is tail-jumped.  Register convention (asserted below): eax =
// actual count, ebx = expected count, edx = code to invoke, ecx = call
// kind.
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    const Operand& code_operand,
                                    Label* done,
                                    InvokeFlag flag,
                                    Label::Distance done_near,
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  bool definitely_matches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(eax, actual.immediate());
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      ASSERT(expected.reg().is(ebx));
      mov(eax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), Operand(actual.reg()));
      j(equal, &invoke);
      ASSERT(actual.reg().is(eax));
      ASSERT(expected.reg().is(ebx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (!code_constant.is_null()) {
      // The adaptor expects the entry address in edx; convert the code
      // object constant into its instruction-start address.
      mov(edx, Immediate(code_constant));
      add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_operand.is_reg(edx)) {
      mov(edx, code_operand);
    }

    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET));
      SetCallKind(ecx, call_kind);
      call(adaptor, RelocInfo::CODE_TARGET);
      call_wrapper.AfterCall();
      // The adaptor performed the actual invocation; skip the direct
      // call at the Invoke* site.
      jmp(done, done_near);
    } else {
      SetCallKind(ecx, call_kind);
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
1801
1802
// Invoke code located at |code| (a memory operand), adapting arguments
// if the expected/actual counts differ.  Calls or tail-jumps per |flag|.
void MacroAssembler::InvokeCode(const Operand& code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag,
                                const CallWrapper& call_wrapper,
                                CallKind call_kind) {
  Label done;
  InvokePrologue(expected, actual, Handle<Code>::null(), code,
                 &done, flag, Label::kNear, call_wrapper,
                 call_kind);
  if (flag == CALL_FUNCTION) {
    call_wrapper.BeforeCall(CallSize(code));
    SetCallKind(ecx, call_kind);
    call(code);
    call_wrapper.AfterCall();
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    SetCallKind(ecx, call_kind);
    jmp(code);
  }
  bind(&done);
}
1825
1826
// Invoke a code object given as a handle, relocated with |rmode|,
// adapting arguments if the expected/actual counts differ.
void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag,
                                const CallWrapper& call_wrapper,
                                CallKind call_kind) {
  Label done;
  // The prologue takes the code as a constant, so the operand slot is
  // unused; eax is a placeholder.
  Operand dummy(eax);
  InvokePrologue(expected, actual, code, dummy, &done, flag, Label::kNear,
                 call_wrapper, call_kind);
  if (flag == CALL_FUNCTION) {
    call_wrapper.BeforeCall(CallSize(code, rmode));
    SetCallKind(ecx, call_kind);
    call(code, rmode);
    call_wrapper.AfterCall();
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    SetCallKind(ecx, call_kind);
    jmp(code, rmode);
  }
  bind(&done);
}
1850
1851
// Invoke the JSFunction in edi: load its context into esi, read the
// expected parameter count from the SharedFunctionInfo, and invoke the
// function's code entry.
void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  ASSERT(fun.is(edi));
  mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  // The stored count is a smi; untag it for the adaptor check.
  SmiUntag(ebx);

  ParameterCount expected(ebx);
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, actual, flag, call_wrapper, call_kind);
}
1867
1868
// Invoke a statically known, already compiled JSFunction.  Under
// Crankshaft the call goes indirectly through the function's code field
// (so recompilation takes effect); otherwise the current code object is
// called directly.
void MacroAssembler::InvokeFunction(JSFunction* function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  ASSERT(function->is_compiled());
  // Get the function and setup the context.
  mov(edi, Immediate(Handle<JSFunction>(function)));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  ParameterCount expected(function->shared()->formal_parameter_count());
  if (V8::UseCrankshaft()) {
    // TODO(kasperl): For now, we always call indirectly through the
    // code field in the function to allow recompilation to take effect
    // without changing any of the call sites.
    InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
               expected, actual, flag, call_wrapper, call_kind);
  } else {
    Handle<Code> code(function->code());
    InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET,
               flag, call_wrapper, call_kind);
  }
}
1892
1893
// Invoke the JavaScript builtin |id| as a method.  The expected
// parameter count is faked to 0 so no adaptor frame is created; the
// builtin is responsible for its own argument handling.
void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                   InvokeFlag flag,
                                   const CallWrapper& call_wrapper) {
  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments match the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinFunction(edi, id);
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, expected, flag, call_wrapper, CALL_AS_METHOD);
}
1908
// Load the JSFunction for builtin |id| into |target| by walking
// current context -> global object -> builtins object -> function slot.
void MacroAssembler::GetBuiltinFunction(Register target,
                                        Builtins::JavaScript id) {
  // Load the JavaScript builtin function from the builtins object.
  mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
  mov(target, FieldOperand(target,
                           JSBuiltinsObject::OffsetOfFunctionWithId(id)));
}
Steve Blocka7e24c12009-10-30 11:49:00 +00001917
// Load the code entry point of builtin |id| into |target|.  Clobbers
// edi (used to hold the builtin function), so target must differ.
void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(edi));
  // Load the JavaScript builtin function from the builtins object.
  GetBuiltinFunction(edi, id);
  // Load the code entry point from the function into the target register.
  mov(target, FieldOperand(edi, JSFunction::kCodeEntryOffset));
}
1925
1926
// Load into |dst| the context |context_chain_length| levels up from the
// current context (esi).  Length 0 copies esi itself so stores through
// |dst| cannot clobber the live context register.
void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    mov(dst, Operand(esi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    for (int i = 1; i < context_chain_length; i++) {
      mov(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    }
  } else {
    // Slot is in the current function context. Move it into the
    // destination register in case we store into it (the write barrier
    // cannot be allowed to destroy the context in esi).
    mov(dst, esi);
  }

  // We should not have found a with context by walking the context chain
  // (i.e., the static scope chain and runtime context chain do not agree).
  // A variable occurring in such a scope should have slot type LOOKUP and
  // not CONTEXT.
  if (emit_debug_code()) {
    cmp(FieldOperand(dst, HeapObject::kMapOffset),
        isolate()->factory()->with_context_map());
    Check(not_equal, "Variable resolved to with context.");
  }
}
1951
1952
// Load the global-context function at slot |index| into |function|,
// going current context -> global object -> global context -> slot.
void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the global or builtins object from the current context.
  mov(function, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  // Load the global context from the global or builtins object.
  mov(function, FieldOperand(function, GlobalObject::kGlobalContextOffset));
  // Load the function from the global context.
  mov(function, Operand(function, Context::SlotOffset(index)));
}
1961
1962
// Load the initial map of the global function in |function| into |map|.
// In debug builds, verifies the loaded value really is a map.
void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map. The global functions all have initial maps.
  mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    // A map's own map is the meta map; anything else is not a map.
    CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
    jmp(&ok);
    bind(&fail);
    Abort("Global functions must have initial map");
    bind(&ok);
  }
}
1976
Steve Blockd0582a62009-12-15 09:54:21 +00001977
// Store the value in register src in the safepoint register stack
// slot for register dst.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
  mov(SafepointRegisterSlot(dst), src);
}
1983
1984
// Store an immediate value into the safepoint stack slot for register
// |dst|.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Immediate src) {
  mov(SafepointRegisterSlot(dst), src);
}
1988
1989
// Load into |dst| the value saved in the safepoint stack slot for
// register |src|.
void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  mov(dst, SafepointRegisterSlot(src));
}
1993
1994
// Stack operand addressing the safepoint save slot of |reg|, relative
// to esp.
Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return Operand(esp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}
1998
1999
Ben Murdochb0fe1622011-05-05 13:52:32 +01002000int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
2001 // The registers are pushed starting with the lowest encoding,
2002 // which means that lowest encodings are furthest away from
2003 // the stack pointer.
2004 ASSERT(reg_code >= 0 && reg_code < kNumSafepointRegisters);
2005 return kNumSafepointRegisters - reg_code - 1;
2006}
2007
2008
// Plain return: pop the return address and jump to it.
void MacroAssembler::Ret() {
  ret(0);
}
2012
2013
// Return and drop |bytes_dropped| bytes of arguments.  The ret-imm16
// form only encodes 16 bits, so larger drops rewrite the stack: pop the
// return address into |scratch|, bump esp, push it back, and ret.
void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
  if (is_uint16(bytes_dropped)) {
    ret(bytes_dropped);
  } else {
    pop(scratch);
    add(Operand(esp), Immediate(bytes_dropped));
    push(scratch);
    ret(0);
  }
}
2024
2025
// Remove |stack_elements| pointer-sized slots from the top of the
// stack; emits nothing for a non-positive count.
void MacroAssembler::Drop(int stack_elements) {
  if (stack_elements > 0) {
    add(Operand(esp), Immediate(stack_elements * kPointerSize));
  }
}
2031
2032
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002033void MacroAssembler::Move(Register dst, Register src) {
2034 if (!dst.is(src)) {
2035 mov(dst, src);
2036 }
2037}
2038
2039
// Load a handle-referenced object constant into |dst|.
void MacroAssembler::Move(Register dst, Handle<Object> value) {
  mov(dst, value);
}
2043
2044
// Set a stats counter to |value|; no code is emitted unless native-code
// counters are enabled and the counter is active.
void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
  }
}
2050
2051
// Increment a stats counter by |value| (> 0).  Uses inc for the common
// by-one case, which encodes shorter than add.
void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      inc(operand);
    } else {
      add(operand, Immediate(value));
    }
  }
}
2063
2064
// Decrement a stats counter by |value| (> 0).  Uses dec for the common
// by-one case, which encodes shorter than sub.
void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      dec(operand);
    } else {
      sub(operand, Immediate(value));
    }
  }
}
2076
2077
// Conditionally increment a stats counter when condition |cc| holds.
// Flags are saved/restored around the update (inc/add clobber them) so
// the caller's condition codes survive.
void MacroAssembler::IncrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    IncrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}
2091
2092
2093void MacroAssembler::DecrementCounter(Condition cc,
2094 StatsCounter* counter,
2095 int value) {
2096 ASSERT(value > 0);
2097 if (FLAG_native_code_counters && counter->Enabled()) {
2098 Label skip;
2099 j(NegateCondition(cc), &skip);
2100 pushfd();
2101 DecrementCounter(counter, value);
2102 popfd();
2103 bind(&skip);
2104 }
2105}
2106
2107
// Debug-build-only check: in release code (emit_debug_code() false)
// this emits nothing.
void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (emit_debug_code()) Check(cc, msg);
}
2111
2112
// Debug-only check that |elements| holds a fast-elements backing store:
// its map must be the fixed-array, fixed-double-array, or
// copy-on-write array map; otherwise abort.
void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    Factory* factory = isolate()->factory();
    Label ok;
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_double_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_cow_array_map()));
    j(equal, &ok);
    Abort("JSObject with fast elements map has slow elements");
    bind(&ok);
  }
}
2130
2131
// Emit a runtime check: continue if condition |cc| holds, abort with
// |msg| otherwise (always emitted, unlike Assert).
void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L);
  Abort(msg);
  // will not return here
  bind(&L);
}
2139
2140
// Debug aid: verify esp satisfies the OS activation-frame alignment and
// trap with int3 if not.  Emits nothing when the required alignment is
// no stricter than pointer size.
void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    Label alignment_as_expected;
    test(esp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}
2154
2155
// Emit an unconditional abort: encode |msg| as a smi-aligned pointer
// plus a smi alignment delta (so the GC never misreads the pushed
// values), call Runtime::kAbort, and trap.  Never returns.
void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  AllowStubCallsScope allow_scope(this, true);

  push(eax);
  push(Immediate(p0));
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
  int3();
}
2181
2182
// Load the instance descriptors of |map| into |descriptors|.  The map
// field at kInstanceDescriptorsOrBitField3Offset holds either a
// descriptor array or, when the map has no descriptors, bit field 3
// stored as a smi; in the smi case substitute the canonical empty
// descriptor array so callers always see a descriptor array.
void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  mov(descriptors,
      FieldOperand(map, Map::kInstanceDescriptorsOrBitField3Offset));
  Label not_smi;
  JumpIfNotSmi(descriptors, &not_smi);
  mov(descriptors, isolate()->factory()->empty_descriptor_array());
  bind(&not_smi);
}
2192
2193
// Load the double value 2^|power| into XMM register |dst|: place the
// biased exponent in |scratch| (clobbered), move it into the low lane of
// |dst|, then shift it left into the IEEE 754 exponent field, leaving a
// zero mantissa — i.e. exactly 2^power.
void MacroAssembler::LoadPowerOf2(XMMRegister dst,
                                  Register scratch,
                                  int power) {
  // The biased exponent must fit in the double's exponent field.
  ASSERT(is_uintn(power + HeapNumber::kExponentBias,
                  HeapNumber::kExponentBits));
  mov(scratch, Immediate(power + HeapNumber::kExponentBias));
  movd(dst, Operand(scratch));
  psllq(dst, HeapNumber::kMantissaBits);
}
2203
2204
// Jump to |failure| unless |instance_type| describes a sequential ASCII
// string.  Masks out everything but the string/representation/encoding
// bits and compares against the sequential-ASCII tag combination.
// |scratch| is clobbered; it may alias |instance_type|.
void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Register scratch,
    Label* failure) {
  if (!scratch.is(instance_type)) {
    mov(scratch, instance_type);
  }
  and_(scratch,
       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
  cmp(scratch, kStringTag | kSeqStringTag | kAsciiStringTag);
  j(not_equal, failure);
}
2217
2218
// Jump to |failure| unless both |object1| and |object2| are sequential
// (flat) ASCII strings.  Both scratch registers are clobbered.
void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register object1,
                                                         Register object2,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         Label* failure) {
  // Check that both objects are not smis.  Since a smi has a zero tag
  // bit, ANDing the two pointers yields a smi-tagged value if either
  // operand is a smi.
  STATIC_ASSERT(kSmiTag == 0);
  mov(scratch1, Operand(object1));
  and_(scratch1, Operand(object2));
  JumpIfSmi(scratch1, failure);

  // Load instance type for both strings.
  mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
  mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
  movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ascii strings.
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
  // Interleave bits from both instance types and compare them in one check.
  // The lea below computes scratch1 + scratch2 * 8, i.e. packs the two
  // masked type fields into disjoint 3-bit groups of one register.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  and_(scratch1, kFlatAsciiStringMask);
  and_(scratch2, kFlatAsciiStringMask);
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3));
  j(not_equal, failure);
}
2248
2249
// Reserve stack space for |num_arguments| pointer-sized argument slots
// ahead of a CallCFunction.  When the OS requires activation-frame
// alignment, esp is additionally aligned and its original value saved in
// the slot just above the arguments, so CallCFunction can restore it.
// |scratch| is clobbered.
void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
  int frame_alignment = OS::ActivationFrameAlignment();
  if (frame_alignment != 0) {
    // Make stack end at alignment and make room for num_arguments words
    // and the original value of esp.
    mov(scratch, esp);
    sub(Operand(esp), Immediate((num_arguments + 1) * kPointerSize));
    ASSERT(IsPowerOf2(frame_alignment));
    // Round esp down to the required alignment.
    and_(esp, -frame_alignment);
    // Stash the pre-call esp above the argument slots for restoration.
    mov(Operand(esp, num_arguments * kPointerSize), scratch);
  } else {
    sub(Operand(esp), Immediate(num_arguments * kPointerSize));
  }
}
2264
2265
// Call the C function at external reference |function| with
// |num_arguments| already placed on the stack by PrepareCallCFunction.
// Loads the target address into eax and delegates to the register
// overload.
void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Trashing eax is ok as it will be the return value.
  mov(Operand(eax), Immediate(function));
  CallCFunction(eax, num_arguments);
}
2272
2273
// Call the C function whose address is in |function|, then pop the
// |num_arguments| argument slots set up by PrepareCallCFunction.
void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(Operand(function));
  if (OS::ActivationFrameAlignment() != 0) {
    // PrepareCallCFunction saved the original esp just above the
    // argument slots; reload it to undo both the slots and alignment.
    mov(esp, Operand(esp, num_arguments * kPointerSize));
  } else {
    // No alignment was applied, so simply discard the argument slots.
    add(Operand(esp), Immediate(num_arguments * kPointerSize));
  }
}
2288
2289
// Construct a patcher that assembles directly over |size| bytes of
// existing code at |address|.
CodePatcher::CodePatcher(byte* address, int size)
    : address_(address),
      size_(size),
      masm_(Isolate::Current(), address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap on order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
2299
2300
// Flush the instruction cache over the patched region and verify that
// exactly |size_| bytes were emitted.
CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
2309
2310
2311} } // namespace v8::internal
Leon Clarkef7060e22010-06-03 12:02:55 +01002312
2313#endif // V8_TARGET_ARCH_IA32