blob: fcae7a2fcf78c1a4c73efc80998531309cb8bf09 [file] [log] [blame]
Steve Block1e0659c2011-05-24 12:43:12 +01001// Copyright 2011 the V8 project authors. All rights reserved.
Steve Blocka7e24c12009-10-30 11:49:00 +00002// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
29
Leon Clarkef7060e22010-06-03 12:02:55 +010030#if defined(V8_TARGET_ARCH_IA32)
31
Steve Blocka7e24c12009-10-30 11:49:00 +000032#include "bootstrapper.h"
Ben Murdoch8b112d22011-06-08 16:22:53 +010033#include "codegen.h"
Steve Blocka7e24c12009-10-30 11:49:00 +000034#include "debug.h"
35#include "runtime.h"
36#include "serialize.h"
37
38namespace v8 {
39namespace internal {
40
41// -------------------------------------------------------------------------
42// MacroAssembler implementation.
43
// Constructs a macro assembler over the given code buffer.  Stub calls are
// allowed by default and no frame is assumed to exist yet.  The isolate may
// be NULL (e.g. during snapshot-less early setup), in which case no code
// object handle is created.
MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      has_frame_(false) {
  if (isolate() != NULL) {
    // Placeholder; the real code object is attached when the code is copied.
    code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
                                  isolate());
  }
}
54
55
// Jumps to |condition_met| depending on whether |object| lies in new-space.
// Computes the object's page header address by masking off the low page bits
// into |scratch| (clobbering it), then tests the page's IN_FROM_SPACE /
// IN_TO_SPACE flag bits.  |cc| must be equal (jump when in new space) or
// not_equal (jump when not in new space).
void MacroAssembler::InNewSpace(
    Register object,
    Register scratch,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  ASSERT(cc == equal || cc == not_equal);
  if (scratch.is(object)) {
    // In-place: keep only the page-aligned part of the address.
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    mov(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  // Check that we can use a test_b: both flag bits must fit in the low byte.
  ASSERT(MemoryChunk::IN_FROM_SPACE < 8);
  ASSERT(MemoryChunk::IN_TO_SPACE < 8);
  int mask = (1 << MemoryChunk::IN_FROM_SPACE)
           | (1 << MemoryChunk::IN_TO_SPACE);
  // If non-zero, the page belongs to new-space.
  test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
         static_cast<uint8_t>(mask));
  j(cc, condition_met, condition_met_distance);
}
Steve Block6ded16b2010-05-10 14:33:55 +010079
Steve Blocka7e24c12009-10-30 11:49:00 +000080
// Records the slot |addr| in the store buffer.  On store-buffer overflow the
// StoreBufferOverflowStub is called.  |and_then| selects whether the emitted
// code returns (kReturnAtEnd) or falls through (kFallThroughAtEnd) afterwards.
// Clobbers |scratch|.
void MacroAssembler::RememberedSetHelper(
    Register object,  // Only used for debug checks.
    Register addr,
    Register scratch,
    SaveFPRegsMode save_fp,
    MacroAssembler::RememberedSetFinalAction and_then) {
  Label done;
  if (FLAG_debug_code) {
    // The remembered set only records old->new pointers, so the slot's
    // holder must not itself be in new space; trap (int3) if it is.
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
    int3();
    bind(&ok);
  }
  // Load store buffer top.
  ExternalReference store_buffer =
      ExternalReference::store_buffer_top(isolate());
  mov(scratch, Operand::StaticVariable(store_buffer));
  // Store pointer to buffer.
  mov(Operand(scratch, 0), addr);
  // Increment buffer top.
  add(scratch, Immediate(kPointerSize));
  // Write back new top of buffer.
  mov(Operand::StaticVariable(store_buffer), scratch);
  // Call stub on end of buffer.
  // Check for end of buffer.
  test(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit));
  if (and_then == kReturnAtEnd) {
    Label buffer_overflowed;
    j(not_equal, &buffer_overflowed, Label::kNear);
    ret(0);
    bind(&buffer_overflowed);
  } else {
    ASSERT(and_then == kFallThroughAtEnd);
    j(equal, &done, Label::kNear);
  }
  StoreBufferOverflowStub store_buffer_overflow =
      StoreBufferOverflowStub(save_fp);
  CallStub(&store_buffer_overflow);
  if (and_then == kReturnAtEnd) {
    ret(0);
  } else {
    ASSERT(and_then == kFallThroughAtEnd);
    bind(&done);
  }
}
126
127
// Clamps the double in |input_reg| to the byte range [0, 255] and leaves the
// integer result in |result_reg|.  Values below zero (and NaN, via the
// unordered ucomisd result) yield 0; after adding 0.5 and truncating, any
// result with bits outside the low byte yields 255.  Clobbers |scratch_reg|.
void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
                                        XMMRegister scratch_reg,
                                        Register result_reg) {
  Label done;
  ExternalReference zero_ref = ExternalReference::address_of_zero();
  movdbl(scratch_reg, Operand::StaticVariable(zero_ref));
  Set(result_reg, Immediate(0));
  // Negative (or NaN/unordered below) -> keep 0.
  ucomisd(input_reg, scratch_reg);
  j(below, &done, Label::kNear);
  ExternalReference half_ref = ExternalReference::address_of_one_half();
  movdbl(scratch_reg, Operand::StaticVariable(half_ref));
  // Round by adding 0.5 and truncating towards zero.
  addsd(scratch_reg, input_reg);
  cvttsd2si(result_reg, Operand(scratch_reg));
  // Anything outside the low byte means the value exceeded 255.
  test(result_reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  Set(result_reg, Immediate(255));
  bind(&done);
}
146
147
// Clamps the 32-bit integer in |reg| to [0, 255] in place.  Values already
// in range are left untouched; otherwise setcc/dec_b produce 0 for negative
// inputs and 255 for inputs above 255.
void MacroAssembler::ClampUint8(Register reg) {
  Label done;
  test(reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  setcc(negative, reg);  // 1 if negative, 0 if positive.
  dec_b(reg);  // 0 if negative, 255 if positive.
  bind(&done);
}
156
157
// Write-barrier helper for a store of |value| into the FixedArray |object|
// at smi |index|.  Computes the element address into |index| (clobbering it)
// and delegates to RecordWrite.  Skips the barrier entirely for smi values
// when INLINE_SMI_CHECK is requested.
void MacroAssembler::RecordWriteArray(Register object,
                                      Register value,
                                      Register index,
                                      SaveFPRegsMode save_fp,
                                      RememberedSetAction remembered_set_action,
                                      SmiCheck smi_check) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    ASSERT_EQ(0, kSmiTag);
    test(value, Immediate(kSmiTagMask));
    j(zero, &done);
  }

  // Array access: calculate the destination address in the same manner as
  // KeyedStoreIC::GenerateGeneric.  Multiply a smi by 2 to get an offset
  // into an array of words.
  Register dst = index;
  lea(dst, Operand(object, index, times_half_pointer_size,
                   FixedArray::kHeaderSize - kHeapObjectTag));

  RecordWrite(
      object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
    mov(index, Immediate(BitCast<int32_t>(kZapValue)));
  }
}
194
195
// Write-barrier helper for a store of |value| into the field at |offset|
// within |object|.  Computes the (untagged) field address into |dst|
// (clobbering it) and delegates to RecordWrite.  Skips the barrier for smi
// values when INLINE_SMI_CHECK is requested.
void MacroAssembler::RecordWriteField(
    Register object,
    int offset,
    Register value,
    Register dst,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done, Label::kNear);
  }

  // Although the object register is tagged, the offset is relative to the start
  // of the object, so the offset must be a multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize));

  lea(dst, FieldOperand(object, offset));
  if (emit_debug_code()) {
    // Verify at runtime that the computed slot address is pointer-aligned.
    Label ok;
    test_b(dst, (1 << kPointerSizeLog2) - 1);
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  RecordWrite(
      object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
    mov(dst, Immediate(BitCast<int32_t>(kZapValue)));
  }
}
238
239
// Full write barrier for a store of |value| to the slot whose address is in
// |address| (a slot inside |object|).  Skips the barrier when both the value
// page and the object page say no barrier is needed; otherwise calls
// RecordWriteStub.  The three registers must be distinct; |address| and
// |value| are clobbered under --debug-code.
void MacroAssembler::RecordWrite(Register object,
                                 Register address,
                                 Register value,
                                 SaveFPRegsMode fp_mode,
                                 RememberedSetAction remembered_set_action,
                                 SmiCheck smi_check) {
  ASSERT(!object.is(value));
  ASSERT(!object.is(address));
  ASSERT(!value.is(address));
  if (emit_debug_code()) {
    AbortIfSmi(object);
  }

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    // Nothing to record and no incremental marking: barrier is a no-op.
    return;
  }

  if (FLAG_debug_code) {
    // The slot must currently hold |value|; trap (int3) otherwise.
    Label ok;
    cmp(value, Operand(address, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    // Skip barrier if writing a smi.
    JumpIfSmi(value, &done, Label::kNear);
  }

  CheckPageFlag(value,
                value,  // Used as scratch.
                MemoryChunk::kPointersToHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);
  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

  RecordWriteStub stub(object, value, address, remembered_set_action, fp_mode);
  CallStub(&stub);

  bind(&done);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(address, Immediate(BitCast<int32_t>(kZapValue)));
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
  }
}
300
301
#ifdef ENABLE_DEBUGGER_SUPPORT
// Emits a call into the debugger: zero arguments in eax, the
// Runtime::kDebugBreak entry in ebx, dispatched through CEntryStub with a
// DEBUG_BREAK relocation so the debugger can find the call site.
void MacroAssembler::DebugBreak() {
  Set(eax, Immediate(0));
  mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak, isolate())));
  CEntryStub ces(1);
  call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}
#endif
310
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100311
Steve Blocka7e24c12009-10-30 11:49:00 +0000312void MacroAssembler::Set(Register dst, const Immediate& x) {
313 if (x.is_zero()) {
Ben Murdoch592a9fc2012-03-05 11:04:45 +0000314 xor_(dst, dst); // Shorter than mov.
Steve Blocka7e24c12009-10-30 11:49:00 +0000315 } else {
316 mov(dst, x);
317 }
318}
319
320
// Stores the immediate |x| into the memory operand |dst|.
void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}
324
325
Steve Block053d10c2011-06-13 19:13:29 +0100326bool MacroAssembler::IsUnsafeImmediate(const Immediate& x) {
327 static const int kMaxImmediateBits = 17;
328 if (x.rmode_ != RelocInfo::NONE) return false;
329 return !is_intn(x.x_, kMaxImmediateBits);
330}
331
332
// Like Set(Register, Immediate), but XOR-masks unsafe immediates with the
// JIT cookie so attacker-chosen constants never appear verbatim in the
// instruction stream, then un-masks the register at runtime.
void MacroAssembler::SafeSet(Register dst, const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    Set(dst, Immediate(x.x_ ^ jit_cookie()));
    xor_(dst, jit_cookie());
  } else {
    Set(dst, x);
  }
}
341
342
// Pushes the immediate |x|, XOR-masking unsafe immediates with the JIT
// cookie (the pushed slot is un-masked in place on the stack).
void MacroAssembler::SafePush(const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    push(Immediate(x.x_ ^ jit_cookie()));
    xor_(Operand(esp, 0), Immediate(jit_cookie()));
  } else {
    push(x);
  }
}
351
352
// Compares |with| against the root-list entry at |index|, setting flags for
// a following conditional jump.
void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  // see ROOT_ACCESSOR macro in factory.h
  Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
  cmp(with, value);
}
358
359
// Loads |heap_object|'s map into |map| and compares its instance type
// against |type| (flags set for a following conditional jump).
void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}
366
367
// Compares the instance-type byte of the map in |map| against |type|.
void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       static_cast<int8_t>(type));
}
372
373
// Jumps to |fail| unless the map's elements kind is fast (smi-only or fast
// elements).  Relies on the elements-kind ordering asserted below so a
// single unsigned compare on the bit-field byte suffices.
void MacroAssembler::CheckFastElements(Register map,
                                       Label* fail,
                                       Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
  STATIC_ASSERT(FAST_ELEMENTS == 1);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastElementValue);
  j(above, fail, distance);
}
383
384
// Jumps to |fail| unless the map's elements kind is exactly FAST_ELEMENTS:
// values at or below the smi-only maximum fail, as do values above the fast
// maximum.
void MacroAssembler::CheckFastObjectElements(Register map,
                                             Label* fail,
                                             Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
  STATIC_ASSERT(FAST_ELEMENTS == 1);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastSmiOnlyElementValue);
  j(below_equal, fail, distance);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastElementValue);
  j(above, fail, distance);
}
397
398
// Jumps to |fail| unless the map's elements kind is FAST_SMI_ONLY_ELEMENTS.
void MacroAssembler::CheckFastSmiOnlyElements(Register map,
                                              Label* fail,
                                              Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastSmiOnlyElementValue);
  j(above, fail, distance);
}
407
408
// Stores |maybe_number| into the FixedDoubleArray |elements| at smi |key|.
// Smis are untagged and converted to double; heap numbers are stored with
// NaNs canonicalized; any other object jumps to |fail|.  Uses SSE2 when
// available and requested, otherwise the x87 FPU.  Clobbers |scratch1| and
// |scratch2|.
void MacroAssembler::StoreNumberToDoubleElements(
    Register maybe_number,
    Register elements,
    Register key,
    Register scratch1,
    XMMRegister scratch2,
    Label* fail,
    bool specialize_for_processor) {
  Label smi_value, done, maybe_nan, not_nan, is_nan, have_double_value;
  JumpIfSmi(maybe_number, &smi_value, Label::kNear);

  // Non-smi: must be a heap number, else bail out to |fail|.
  CheckMap(maybe_number,
           isolate()->factory()->heap_number_map(),
           fail,
           DONT_DO_SMI_CHECK);

  // Double value, canonicalize NaN.
  uint32_t offset = HeapNumber::kValueOffset + sizeof(kHoleNanLower32);
  cmp(FieldOperand(maybe_number, offset),
      Immediate(kNaNOrInfinityLowerBoundUpper32));
  j(greater_equal, &maybe_nan, Label::kNear);

  bind(&not_nan);
  ExternalReference canonical_nan_reference =
      ExternalReference::address_of_canonical_non_hole_nan();
  if (CpuFeatures::IsSupported(SSE2) && specialize_for_processor) {
    CpuFeatures::Scope use_sse2(SSE2);
    movdbl(scratch2, FieldOperand(maybe_number, HeapNumber::kValueOffset));
    bind(&have_double_value);
    movdbl(FieldOperand(elements, key, times_4, FixedDoubleArray::kHeaderSize),
           scratch2);
  } else {
    // x87 path: value travels through the FPU stack instead of an XMM reg.
    fld_d(FieldOperand(maybe_number, HeapNumber::kValueOffset));
    bind(&have_double_value);
    fstp_d(FieldOperand(elements, key, times_4, FixedDoubleArray::kHeaderSize));
  }
  jmp(&done);

  bind(&maybe_nan);
  // Could be NaN or Infinity. If fraction is not zero, it's NaN, otherwise
  // it's an Infinity, and the non-NaN code path applies.
  j(greater, &is_nan, Label::kNear);
  cmp(FieldOperand(maybe_number, HeapNumber::kValueOffset), Immediate(0));
  j(zero, &not_nan);
  bind(&is_nan);
  // Replace the NaN with the canonical (non-hole) NaN before storing.
  if (CpuFeatures::IsSupported(SSE2) && specialize_for_processor) {
    CpuFeatures::Scope use_sse2(SSE2);
    movdbl(scratch2, Operand::StaticVariable(canonical_nan_reference));
  } else {
    fld_d(Operand::StaticVariable(canonical_nan_reference));
  }
  jmp(&have_double_value, Label::kNear);

  bind(&smi_value);
  // Value is a smi. Convert to a double and store.
  // Preserve original value.
  mov(scratch1, maybe_number);
  SmiUntag(scratch1);
  if (CpuFeatures::IsSupported(SSE2) && specialize_for_processor) {
    CpuFeatures::Scope fscope(SSE2);
    cvtsi2sd(scratch2, scratch1);
    movdbl(FieldOperand(elements, key, times_4, FixedDoubleArray::kHeaderSize),
           scratch2);
  } else {
    // Convert via the FPU: push the int through memory with fild_s.
    push(scratch1);
    fild_s(Operand(esp, 0));
    pop(scratch1);
    fstp_d(FieldOperand(elements, key, times_4, FixedDoubleArray::kHeaderSize));
  }
  bind(&done);
}
480
481
// Jumps to |fail| unless |obj|'s map is exactly |map|.  When DO_SMI_CHECK
// is requested, smis also jump to |fail| (they have no map to compare).
void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }
  cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
  j(not_equal, fail);
}
492
493
// Jumps to the code object |success| when |obj|'s map is exactly |map|;
// otherwise falls through.  When DO_SMI_CHECK is requested, smis fall
// through as well.
void MacroAssembler::DispatchMap(Register obj,
                                 Handle<Map> map,
                                 Handle<Code> success,
                                 SmiCheckType smi_check_type) {
  Label fail;
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, &fail);
  }
  cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
  j(equal, success);

  bind(&fail);
}
507
508
// Loads |heap_object|'s map and instance type, tests the not-string bit and
// returns the condition (zero) under which the object is a string.
// Clobbers |map| and |instance_type|.
Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  test(instance_type, Immediate(kIsNotStringMask));
  return zero;
}
518
519
// Loads |heap_object|'s map into |map| and jumps to |fail| unless its
// instance type is in the non-callable spec-object range.  Clobbers |map|
// and |scratch|.
void MacroAssembler::IsObjectJSObjectType(Register heap_object,
                                          Register map,
                                          Register scratch,
                                          Label* fail) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  IsInstanceJSObjectType(map, scratch, fail);
}
527
528
// Jumps to |fail| unless the instance type of the map in |map| lies in
// [FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, LAST_NONCALLABLE_SPEC_OBJECT_TYPE].
// Uses a single unsigned range check (subtract lower bound, compare width).
// Clobbers |scratch|.
void MacroAssembler::IsInstanceJSObjectType(Register map,
                                            Register scratch,
                                            Label* fail) {
  movzx_b(scratch, FieldOperand(map, Map::kInstanceTypeOffset));
  sub(scratch, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
  cmp(scratch,
      LAST_NONCALLABLE_SPEC_OBJECT_TYPE - FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
  j(above, fail);
}
538
539
// Compares the two values on top of the x87 FPU stack and pops both,
// leaving the result in the EFLAGS register.  Uses fucomip when the CPU
// supports CMOV (which implies FCOMI); otherwise falls back to
// fucompp + fnstsw/sahf, preserving eax around the status-word shuffle.
void MacroAssembler::FCmp() {
  if (CpuFeatures::IsSupported(CMOV)) {
    fucomip();
    fstp(0);  // Pop the remaining operand.
  } else {
    fucompp();
    push(eax);
    fnstsw_ax();
    sahf();
    pop(eax);
  }
}
552
553
// Debug check: aborts unless |object| is a smi or a heap number.
void MacroAssembler::AbortIfNotNumber(Register object) {
  Label ok;
  JumpIfSmi(object, &ok);  // Smis are numbers.
  cmp(FieldOperand(object, HeapObject::kMapOffset),
      isolate()->factory()->heap_number_map());
  Assert(equal, "Operand not a number");
  bind(&ok);
}
562
563
// Debug check: aborts unless |object| has a smi tag.
void MacroAssembler::AbortIfNotSmi(Register object) {
  test(object, Immediate(kSmiTagMask));
  Assert(equal, "Operand is not a smi");
}
568
569
// Debug check: aborts unless |object| is a string (non-smi with instance
// type below FIRST_NONSTRING_TYPE).  |object| is preserved via push/pop
// around the map load.
void MacroAssembler::AbortIfNotString(Register object) {
  test(object, Immediate(kSmiTagMask));
  Assert(not_equal, "Operand is not a string");
  push(object);
  mov(object, FieldOperand(object, HeapObject::kMapOffset));
  CmpInstanceType(object, FIRST_NONSTRING_TYPE);
  pop(object);
  Assert(below, "Operand is not a string");
}
579
580
// Debug check: aborts if |object| has a smi tag.
void MacroAssembler::AbortIfSmi(Register object) {
  test(object, Immediate(kSmiTagMask));
  Assert(not_equal, "Operand is a smi");
}
585
586
// Builds a standard stack frame of the given |type|: saved ebp, context
// (esi), frame-type marker and code object.  Under --debug-code, verifies
// the pushed code-object slot is not the undefined placeholder.
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, esp);
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(CodeObject()));
  if (emit_debug_code()) {
    cmp(Operand(esp, 0), Immediate(isolate()->factory()->undefined_value()));
    Check(not_equal, "code object not properly patched");
  }
}
598
599
// Tears down a frame built by EnterFrame.  Under --debug-code, verifies the
// frame marker matches the expected |type| before leaving.
void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (emit_debug_code()) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, "stack frame types must match");
  }
  leave();
}
608
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100609
// First half of exit-frame construction: pushes ebp, reserves the entry-sp
// slot, pushes the code object, and publishes ebp/esi to the isolate's
// c-entry-fp and context top addresses.
void MacroAssembler::EnterExitFramePrologue() {
  // Setup the frame structure on the stack.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(ebp);
  mov(ebp, esp);

  // Reserve room for entry stack pointer and push the code object.
  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  push(Immediate(CodeObject()));  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
                                       isolate());
  ExternalReference context_address(Isolate::kContextAddress,
                                    isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);
}
Steve Blocka7e24c12009-10-30 11:49:00 +0000631
Steve Blocka7e24c12009-10-30 11:49:00 +0000632
// Second half of exit-frame construction: reserves |argc| argument slots
// (plus space for all XMM registers when |save_doubles|), aligns esp to the
// OS frame alignment, and patches the saved entry-sp slot.
void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) {
  // Optionally save all XMM registers.
  if (save_doubles) {
    CpuFeatures::Scope scope(SSE2);
    int space = XMMRegister::kNumRegisters * kDoubleSize + argc * kPointerSize;
    sub(esp, Immediate(space));
    // XMM save area starts below the code-object and entry-sp slots.
    const int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movdbl(Operand(ebp, offset - ((i + 1) * kDoubleSize)), reg);
    }
  } else {
    sub(esp, Immediate(argc * kPointerSize));
  }

  // Get the required frame alignment for the OS.
  const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}
658
659
// Enters an exit frame for a runtime call: argc is expected in eax on entry
// and is copied to edi; esi is pointed at the arguments.  Reserves three
// slots (argc, argv, isolate) for the C call.
void MacroAssembler::EnterExitFrame(bool save_doubles) {
  EnterExitFramePrologue();

  // Setup argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, eax);
  lea(esi, Operand(ebp, eax, times_4, offset));

  // Reserve space for argc, argv and isolate.
  EnterExitFrameEpilogue(3, save_doubles);
}
671
672
// Enters an exit frame for an API call with |argc| argument slots and no
// XMM register saving.
void MacroAssembler::EnterApiExitFrame(int argc) {
  EnterExitFramePrologue();
  EnterExitFrameEpilogue(argc, false);
}
677
678
// Leaves an exit frame entered by EnterExitFrame: optionally restores XMM
// registers, restores ebp, drops the arguments plus receiver (esi holds the
// argv pointer), re-pushes the return address, and clears the isolate's
// frame-top bookkeeping.
void MacroAssembler::LeaveExitFrame(bool save_doubles) {
  // Optionally restore all XMM registers.
  if (save_doubles) {
    CpuFeatures::Scope scope(SSE2);
    const int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movdbl(reg, Operand(ebp, offset - ((i + 1) * kDoubleSize)));
    }
  }

  // Get the return address from the stack and restore the frame pointer.
  mov(ecx, Operand(ebp, 1 * kPointerSize));
  mov(ebp, Operand(ebp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  lea(esp, Operand(esi, 1 * kPointerSize));

  // Push the return address to get ready to return.
  push(ecx);

  LeaveExitFrameEpilogue();
}
702
// Shared exit-frame teardown tail: restores esi from the isolate's context
// top (clearing it in debug builds) and zeroes the c-entry frame pointer.
void MacroAssembler::LeaveExitFrameEpilogue() {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  mov(esi, Operand::StaticVariable(context_address));
#ifdef DEBUG
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
                                       isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}
716
717
// Leaves an API exit frame: unwinds esp to ebp, pops the saved frame
// pointer, then performs the shared epilogue.
void MacroAssembler::LeaveApiExitFrame() {
  mov(esp, ebp);
  pop(ebp);

  LeaveExitFrameEpilogue();
}
724
725
// Pushes a 5-word stack handler (fp, context, state, code, next) and links
// it as the isolate's current handler.  JS handlers record ebp/esi; JS-entry
// handlers record NULL/0 instead.  |handler_index| and the handler kind are
// packed into the state word via StackHandler's bit fields.
void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type,
                                    int handler_index) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // We will build up the handler from the bottom by pushing on the stack.
  // First compute the state and push the frame pointer and context.
  unsigned state = StackHandler::OffsetField::encode(handler_index);
  if (try_location == IN_JAVASCRIPT) {
    push(ebp);
    push(esi);
    state |= (type == TRY_CATCH_HANDLER)
        ? StackHandler::KindField::encode(StackHandler::TRY_CATCH)
        : StackHandler::KindField::encode(StackHandler::TRY_FINALLY);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The frame pointer does not point to a JS frame so we save NULL for
    // ebp. We expect the code throwing an exception to check ebp before
    // dereferencing it to restore the context.
    push(Immediate(0));  // NULL frame pointer.
    push(Immediate(Smi::FromInt(0)));  // No context.
    state |= StackHandler::KindField::encode(StackHandler::ENTRY);
  }

  // Push the state and the code object.
  push(Immediate(state));
  push(CodeObject());

  // Link the current handler as the next handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  push(Operand::StaticVariable(handler_address));
  // Set this new handler as the current one.
  mov(Operand::StaticVariable(handler_address), esp);
}
766
767
// Unlinks the current stack handler: pops its next-pointer back into the
// isolate's handler address and drops the remaining handler words.
void MacroAssembler::PopTryHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  pop(Operand::StaticVariable(handler_address));
  add(esp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}
774
775
// Dispatch to the handler whose code object is in edi and whose packed
// index/state word is in edx.  The exception itself is expected in eax.
void MacroAssembler::JumpToHandlerEntry() {
  // Compute the handler entry address and jump to it.  The handler table is
  // a fixed array of (smi-tagged) code offsets.
  // eax = exception, edi = code object, edx = state.
  mov(ebx, FieldOperand(edi, Code::kHandlerTableOffset));
  // Shift out the kind bits; the remaining upper bits are the table index.
  shr(edx, StackHandler::kKindWidth);
  // Load the smi-tagged code offset for that index and untag it.
  mov(edx, FieldOperand(ebx, edx, times_4, FixedArray::kHeaderSize));
  SmiUntag(edx);
  // edi = code object + instruction-header offset + handler offset.
  lea(edi, FieldOperand(edi, edx, times_1, Code::kHeaderSize));
  jmp(edi);
}
787
788
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100789void MacroAssembler::Throw(Register value) {
790 // Adjust this code if not the case.
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000791 STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
792 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
Ben Murdoch592a9fc2012-03-05 11:04:45 +0000793 STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
794 STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
795 STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
796 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);
797
798 // The exception is expected in eax.
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100799 if (!value.is(eax)) {
800 mov(eax, value);
801 }
Ben Murdoch592a9fc2012-03-05 11:04:45 +0000802 // Drop the stack pointer to the top of the top handler.
803 ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100804 mov(esp, Operand::StaticVariable(handler_address));
Ben Murdoch592a9fc2012-03-05 11:04:45 +0000805 // Restore the next handler.
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100806 pop(Operand::StaticVariable(handler_address));
Ben Murdoch592a9fc2012-03-05 11:04:45 +0000807
808 // Remove the code object and state, compute the handler address in edi.
809 pop(edi); // Code object.
810 pop(edx); // Index and state.
811
812 // Restore the context and frame pointer.
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000813 pop(esi); // Context.
814 pop(ebp); // Frame pointer.
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100815
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000816 // If the handler is a JS frame, restore the context to the frame.
Ben Murdoch592a9fc2012-03-05 11:04:45 +0000817 // (kind == ENTRY) == (ebp == 0) == (esi == 0), so we could test either
818 // ebp or esi.
Ben Murdoch257744e2011-11-30 15:57:28 +0000819 Label skip;
Ben Murdoch592a9fc2012-03-05 11:04:45 +0000820 test(esi, esi);
821 j(zero, &skip, Label::kNear);
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000822 mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100823 bind(&skip);
824
Ben Murdoch592a9fc2012-03-05 11:04:45 +0000825 JumpToHandlerEntry();
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100826}
827
828
// Throw an exception that ordinary TRY_CATCH/TRY_FINALLY handlers must not
// catch: unwind past every non-ENTRY handler to the topmost ENTRY handler
// and dispatch there.  For OUT_OF_MEMORY, additionally record the pending
// out-of-memory failure in the isolate before unwinding.
void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
                                      Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // The exception is expected in eax.
  if (type == OUT_OF_MEMORY) {
    // Set external caught exception to false.
    ExternalReference external_caught(Isolate::kExternalCaughtExceptionAddress,
                                      isolate());
    mov(Operand::StaticVariable(external_caught), Immediate(false));

    // Set pending exception and eax to out of memory exception.
    ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
                                        isolate());
    mov(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException()));
    mov(Operand::StaticVariable(pending_exception), eax);
  } else if (!value.is(eax)) {
    mov(eax, value);
  }

  // Drop the stack pointer to the top of the top stack handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  mov(esp, Operand::StaticVariable(handler_address));

  // Unwind the handlers until the top ENTRY handler is found.
  Label fetch_next, check_kind;
  jmp(&check_kind, Label::kNear);
  bind(&fetch_next);
  mov(esp, Operand(esp, StackHandlerConstants::kNextOffset));

  bind(&check_kind);
  // A zero kind field means ENTRY; any set kind bit means keep unwinding.
  STATIC_ASSERT(StackHandler::ENTRY == 0);
  test(Operand(esp, StackHandlerConstants::kStateOffset),
       Immediate(StackHandler::KindField::kMask));
  j(not_zero, &fetch_next);

  // Set the top handler address to next handler past the top ENTRY handler.
  pop(Operand::StaticVariable(handler_address));

  // Remove the code object and state, compute the handler address in edi.
  pop(edi);  // Code object.
  pop(edx);  // Index and state.

  // Clear the context pointer and frame pointer (0 was saved in the handler).
  pop(esi);
  pop(ebp);

  JumpToHandlerEntry();
}
884
885
Steve Blocka7e24c12009-10-30 11:49:00 +0000886void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
887 Register scratch,
888 Label* miss) {
889 Label same_contexts;
890
891 ASSERT(!holder_reg.is(scratch));
892
893 // Load current lexical context from the stack frame.
894 mov(scratch, Operand(ebp, StandardFrameConstants::kContextOffset));
895
896 // When generating debug code, make sure the lexical context is set.
Steve Block44f0eee2011-05-26 01:26:41 +0100897 if (emit_debug_code()) {
Ben Murdoch592a9fc2012-03-05 11:04:45 +0000898 cmp(scratch, Immediate(0));
Steve Blocka7e24c12009-10-30 11:49:00 +0000899 Check(not_equal, "we should not have an empty lexical context");
900 }
901 // Load the global context of the current context.
902 int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
903 mov(scratch, FieldOperand(scratch, offset));
904 mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));
905
906 // Check the context is a global context.
Steve Block44f0eee2011-05-26 01:26:41 +0100907 if (emit_debug_code()) {
Steve Blocka7e24c12009-10-30 11:49:00 +0000908 push(scratch);
909 // Read the first word and compare to global_context_map.
910 mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
Steve Block44f0eee2011-05-26 01:26:41 +0100911 cmp(scratch, isolate()->factory()->global_context_map());
Steve Blocka7e24c12009-10-30 11:49:00 +0000912 Check(equal, "JSGlobalObject::global_context should be a global context.");
913 pop(scratch);
914 }
915
916 // Check if both contexts are the same.
917 cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +0000918 j(equal, &same_contexts);
Steve Blocka7e24c12009-10-30 11:49:00 +0000919
920 // Compare security tokens, save holder_reg on the stack so we can use it
921 // as a temporary register.
922 //
923 // TODO(119): avoid push(holder_reg)/pop(holder_reg)
924 push(holder_reg);
925 // Check that the security token in the calling global object is
926 // compatible with the security token in the receiving global
927 // object.
928 mov(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
929
930 // Check the context is a global context.
Steve Block44f0eee2011-05-26 01:26:41 +0100931 if (emit_debug_code()) {
932 cmp(holder_reg, isolate()->factory()->null_value());
Steve Blocka7e24c12009-10-30 11:49:00 +0000933 Check(not_equal, "JSGlobalProxy::context() should not be null.");
934
935 push(holder_reg);
936 // Read the first word and compare to global_context_map(),
937 mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
Steve Block44f0eee2011-05-26 01:26:41 +0100938 cmp(holder_reg, isolate()->factory()->global_context_map());
Steve Blocka7e24c12009-10-30 11:49:00 +0000939 Check(equal, "JSGlobalObject::global_context should be a global context.");
940 pop(holder_reg);
941 }
942
943 int token_offset = Context::kHeaderSize +
944 Context::SECURITY_TOKEN_INDEX * kPointerSize;
945 mov(scratch, FieldOperand(scratch, token_offset));
946 cmp(scratch, FieldOperand(holder_reg, token_offset));
947 pop(holder_reg);
Ben Murdoch257744e2011-11-30 15:57:28 +0000948 j(not_equal, miss);
Steve Blocka7e24c12009-10-30 11:49:00 +0000949
950 bind(&same_contexts);
951}
952
953
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000954void MacroAssembler::LoadFromNumberDictionary(Label* miss,
955 Register elements,
956 Register key,
957 Register r0,
958 Register r1,
959 Register r2,
960 Register result) {
961 // Register use:
962 //
963 // elements - holds the slow-case elements of the receiver and is unchanged.
964 //
965 // key - holds the smi key on entry and is unchanged.
966 //
967 // Scratch registers:
968 //
969 // r0 - holds the untagged key on entry and holds the hash once computed.
970 //
971 // r1 - used to hold the capacity mask of the dictionary
972 //
973 // r2 - used for the index into the dictionary.
974 //
975 // result - holds the result on exit if the load succeeds and we fall through.
976
977 Label done;
978
Ben Murdoch592a9fc2012-03-05 11:04:45 +0000979 // Compute the hash code from the untagged key. This must be kept in sync
980 // with ComputeIntegerHash in utils.h.
981 //
982 // hash = ~hash + (hash << 15);
983 mov(r1, r0);
984 not_(r0);
985 shl(r1, 15);
986 add(r0, r1);
987 // hash = hash ^ (hash >> 12);
988 mov(r1, r0);
989 shr(r1, 12);
990 xor_(r0, r1);
991 // hash = hash + (hash << 2);
992 lea(r0, Operand(r0, r0, times_4, 0));
993 // hash = hash ^ (hash >> 4);
994 mov(r1, r0);
995 shr(r1, 4);
996 xor_(r0, r1);
997 // hash = hash * 2057;
998 imul(r0, r0, 2057);
999 // hash = hash ^ (hash >> 16);
1000 mov(r1, r0);
1001 shr(r1, 16);
1002 xor_(r0, r1);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001003
1004 // Compute capacity mask.
Ben Murdoch592a9fc2012-03-05 11:04:45 +00001005 mov(r1, FieldOperand(elements, NumberDictionary::kCapacityOffset));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001006 shr(r1, kSmiTagSize); // convert smi to int
1007 dec(r1);
1008
1009 // Generate an unrolled loop that performs a few probes before giving up.
1010 const int kProbes = 4;
1011 for (int i = 0; i < kProbes; i++) {
1012 // Use r2 for index calculations and keep the hash intact in r0.
1013 mov(r2, r0);
1014 // Compute the masked index: (hash + i + i * i) & mask.
1015 if (i > 0) {
Ben Murdoch592a9fc2012-03-05 11:04:45 +00001016 add(r2, Immediate(NumberDictionary::GetProbeOffset(i)));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001017 }
Ben Murdoch592a9fc2012-03-05 11:04:45 +00001018 and_(r2, r1);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001019
1020 // Scale the index by multiplying by the entry size.
Ben Murdoch592a9fc2012-03-05 11:04:45 +00001021 ASSERT(NumberDictionary::kEntrySize == 3);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001022 lea(r2, Operand(r2, r2, times_2, 0)); // r2 = r2 * 3
1023
1024 // Check if the key matches.
1025 cmp(key, FieldOperand(elements,
1026 r2,
1027 times_pointer_size,
Ben Murdoch592a9fc2012-03-05 11:04:45 +00001028 NumberDictionary::kElementsStartOffset));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001029 if (i != (kProbes - 1)) {
1030 j(equal, &done);
1031 } else {
1032 j(not_equal, miss);
1033 }
1034 }
1035
1036 bind(&done);
1037 // Check that the value is a normal propety.
1038 const int kDetailsOffset =
Ben Murdoch592a9fc2012-03-05 11:04:45 +00001039 NumberDictionary::kElementsStartOffset + 2 * kPointerSize;
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001040 ASSERT_EQ(NORMAL, 0);
1041 test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
Ben Murdoch589d6972011-11-30 16:04:58 +00001042 Immediate(PropertyDetails::TypeField::kMask << kSmiTagSize));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001043 j(not_zero, miss);
1044
1045 // Get the value at the masked, scaled index.
1046 const int kValueOffset =
Ben Murdoch592a9fc2012-03-05 11:04:45 +00001047 NumberDictionary::kElementsStartOffset + kPointerSize;
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001048 mov(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
1049}
1050
1051
Steve Blocka7e24c12009-10-30 11:49:00 +00001052void MacroAssembler::LoadAllocationTopHelper(Register result,
Steve Blocka7e24c12009-10-30 11:49:00 +00001053 Register scratch,
1054 AllocationFlags flags) {
1055 ExternalReference new_space_allocation_top =
Steve Block44f0eee2011-05-26 01:26:41 +01001056 ExternalReference::new_space_allocation_top_address(isolate());
Steve Blocka7e24c12009-10-30 11:49:00 +00001057
1058 // Just return if allocation top is already known.
1059 if ((flags & RESULT_CONTAINS_TOP) != 0) {
1060 // No use of scratch if allocation top is provided.
1061 ASSERT(scratch.is(no_reg));
1062#ifdef DEBUG
1063 // Assert that result actually contains top on entry.
1064 cmp(result, Operand::StaticVariable(new_space_allocation_top));
1065 Check(equal, "Unexpected allocation top");
1066#endif
1067 return;
1068 }
1069
1070 // Move address of new object to result. Use scratch register if available.
1071 if (scratch.is(no_reg)) {
1072 mov(result, Operand::StaticVariable(new_space_allocation_top));
1073 } else {
Ben Murdoch592a9fc2012-03-05 11:04:45 +00001074 mov(scratch, Immediate(new_space_allocation_top));
Steve Blocka7e24c12009-10-30 11:49:00 +00001075 mov(result, Operand(scratch, 0));
1076 }
1077}
1078
1079
1080void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
1081 Register scratch) {
Steve Block44f0eee2011-05-26 01:26:41 +01001082 if (emit_debug_code()) {
Steve Blockd0582a62009-12-15 09:54:21 +00001083 test(result_end, Immediate(kObjectAlignmentMask));
1084 Check(zero, "Unaligned allocation in new space");
1085 }
1086
Steve Blocka7e24c12009-10-30 11:49:00 +00001087 ExternalReference new_space_allocation_top =
Steve Block44f0eee2011-05-26 01:26:41 +01001088 ExternalReference::new_space_allocation_top_address(isolate());
Steve Blocka7e24c12009-10-30 11:49:00 +00001089
1090 // Update new top. Use scratch if available.
1091 if (scratch.is(no_reg)) {
1092 mov(Operand::StaticVariable(new_space_allocation_top), result_end);
1093 } else {
1094 mov(Operand(scratch, 0), result_end);
1095 }
1096}
1097
1098
// Allocate |object_size| bytes in new space.  On success |result| holds the
// new object (tagged when TAG_OBJECT is set) and execution falls through; on
// exhaustion (or when inline allocation is off) jumps to |gc_required|.
void MacroAssembler::AllocateInNewSpace(int object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      if (result_end.is_valid()) {
        mov(result_end, Immediate(0x7191));
      }
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Bump the top in result_end when available, otherwise in result itself
  // (undone below after the top has been stored).
  Register top_reg = result_end.is_valid() ? result_end : result;

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate());

  if (!top_reg.is(result)) {
    mov(top_reg, result);
  }
  add(top_reg, Immediate(object_size));
  j(carry, gc_required);
  cmp(top_reg, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(top_reg, scratch);

  // Tag result if requested.
  if (top_reg.is(result)) {
    // result currently holds the new top; wind it back to the object start,
    // folding the heap-object tag into the subtraction when tagging.
    if ((flags & TAG_OBJECT) != 0) {
      sub(result, Immediate(object_size - kHeapObjectTag));
    } else {
      sub(result, Immediate(object_size));
    }
  } else if ((flags & TAG_OBJECT) != 0) {
    add(result, Immediate(kHeapObjectTag));
  }
}
1152
1153
// Allocate header_size + element_count * element_size bytes in new space.
// On success |result| holds the new object (tagged when TAG_OBJECT is set);
// otherwise jumps to |gc_required|.  |element_count| is preserved.
void MacroAssembler::AllocateInNewSpace(int header_size,
                                        ScaleFactor element_size,
                                        Register element_count,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // Register element_count is not modified by the function.
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate());

  // We assume that element_count*element_size + header_size does not
  // overflow.
  lea(result_end, Operand(element_count, element_size, header_size));
  add(result_end, result);
  j(carry, gc_required);
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}
1200
1201
// Allocate |object_size| (a register holding a byte count) bytes in new
// space.  On success |result| holds the new object (tagged when TAG_OBJECT
// is set); otherwise jumps to |gc_required|.  |object_size| is preserved.
void MacroAssembler::AllocateInNewSpace(Register object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // object_size is left unchanged by this function.
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address(isolate());
  if (!object_size.is(result_end)) {
    mov(result_end, object_size);
  }
  add(result_end, result);
  j(carry, gc_required);
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}
1245
1246
1247void MacroAssembler::UndoAllocationInNewSpace(Register object) {
1248 ExternalReference new_space_allocation_top =
Steve Block44f0eee2011-05-26 01:26:41 +01001249 ExternalReference::new_space_allocation_top_address(isolate());
Steve Blocka7e24c12009-10-30 11:49:00 +00001250
1251 // Make sure the object has no tag before resetting top.
Ben Murdoch592a9fc2012-03-05 11:04:45 +00001252 and_(object, Immediate(~kHeapObjectTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00001253#ifdef DEBUG
1254 cmp(object, Operand::StaticVariable(new_space_allocation_top));
1255 Check(below, "Undo allocation of non allocated memory");
1256#endif
1257 mov(Operand::StaticVariable(new_space_allocation_top), object);
1258}
1259
1260
Steve Block3ce2e202009-11-05 08:53:23 +00001261void MacroAssembler::AllocateHeapNumber(Register result,
1262 Register scratch1,
1263 Register scratch2,
1264 Label* gc_required) {
1265 // Allocate heap number in new space.
1266 AllocateInNewSpace(HeapNumber::kSize,
1267 result,
1268 scratch1,
1269 scratch2,
1270 gc_required,
1271 TAG_OBJECT);
1272
1273 // Set the map.
1274 mov(FieldOperand(result, HeapObject::kMapOffset),
Steve Block44f0eee2011-05-26 01:26:41 +01001275 Immediate(isolate()->factory()->heap_number_map()));
Steve Block3ce2e202009-11-05 08:53:23 +00001276}
1277
1278
Steve Blockd0582a62009-12-15 09:54:21 +00001279void MacroAssembler::AllocateTwoByteString(Register result,
1280 Register length,
1281 Register scratch1,
1282 Register scratch2,
1283 Register scratch3,
1284 Label* gc_required) {
1285 // Calculate the number of bytes needed for the characters in the string while
1286 // observing object alignment.
1287 ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
Steve Blockd0582a62009-12-15 09:54:21 +00001288 ASSERT(kShortSize == 2);
Leon Clarkee46be812010-01-19 14:06:41 +00001289 // scratch1 = length * 2 + kObjectAlignmentMask.
1290 lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
Ben Murdoch592a9fc2012-03-05 11:04:45 +00001291 and_(scratch1, Immediate(~kObjectAlignmentMask));
Steve Blockd0582a62009-12-15 09:54:21 +00001292
1293 // Allocate two byte string in new space.
1294 AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
1295 times_1,
1296 scratch1,
1297 result,
1298 scratch2,
1299 scratch3,
1300 gc_required,
1301 TAG_OBJECT);
1302
1303 // Set the map, length and hash field.
1304 mov(FieldOperand(result, HeapObject::kMapOffset),
Steve Block44f0eee2011-05-26 01:26:41 +01001305 Immediate(isolate()->factory()->string_map()));
Steve Block6ded16b2010-05-10 14:33:55 +01001306 mov(scratch1, length);
1307 SmiTag(scratch1);
1308 mov(FieldOperand(result, String::kLengthOffset), scratch1);
Steve Blockd0582a62009-12-15 09:54:21 +00001309 mov(FieldOperand(result, String::kHashFieldOffset),
1310 Immediate(String::kEmptyHashField));
1311}
1312
1313
// Allocate a sequential ASCII string of |length| characters in new space and
// initialize its map, length (as a smi), and hash field.  Jumps to
// |gc_required| on failure.  |length| is preserved.
void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, length);
  ASSERT(kCharSize == 1);
  // Round the byte count up to the next object-alignment boundary.
  add(scratch1, Immediate(kObjectAlignmentMask));
  and_(scratch1, Immediate(~kObjectAlignmentMask));

  // Allocate ascii string in new space.
  AllocateInNewSpace(SeqAsciiString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->ascii_string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
1347
1348
Iain Merrick9ac36c92010-09-13 15:29:50 +01001349void MacroAssembler::AllocateAsciiString(Register result,
1350 int length,
1351 Register scratch1,
1352 Register scratch2,
1353 Label* gc_required) {
1354 ASSERT(length > 0);
1355
1356 // Allocate ascii string in new space.
1357 AllocateInNewSpace(SeqAsciiString::SizeFor(length),
1358 result,
1359 scratch1,
1360 scratch2,
1361 gc_required,
1362 TAG_OBJECT);
1363
1364 // Set the map, length and hash field.
1365 mov(FieldOperand(result, HeapObject::kMapOffset),
Steve Block44f0eee2011-05-26 01:26:41 +01001366 Immediate(isolate()->factory()->ascii_string_map()));
Iain Merrick9ac36c92010-09-13 15:29:50 +01001367 mov(FieldOperand(result, String::kLengthOffset),
1368 Immediate(Smi::FromInt(length)));
1369 mov(FieldOperand(result, String::kHashFieldOffset),
1370 Immediate(String::kEmptyHashField));
1371}
1372
1373
Ben Murdoch589d6972011-11-30 16:04:58 +00001374void MacroAssembler::AllocateTwoByteConsString(Register result,
Steve Blockd0582a62009-12-15 09:54:21 +00001375 Register scratch1,
1376 Register scratch2,
1377 Label* gc_required) {
1378 // Allocate heap number in new space.
1379 AllocateInNewSpace(ConsString::kSize,
1380 result,
1381 scratch1,
1382 scratch2,
1383 gc_required,
1384 TAG_OBJECT);
1385
1386 // Set the map. The other fields are left uninitialized.
1387 mov(FieldOperand(result, HeapObject::kMapOffset),
Steve Block44f0eee2011-05-26 01:26:41 +01001388 Immediate(isolate()->factory()->cons_string_map()));
Steve Blockd0582a62009-12-15 09:54:21 +00001389}
1390
1391
// Allocate an ASCII ConsString object in new space and set its map.  The
// other fields are left uninitialized.  Jumps to |gc_required| on failure.
void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  // Allocate the cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map.  The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->cons_ascii_string_map()));
}
1408
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08001409
Ben Murdoch589d6972011-11-30 16:04:58 +00001410void MacroAssembler::AllocateTwoByteSlicedString(Register result,
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001411 Register scratch1,
1412 Register scratch2,
1413 Label* gc_required) {
1414 // Allocate heap number in new space.
1415 AllocateInNewSpace(SlicedString::kSize,
1416 result,
1417 scratch1,
1418 scratch2,
1419 gc_required,
1420 TAG_OBJECT);
1421
1422 // Set the map. The other fields are left uninitialized.
1423 mov(FieldOperand(result, HeapObject::kMapOffset),
1424 Immediate(isolate()->factory()->sliced_string_map()));
1425}
1426
1427
// Allocate an ASCII SlicedString object in new space and set its map.  The
// other fields are left uninitialized.  Jumps to |gc_required| on failure.
void MacroAssembler::AllocateAsciiSlicedString(Register result,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  // Allocate the sliced string object in new space.
  AllocateInNewSpace(SlicedString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map.  The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->sliced_ascii_string_map()));
}
1444
1445
// Copy memory, byte-by-byte, from source to destination.  Not optimized for
// long or aligned copies.  The contents of scratch and length are destroyed.
// Source and destination are incremented by length.
// Many variants of movsb, loop unrolling, word moves, and indexed operands
// have been tried here already, and this is fastest.
// A simpler loop is faster on small copies, but 30% slower on large ones.
// The cld() instruction must have been emitted, to set the direction flag(),
// before calling this function.
void MacroAssembler::CopyBytes(Register source,
                               Register destination,
                               Register length,
                               Register scratch) {
  Label loop, done, short_string, short_loop;
  // Experimentation shows that the short string loop is faster if length < 10.
  cmp(length, Immediate(10));
  j(less_equal, &short_string);

  // The rep_movs path below hard-codes the x86 string-instruction registers.
  ASSERT(source.is(esi));
  ASSERT(destination.is(edi));
  ASSERT(length.is(ecx));

  // Because source is 4-byte aligned in our uses of this function,
  // we keep source aligned for the rep_movs call by copying the odd bytes
  // at the end of the ranges.
  mov(scratch, Operand(source, length, times_1, -4));
  mov(Operand(destination, length, times_1, -4), scratch);
  // Copy the aligned prefix one dword at a time: rep_movs moves ecx dwords
  // and advances esi/edi by 4 * (length / 4).
  mov(scratch, ecx);
  shr(ecx, 2);
  rep_movs();
  // Advance destination past the (length % 4) trailing bytes covered by the
  // word store above.
  // NOTE(review): source is not advanced by the residual here, so after the
  // long path source has moved by (length & ~3), not the full length the
  // header comment promises — confirm whether callers rely on source's
  // exact final value.
  and_(scratch, Immediate(0x3));
  add(destination, scratch);
  jmp(&done);

  // Short copies (length <= 10): plain byte-by-byte loop.
  bind(&short_string);
  test(length, length);
  j(zero, &done);

  bind(&short_loop);
  mov_b(scratch, Operand(source, 0));
  mov_b(Operand(destination, 0), scratch);
  inc(source);
  inc(destination);
  dec(length);
  j(not_zero, &short_loop);

  bind(&done);
}
1493
Steve Blockd0582a62009-12-15 09:54:21 +00001494
// Fill the memory range [start_offset, end_offset) with filler, one
// pointer-sized word at a time.  start_offset ends up equal to end_offset;
// end_offset and filler are unchanged.
void MacroAssembler::InitializeFieldsWithFiller(Register start_offset,
                                                Register end_offset,
                                                Register filler) {
  Label loop, entry;
  // Enter at the loop condition so an empty range stores nothing.
  jmp(&entry);
  bind(&loop);
  mov(Operand(start_offset, 0), filler);
  add(start_offset, Immediate(kPointerSize));
  bind(&entry);
  cmp(start_offset, end_offset);
  j(less, &loop);
}
1507
1508
// Test a single bit of a smi-tagged bit field stored at field_offset in
// object, setting the zero flag accordingly (follow with j(not_zero, ...)
// or j(zero, ...)).  Only the single byte containing the bit is read.
void MacroAssembler::BooleanBitTest(Register object,
                                    int field_offset,
                                    int bit_index) {
  // Skip over the smi tag bits to address the payload bit.
  bit_index += kSmiTagSize + kSmiShiftSize;
  ASSERT(IsPowerOf2(kBitsPerByte));
  // Locate the byte holding the bit, and the bit's position within it.
  int byte_index = bit_index / kBitsPerByte;
  int byte_bit_index = bit_index & (kBitsPerByte - 1);
  test_b(FieldOperand(object, field_offset + byte_index),
         static_cast<byte>(1 << byte_bit_index));
}
1519
1520
1521
// Jump to then_label if result is zero while op is negative, i.e. the
// mathematically correct result would have been -0.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  test(result, result);
  j(not_zero, &ok);
  // result is zero: the true result is -0 iff op has its sign bit set.
  test(op, op);
  j(sign, then_label);
  bind(&ok);
}
1532
1533
// Jump to then_label if result is zero while either op1 or op2 is negative,
// i.e. the mathematically correct result would have been -0.  scratch is
// clobbered.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  test(result, result);
  j(not_zero, &ok);
  // result is zero: OR the operands so the sign flag reflects whether
  // either one has its sign bit set.
  mov(scratch, op1);
  or_(scratch, op2);
  j(sign, then_label);
  bind(&ok);
}
1547
1548
// Try to load into result the prototype that instances of the given
// function would receive.  Jumps to miss if function is a smi or not a
// JSFunction, if (optionally) it is a bound function, or if its prototype
// is still the hole.  scratch is clobbered.
void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss,
                                             bool miss_on_bound_function) {
  // Check that the receiver isn't a smi.
  JumpIfSmi(function, miss);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss);

  if (miss_on_bound_function) {
    // If a bound function, go to miss label.  The bound bit lives in the
    // shared function info's compiler hints field.
    mov(scratch,
        FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
    BooleanBitTest(scratch, SharedFunctionInfo::kCompilerHintsOffset,
                   SharedFunctionInfo::kBoundFunction);
    j(not_zero, miss);
  }

  // Make sure that the function has an instance prototype.
  Label non_instance;
  movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
  test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance);

  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(result, Immediate(isolate()->factory()->the_hole_value()));
  j(equal, miss);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  mov(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}
1603
1604
// Emit a call to the given code stub, recording ast_id for deoptimization
// bookkeeping.
void MacroAssembler::CallStub(CodeStub* stub, unsigned ast_id) {
  ASSERT(AllowThisStubCall(stub));  // Calls are not allowed in some stubs.
  call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
}
1609
1610
// Emit a tail call (jump, no return address pushed) to the given code stub.
void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe());
  jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
}
1615
1616
// Return from a stub, popping (argc - 1) arguments in addition to the
// return address.  Only valid while generating a stub.
void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}
1621
1622
Ben Murdoch592a9fc2012-03-05 11:04:45 +00001623bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
1624 if (!has_frame_ && stub->SometimesSetsUpAFrame()) return false;
1625 return allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe();
1626}
1627
1628
// Handle a runtime call made with the wrong number of arguments: drop the
// arguments from the stack and produce undefined in eax.
void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    add(esp, Immediate(num_arguments * kPointerSize));
  }
  mov(eax, Immediate(isolate()->factory()->undefined_value()));
}
1635
1636
// Extract the cached array index from a string hash field, leaving it in
// index as a smi.  hash is clobbered unless it is the same register.
void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it does not conflict.
  ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  // We want the smi-tagged index in key. kArrayIndexValueMask has zeros in
  // the low kHashShift bits.
  and_(hash, String::kArrayIndexValueMask);
  STATIC_ASSERT(String::kHashShift >= kSmiTagSize && kSmiTag == 0);
  // Shift just far enough that the remaining low bits form the smi tag.
  if (String::kHashShift > kSmiTagSize) {
    shr(hash, String::kHashShift - kSmiTagSize);
  }
  if (!index.is(hash)) {
    mov(index, hash);
  }
}
1654
1655
// Call the runtime function identified by id with num_arguments arguments.
void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}
1659
1660
// Call the runtime function identified by id through a C entry stub that
// saves the FP registers (kSaveFPRegs) around the call.  The function's
// declared arity supplies the argument count in eax.
void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
  const Runtime::Function* function = Runtime::FunctionForId(id);
  Set(eax, Immediate(function->nargs));
  mov(ebx, Immediate(ExternalReference(function, isolate())));
  CEntryStub ces(1, kSaveFPRegs);
  CallStub(&ces);
}
1668
1669
// Call the C++ runtime function f with num_arguments arguments already on
// the stack.  If the count contradicts f's declared arity, emit the
// IllegalOperation fallback instead of the call.
void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f, isolate())));
  CEntryStub ces(1);
  CallStub(&ces);
}
1689
1690
// Call an external (C) function through the C entry stub, passing the
// argument count in eax and the target in ebx.
void MacroAssembler::CallExternalReference(ExternalReference ref,
                                           int num_arguments) {
  mov(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ref));

  CEntryStub stub(1);
  CallStub(&stub);
}
1699
1700
// Tail call an external (C) function through the C entry stub.  eax
// receives the argument count; result_size is accepted for interface parity
// but not used by this ia32 implementation.
void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  JumpToExternalReference(ext);
}
1711
1712
// Tail call the runtime function identified by fid.
void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid, isolate()),
                            num_arguments,
                            result_size);
}
1720
1721
// If true, a Handle<T> returned by value from a function with cdecl calling
// convention will be returned directly as the value of its location_ field
// in register eax.
// If false, it is returned as a pointer to a memory region preallocated by
// the caller.  A pointer to this region must be passed to the function as
// an implicit first argument (the hidden struct-return parameter).
#if defined(USING_BSD_ABI) || defined(__MINGW32__) || defined(__CYGWIN__)
static const bool kReturnHandlesDirectly = true;
#else
static const bool kReturnHandlesDirectly = false;
#endif
1733
1734
1735Operand ApiParameterOperand(int index) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001736 return Operand(
1737 esp, (index + (kReturnHandlesDirectly ? 0 : 1)) * kPointerSize);
John Reck59135872010-11-02 12:39:01 -07001738}
1739
1740
// Set up an API exit frame for a direct call to an API function with argc
// stack arguments.  In the indirect-handle configuration, two extra slots
// are reserved (the output slot and a pointer to it) and esi is left
// pointing at the output slot for CallApiFunctionAndReturn to read.
void MacroAssembler::PrepareCallApiFunction(int argc) {
  if (kReturnHandlesDirectly) {
    EnterApiExitFrame(argc);
    // When handles are returned directly we don't have to allocate extra
    // space for and pass an out parameter.
    if (emit_debug_code()) {
      // esi is unused in this mode; poison it so stray reads fail loudly.
      mov(esi, Immediate(BitCast<int32_t>(kZapValue)));
    }
  } else {
    // We allocate two additional slots: return value and pointer to it.
    EnterApiExitFrame(argc + 2);

    // The argument slots are filled as follows:
    //
    //   n + 1: output slot
    //   n: arg n
    //   ...
    //   1: arg1
    //   0: pointer to the output slot

    lea(esi, Operand(esp, (argc + 1) * kPointerSize));
    mov(Operand(esp, 0 * kPointerSize), esi);
    if (emit_debug_code()) {
      // Zero the output slot so a missing store is detectable.
      mov(Operand(esi, 0), Immediate(0));
    }
  }
}
1768
1769
// Call the API function at function_address, then perform the required
// follow-up: close the topmost HandleScope, dereference the returned handle
// (or produce undefined for an empty handle), propagate any scheduled
// exception, delete HandleScope extensions if the scope limit moved, leave
// the API exit frame, and return dropping stack_space pointer-sized
// arguments.  PrepareCallApiFunction must have been emitted first.
// Clobbers ebx, edi and (in the indirect-handle mode) reads esi.
void MacroAssembler::CallApiFunctionAndReturn(Address function_address,
                                              int stack_space) {
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address();
  ExternalReference level_address =
      ExternalReference::handle_scope_level_address();

  // Allocate HandleScope in callee-save registers: ebx = saved next,
  // edi = saved limit, and bump the nesting level.
  mov(ebx, Operand::StaticVariable(next_address));
  mov(edi, Operand::StaticVariable(limit_address));
  add(Operand::StaticVariable(level_address), Immediate(1));

  // Call the api function.
  call(function_address, RelocInfo::RUNTIME_ENTRY);

  if (!kReturnHandlesDirectly) {
    // PrepareCallApiFunction saved pointer to the output slot into
    // callee-save register esi.
    mov(eax, Operand(esi, 0));
  }

  Label empty_handle;
  Label prologue;
  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;

  // Check if the result handle holds 0.
  test(eax, eax);
  j(zero, &empty_handle);
  // It was non-zero. Dereference to get the result value.
  mov(eax, Operand(eax, 0));
  bind(&prologue);
  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  mov(Operand::StaticVariable(next_address), ebx);
  sub(Operand::StaticVariable(level_address), Immediate(1));
  Assert(above_equal, "Invalid HandleScope level");
  // If the limit moved, extensions were allocated and must be deleted.
  cmp(edi, Operand::StaticVariable(limit_address));
  j(not_equal, &delete_allocated_handles);
  bind(&leave_exit_frame);

  // Check if the function scheduled an exception.
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate());
  cmp(Operand::StaticVariable(scheduled_exception_address),
      Immediate(isolate()->factory()->the_hole_value()));
  j(not_equal, &promote_scheduled_exception);
  LeaveApiExitFrame();
  ret(stack_space * kPointerSize);
  bind(&promote_scheduled_exception);
  TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);

  bind(&empty_handle);
  // It was zero; the result is undefined.
  mov(eax, isolate()->factory()->undefined_value());
  jmp(&prologue);

  // HandleScope limit has changed. Delete allocated extensions.
  ExternalReference delete_extensions =
      ExternalReference::delete_handle_scope_extensions(isolate());
  bind(&delete_allocated_handles);
  mov(Operand::StaticVariable(limit_address), edi);
  // Preserve the result value in edi across the C call below.
  mov(edi, eax);
  mov(Operand(esp, 0), Immediate(ExternalReference::isolate_address()));
  mov(eax, Immediate(delete_extensions));
  call(eax);
  mov(eax, edi);
  jmp(&leave_exit_frame);
}
1842
1843
// Jump to the C entry runtime stub with the entry point for ext in ebx.
void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces(1);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}
1850
1851
// Load the call-kind marker (a smi) into dst for a following invocation:
// non-zero for CALL_AS_FUNCTION, smi zero otherwise.
void MacroAssembler::SetCallKind(Register dst, CallKind call_kind) {
  // This macro takes the dst register to make the code more readable
  // at the call sites. However, the dst register has to be ecx to
  // follow the calling convention which requires the call type to be
  // in ecx.
  ASSERT(dst.is(ecx));
  if (call_kind == CALL_AS_FUNCTION) {
    // Set to some non-zero smi by updating the least significant
    // byte.
    mov_b(dst, 1 << kSmiTagSize);
  } else {
    // Set to smi zero by clearing the register.
    xor_(dst, dst);
  }
}
1867
1868
// Shared argument-count check for the Invoke* helpers.  Falls through (or
// jumps to done) when the expected and actual argument counts are known or
// found to match; otherwise loads eax (actual) and ebx (expected) per the
// adaptor's register protocol, puts the code to invoke in edx, and calls or
// jumps to the arguments adaptor trampoline.
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    const Operand& code_operand,
                                    Label* done,
                                    InvokeFlag flag,
                                    Label::Distance done_near,
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  bool definitely_matches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(eax, actual.immediate());
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      ASSERT(expected.reg().is(ebx));
      mov(eax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), actual.reg());
      j(equal, &invoke);
      ASSERT(actual.reg().is(eax));
      ASSERT(expected.reg().is(ebx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        isolate()->builtins()->ArgumentsAdaptorTrampoline();
    // Put the code to invoke in edx, where the adaptor expects it.
    if (!code_constant.is_null()) {
      mov(edx, Immediate(code_constant));
      add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_operand.is_reg(edx)) {
      mov(edx, code_operand);
    }

    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET));
      SetCallKind(ecx, call_kind);
      call(adaptor, RelocInfo::CODE_TARGET);
      call_wrapper.AfterCall();
      // The adaptor performed the invocation; skip the direct call path.
      jmp(done, done_near);
    } else {
      SetCallKind(ecx, call_kind);
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
1939
1940
// Invoke the code at the given operand with the expected/actual argument
// counts, going through the arguments adaptor when they differ.
void MacroAssembler::InvokeCode(const Operand& code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag,
                                const CallWrapper& call_wrapper,
                                CallKind call_kind) {
  // You can't call a function without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  Label done;
  InvokePrologue(expected, actual, Handle<Code>::null(), code,
                 &done, flag, Label::kNear, call_wrapper,
                 call_kind);
  if (flag == CALL_FUNCTION) {
    call_wrapper.BeforeCall(CallSize(code));
    SetCallKind(ecx, call_kind);
    call(code);
    call_wrapper.AfterCall();
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    SetCallKind(ecx, call_kind);
    jmp(code);
  }
  bind(&done);
}
1966
1967
// Invoke a known code object with the given relocation mode, going through
// the arguments adaptor when expected and actual argument counts differ.
void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag,
                                const CallWrapper& call_wrapper,
                                CallKind call_kind) {
  // You can't call a function without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  Label done;
  // Placeholder operand: InvokePrologue uses the code_constant path here.
  Operand dummy(eax, 0);
  InvokePrologue(expected, actual, code, dummy, &done, flag, Label::kNear,
                 call_wrapper, call_kind);
  if (flag == CALL_FUNCTION) {
    call_wrapper.BeforeCall(CallSize(code, rmode));
    SetCallKind(ecx, call_kind);
    call(code, rmode);
    call_wrapper.AfterCall();
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    SetCallKind(ecx, call_kind);
    jmp(code, rmode);
  }
  bind(&done);
}
1994
1995
// Invoke the JSFunction in edi with the given actual argument count.  The
// expected count is read from the shared function info; edx, esi and ebx
// are clobbered by the setup.
void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  // You can't call a function without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  ASSERT(fun.is(edi));
  mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  // The formal parameter count is stored as a smi; untag it.
  SmiUntag(ebx);

  ParameterCount expected(ebx);
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, actual, flag, call_wrapper, call_kind);
}
2014
2015
// Invoke a known JSFunction with the given actual argument count.  The
// function object is materialized into edi and its context into esi.
void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  // You can't call a function without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  // Get the function and setup the context.
  mov(edi, Immediate(function));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  ParameterCount expected(function->shared()->formal_parameter_count());
  // We call indirectly through the code field in the function to
  // allow recompilation to take effect without changing any of the
  // call sites.
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, actual, flag, call_wrapper, call_kind);
}
2035
2036
// Invoke the specified JavaScript builtin, loading its function object into
// edi first.
void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                   InvokeFlag flag,
                                   const CallWrapper& call_wrapper) {
  // You can't call a builtin without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  // Rely on the assertion to check that the number of provided
  // arguments match the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinFunction(edi, id);
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, expected, flag, call_wrapper, CALL_AS_METHOD);
}
2051
Ben Murdoch592a9fc2012-03-05 11:04:45 +00002052
// Load the JSFunction for the given JavaScript builtin into target.
void MacroAssembler::GetBuiltinFunction(Register target,
                                        Builtins::JavaScript id) {
  // Load the JavaScript builtin function from the builtins object.
  mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
  mov(target, FieldOperand(target,
                           JSBuiltinsObject::OffsetOfFunctionWithId(id)));
}
Steve Blocka7e24c12009-10-30 11:49:00 +00002061
Ben Murdoch592a9fc2012-03-05 11:04:45 +00002062
// Load the code entry point of the given JavaScript builtin into target.
// Clobbers edi (it receives the builtin's function object).
void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(edi));
  // Load the JavaScript builtin function from the builtins object.
  GetBuiltinFunction(edi, id);
  // Load the code entry point from the function into the target register.
  mov(target, FieldOperand(edi, JSFunction::kCodeEntryOffset));
}
2070
2071
// Load into dst the context found context_chain_length links up the context
// chain from the current context in esi.  A length of zero yields the
// current context itself (copied into dst so stores cannot clobber esi).
void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    mov(dst, Operand(esi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    for (int i = 1; i < context_chain_length; i++) {
      mov(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    }
  } else {
    // Slot is in the current function context. Move it into the
    // destination register in case we store into it (the write barrier
    // cannot be allowed to destroy the context in esi).
    mov(dst, esi);
  }

  // We should not have found a with context by walking the context chain
  // (i.e., the static scope chain and runtime context chain do not agree).
  // A variable occurring in such a scope should have slot type LOOKUP and
  // not CONTEXT.
  if (emit_debug_code()) {
    cmp(FieldOperand(dst, HeapObject::kMapOffset),
        isolate()->factory()->with_context_map());
    Check(not_equal, "Variable resolved to with context.");
  }
}
2096
2097
// Load into function the global-context function stored at the given slot
// index.
void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the global or builtins object from the current context.
  mov(function, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  // Load the global context from the global or builtins object.
  mov(function, FieldOperand(function, GlobalObject::kGlobalContextOffset));
  // Load the function from the global context.
  mov(function, Operand(function, Context::SlotOffset(index)));
}
2106
2107
// Load the initial map of a global function into map.  In debug code,
// verify that the loaded value really is a map and abort otherwise.
void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map. The global functions all have initial maps.
  mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
    jmp(&ok);
    bind(&fail);
    Abort("Global functions must have initial map");
    bind(&ok);
  }
}
2121
Steve Blockd0582a62009-12-15 09:54:21 +00002122
// Store the value in register src in the safepoint register stack
// slot for register dst.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
  mov(SafepointRegisterSlot(dst), src);
}
2128
2129
// Store the immediate src in the safepoint register stack slot for
// register dst.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Immediate src) {
  mov(SafepointRegisterSlot(dst), src);
}
2133
2134
// Load dst from the safepoint register stack slot that belongs to
// register src.
void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  mov(dst, SafepointRegisterSlot(src));
}
2138
2139
// Build the esp-relative operand addressing |reg|'s slot in the
// safepoint register save area.
Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return Operand(esp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}
2143
2144
Ben Murdochb0fe1622011-05-05 13:52:32 +01002145int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
2146 // The registers are pushed starting with the lowest encoding,
2147 // which means that lowest encodings are furthest away from
2148 // the stack pointer.
2149 ASSERT(reg_code >= 0 && reg_code < kNumSafepointRegisters);
2150 return kNumSafepointRegisters - reg_code - 1;
2151}
2152
2153
// Return to the caller without popping any argument bytes.
void MacroAssembler::Ret() {
  ret(0);
}
2157
2158
Steve Block1e0659c2011-05-24 12:43:12 +01002159void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
2160 if (is_uint16(bytes_dropped)) {
2161 ret(bytes_dropped);
2162 } else {
2163 pop(scratch);
Ben Murdoch592a9fc2012-03-05 11:04:45 +00002164 add(esp, Immediate(bytes_dropped));
Steve Block1e0659c2011-05-24 12:43:12 +01002165 push(scratch);
2166 ret(0);
2167 }
2168}
2169
2170
Leon Clarkee46be812010-01-19 14:06:41 +00002171void MacroAssembler::Drop(int stack_elements) {
2172 if (stack_elements > 0) {
Ben Murdoch592a9fc2012-03-05 11:04:45 +00002173 add(esp, Immediate(stack_elements * kPointerSize));
Leon Clarkee46be812010-01-19 14:06:41 +00002174 }
2175}
2176
2177
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002178void MacroAssembler::Move(Register dst, Register src) {
2179 if (!dst.is(src)) {
2180 mov(dst, src);
2181 }
2182}
2183
2184
// Load the object referred to by |value| into dst.
void MacroAssembler::Move(Register dst, Handle<Object> value) {
  mov(dst, value);
}
2188
2189
// Set a native-code stats counter to |value|. Emits nothing unless
// native code counters are enabled and this counter is active.
void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
  }
}
2195
2196
// Add |value| (must be positive) to a native-code stats counter.
// Emits nothing unless counters are enabled and this one is active.
void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      // inc has a more compact encoding than add with an immediate.
      inc(operand);
    } else {
      add(operand, Immediate(value));
    }
  }
}
2208
2209
// Subtract |value| (must be positive) from a native-code stats counter.
// Emits nothing unless counters are enabled and this one is active.
void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      // dec has a more compact encoding than sub with an immediate.
      dec(operand);
    } else {
      sub(operand, Immediate(value));
    }
  }
}
2221
2222
// Increment a stats counter only when condition cc holds at runtime.
// pushfd/popfd save and restore EFLAGS around the update so the
// condition flags are still valid for the caller afterwards.
void MacroAssembler::IncrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    IncrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}
2236
2237
// Decrement a stats counter only when condition cc holds at runtime.
// pushfd/popfd save and restore EFLAGS around the update so the
// condition flags are still valid for the caller afterwards.
void MacroAssembler::DecrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    DecrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}
2251
2252
// In debug-code mode only, abort with |msg| if condition cc does not
// hold. Emits nothing in release code.
void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (emit_debug_code()) Check(cc, msg);
}
2256
2257
// In debug-code mode, abort unless |elements| has one of the maps used
// for fast elements backing stores: FixedArray, FixedDoubleArray, or
// copy-on-write FixedArray.
void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    Factory* factory = isolate()->factory();
    Label ok;
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_double_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_cow_array_map()));
    j(equal, &ok);
    Abort("JSObject with fast elements map has slow elements");
    bind(&ok);
  }
}
2275
2276
// Abort with |msg| if condition cc does not hold. Unlike Assert, this
// check is emitted unconditionally, not just in debug-code mode.
void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L);
  Abort(msg);
  // will not return here
  bind(&L);
}
2284
2285
// Emit a runtime check that esp satisfies the OS activation frame
// alignment, trapping with int3 on misalignment. No code is emitted
// when the required alignment does not exceed the pointer size.
void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    Label alignment_as_expected;
    // Any low bit set in esp means the stack is misaligned.
    test(esp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}
2299
2300
// Emit code that aborts execution with the given message by calling
// Runtime::kAbort with two smi-encoded arguments. Does not return;
// the emitted sequence ends with int3.
void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    // Make the message visible in disassembly output.
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif

  push(eax);
  push(Immediate(p0));
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
  // Disable stub call restrictions to always allow calls to abort.
  if (!has_frame_) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    CallRuntime(Runtime::kAbort, 2);
  } else {
    CallRuntime(Runtime::kAbort, 2);
  }
  // will not return here
  int3();
}
2332
2333
// Load |map|'s instance descriptors into |descriptors|. The slot holds
// either a descriptor array or a smi (bit field 3); in the smi case,
// substitute the canonical empty descriptor array instead.
void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  mov(descriptors,
      FieldOperand(map, Map::kInstanceDescriptorsOrBitField3Offset));
  Label not_smi;
  JumpIfNotSmi(descriptors, &not_smi);
  mov(descriptors, isolate()->factory()->empty_descriptor_array());
  bind(&not_smi);
}
2343
2344
// Load 2^power into XMM register |dst| by constructing the double bit
// pattern directly: the biased exponent is shifted into the exponent
// field and the mantissa is left zero. Clobbers |scratch|.
void MacroAssembler::LoadPowerOf2(XMMRegister dst,
                                  Register scratch,
                                  int power) {
  // The biased exponent must fit in the exponent field.
  ASSERT(is_uintn(power + HeapNumber::kExponentBias,
                  HeapNumber::kExponentBits));
  mov(scratch, Immediate(power + HeapNumber::kExponentBias));
  movd(dst, scratch);
  psllq(dst, HeapNumber::kMantissaBits);
}
2354
2355
// Jump to |failure| unless |instance_type| describes a sequential
// ASCII string. |scratch| may alias |instance_type|, in which case the
// instance type is destroyed by the masking.
void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Register scratch,
    Label* failure) {
  if (!scratch.is(instance_type)) {
    mov(scratch, instance_type);
  }
  // Keep only the string / representation / encoding bits, then compare
  // against the sequential-ASCII combination.
  and_(scratch,
       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
  cmp(scratch, kStringTag | kSeqStringTag | kAsciiStringTag);
  j(not_equal, failure);
}
2368
2369
// Jump to |failure| unless both object1 and object2 are sequential
// ASCII strings. Clobbers scratch1 and scratch2.
void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register object1,
                                                         Register object2,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         Label* failure) {
  // Check that both objects are not smis.
  STATIC_ASSERT(kSmiTag == 0);
  // The and of the two tag bits is zero (smi) iff at least one operand
  // has a zero tag, so a single smi test covers both objects.
  mov(scratch1, object1);
  and_(scratch1, object2);
  JumpIfSmi(scratch1, failure);

  // Load instance type for both strings.
  mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
  mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
  movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ascii strings.
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
  // Interleave bits from both instance types and compare them in one check.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  and_(scratch1, kFlatAsciiStringMask);
  and_(scratch2, kFlatAsciiStringMask);
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3));
  j(not_equal, failure);
}
2399
2400
// Reserve stack space for |num_arguments| C-call argument words. When
// the OS imposes an activation frame alignment, esp is aligned and the
// pre-alignment esp is saved just beyond the argument area so
// CallCFunction can restore it. Clobbers |scratch|.
void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
  int frame_alignment = OS::ActivationFrameAlignment();
  if (frame_alignment != 0) {
    // Make stack end at alignment and make room for num_arguments words
    // and the original value of esp.
    mov(scratch, esp);
    sub(esp, Immediate((num_arguments + 1) * kPointerSize));
    ASSERT(IsPowerOf2(frame_alignment));
    and_(esp, -frame_alignment);
    mov(Operand(esp, num_arguments * kPointerSize), scratch);
  } else {
    sub(esp, Immediate(num_arguments * kPointerSize));
  }
}
2415
2416
// Call a C function identified by an external reference. The target
// address is materialized in eax, which is free to clobber because it
// will hold the call's return value anyway.
void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Trashing eax is ok as it will be the return value.
  mov(eax, Immediate(function));
  CallCFunction(eax, num_arguments);
}
2423
2424
// Call the C function whose address is in |function|, then remove the
// argument area set up by PrepareCallCFunction. Requires that a frame
// has been entered.
void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  ASSERT(has_frame());
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(function);
  if (OS::ActivationFrameAlignment() != 0) {
    // Restore the esp that PrepareCallCFunction saved past the arguments.
    mov(esp, Operand(esp, num_arguments * kPointerSize));
  } else {
    add(esp, Immediate(num_arguments * kPointerSize));
  }
}
2440
2441
Ben Murdoch592a9fc2012-03-05 11:04:45 +00002442bool AreAliased(Register r1, Register r2, Register r3, Register r4) {
2443 if (r1.is(r2)) return true;
2444 if (r1.is(r3)) return true;
2445 if (r1.is(r4)) return true;
2446 if (r2.is(r3)) return true;
2447 if (r2.is(r4)) return true;
2448 if (r3.is(r4)) return true;
2449 return false;
2450}
2451
2452
// Set up in-place patching of |size| bytes of code at |address|.
CodePatcher::CodePatcher(byte* address, int size)
    : address_(address),
      size_(size),
      masm_(Isolate::Current(), address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
2462
2463
// Finish patching: flush the instruction cache over the patched range
// and verify that exactly |size_| bytes were emitted.
CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
2472
2473
// Test the given flag bits in the header of the page containing
// |object| and jump to |condition_met| if the test result matches cc
// (which must be zero or not_zero). |scratch| receives the page start
// address and may alias |object|.
void MacroAssembler::CheckPageFlag(
    Register object,
    Register scratch,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  ASSERT(cc == zero || cc == not_zero);
  // Mask the object address down to the start of its page.
  if (scratch.is(object)) {
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    mov(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  if (mask < (1 << kBitsPerByte)) {
    // A byte-sized test suffices when the mask fits in the low byte of
    // the flags word.
    test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
           static_cast<uint8_t>(mask));
  } else {
    test(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}
2496
2497
// Jump to |on_black| if |object| is marked black (mark-bit pattern
// "10") in the mark bitmap. Clobbers both scratch registers and ecx
// (via HasColor/GetMarkBits).
void MacroAssembler::JumpIfBlack(Register object,
                                 Register scratch0,
                                 Register scratch1,
                                 Label* on_black,
                                 Label::Distance on_black_near) {
  HasColor(object, scratch0, scratch1,
           on_black, on_black_near,
           1, 0);  // kBlackBitPattern.
  ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
}
2508
2509
// Jump to |has_color| if the two consecutive mark bits of |object|
// equal (first_bit, second_bit). Handles the case where the bit pair
// straddles a bitmap cell boundary. Clobbers both scratch registers
// and ecx (via GetMarkBits).
void MacroAssembler::HasColor(Register object,
                              Register bitmap_scratch,
                              Register mask_scratch,
                              Label* has_color,
                              Label::Distance has_color_distance,
                              int first_bit,
                              int second_bit) {
  ASSERT(!AreAliased(object, bitmap_scratch, mask_scratch, ecx));

  GetMarkBits(object, bitmap_scratch, mask_scratch);

  Label other_color, word_boundary;
  // Test the first mark bit.
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(first_bit == 1 ? zero : not_zero, &other_color, Label::kNear);
  add(mask_scratch, mask_scratch);  // Shift left 1 by adding.
  j(zero, &word_boundary, Label::kNear);
  // Test the second mark bit in the same bitmap cell.
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  jmp(&other_color, Label::kNear);

  bind(&word_boundary);
  // The second bit is the lowest bit of the next bitmap cell.
  test_b(Operand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize), 1);

  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  bind(&other_color);
}
2536
2537
// For the object address in addr_reg, compute the address of its mark
// bitmap cell (into bitmap_reg) and a single-bit mask selecting its
// first mark bit within that cell (into mask_reg). Clobbers ecx.
void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  ASSERT(!AreAliased(addr_reg, mask_reg, bitmap_reg, ecx));
  // bitmap_reg <- start of the page containing the object.
  mov(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
  and_(bitmap_reg, addr_reg);
  // ecx <- byte offset of the object's bitmap cell within the page.
  mov(ecx, addr_reg);
  int shift =
      Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
  shr(ecx, shift);
  and_(ecx,
       (Page::kPageAlignmentMask >> shift) & ~(Bitmap::kBytesPerCell - 1));

  add(bitmap_reg, ecx);
  // ecx <- bit index within the cell; mask_reg <- 1 << that index.
  mov(ecx, addr_reg);
  shr(ecx, kPointerSizeLog2);
  and_(ecx, (1 << Bitmap::kBitsPerCellLog2) - 1);
  mov(mask_reg, Immediate(1));
  shl_cl(mask_reg);
}
2558
2559
// Ensure |value| is not left white in the mark bitmap: if it is white
// and is a data object containing no GC pointers (heap number,
// external string, or sequential string), mark it black and credit its
// size to the page's live-bytes counter; for any other white object,
// jump to |value_is_white_and_not_data|. Black/grey objects fall
// through unchanged. Clobbers both scratch registers and ecx.
void MacroAssembler::EnsureNotWhite(
    Register value,
    Register bitmap_scratch,
    Register mask_scratch,
    Label* value_is_white_and_not_data,
    Label::Distance distance) {
  ASSERT(!AreAliased(value, bitmap_scratch, mask_scratch, ecx));
  GetMarkBits(value, bitmap_scratch, mask_scratch);

  // If the value is black or grey we don't need to do anything.
  ASSERT(strcmp(Marking::kWhiteBitPattern, "00") == 0);
  ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
  ASSERT(strcmp(Marking::kGreyBitPattern, "11") == 0);
  ASSERT(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

  Label done;

  // Since both black and grey have a 1 in the first position and white does
  // not have a 1 there we only need to check one bit.
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(not_zero, &done, Label::kNear);

  if (FLAG_debug_code) {
    // Check for impossible bit pattern.
    Label ok;
    push(mask_scratch);
    // shl.  May overflow making the check conservative.
    add(mask_scratch, mask_scratch);
    test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
    pop(mask_scratch);
  }

  // Value is white.  We check whether it is data that doesn't need scanning.
  // Currently only checks for HeapNumber and non-cons strings.
  Register map = ecx;  // Holds map while checking type.
  Register length = ecx;  // Holds length of object after checking type.
  Label not_heap_number;
  Label is_data_object;

  // Check for heap-number
  mov(map, FieldOperand(value, HeapObject::kMapOffset));
  cmp(map, FACTORY->heap_number_map());
  j(not_equal, &not_heap_number, Label::kNear);
  mov(length, Immediate(HeapNumber::kSize));
  jmp(&is_data_object, Label::kNear);

  bind(&not_heap_number);
  // Check for strings.
  ASSERT(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1);
  ASSERT(kNotStringTag == 0x80 && kIsNotStringMask == 0x80);
  // If it's a string and it's not a cons string then it's an object containing
  // no GC pointers.
  Register instance_type = ecx;
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  test_b(instance_type, kIsIndirectStringMask | kIsNotStringMask);
  j(not_zero, value_is_white_and_not_data);
  // It's a non-indirect (non-cons and non-slice) string.
  // If it's external, the length is just ExternalString::kSize.
  // Otherwise it's String::kHeaderSize + string->length() * (1 or 2).
  Label not_external;
  // External strings are the only ones with the kExternalStringTag bit
  // set.
  ASSERT_EQ(0, kSeqStringTag & kExternalStringTag);
  ASSERT_EQ(0, kConsStringTag & kExternalStringTag);
  test_b(instance_type, kExternalStringTag);
  j(zero, &not_external, Label::kNear);
  mov(length, Immediate(ExternalString::kSize));
  jmp(&is_data_object, Label::kNear);

  bind(&not_external);
  // Sequential string, either ASCII or UC16.
  ASSERT(kAsciiStringTag == 0x04);
  // Turn the encoding bit into the character size (4 for ASCII, 8 for
  // UC16) by inverting it and adding 4.
  and_(length, Immediate(kStringEncodingMask));
  xor_(length, Immediate(kStringEncodingMask));
  add(length, Immediate(0x04));
  // Value now either 4 (if ASCII) or 8 (if UC16), i.e., char-size shifted
  // by 2.  If we multiply the string length as smi by this, it still
  // won't overflow a 32-bit value.
  ASSERT_EQ(SeqAsciiString::kMaxSize, SeqTwoByteString::kMaxSize);
  ASSERT(SeqAsciiString::kMaxSize <=
         static_cast<int>(0xffffffffu >> (2 + kSmiTagSize)));
  imul(length, FieldOperand(value, String::kLengthOffset));
  shr(length, 2 + kSmiTagSize + kSmiShiftSize);
  // Round the byte length up to the object alignment.
  add(length, Immediate(SeqString::kHeaderSize + kObjectAlignmentMask));
  and_(length, Immediate(~kObjectAlignmentMask));

  bind(&is_data_object);
  // Value is a data object, and it is white.  Mark it black.  Since we know
  // that the object is white we can make it black by flipping one bit.
  or_(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);

  and_(bitmap_scratch, Immediate(~Page::kPageAlignmentMask));
  add(Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset),
      length);
  if (FLAG_debug_code) {
    mov(length, Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset));
    cmp(length, Operand(bitmap_scratch, MemoryChunk::kSizeOffset));
    Check(less_equal, "Live Bytes Count overflow chunk size");
  }

  bind(&done);
}
2665
Steve Blocka7e24c12009-10-30 11:49:00 +00002666} } // namespace v8::internal
Leon Clarkef7060e22010-06-03 12:02:55 +01002667
2668#endif // V8_TARGET_ARCH_IA32