// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_X64)

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "assembler-x64.h"
#include "macro-assembler-x64.h"
#include "serialize.h"
#include "debug.h"

namespace v8 {
namespace internal {

MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      code_object_(Heap::undefined_value()) {
}


void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  movq(destination, Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) {
  movq(Operand(kRootRegister, index << kPointerSizeLog2), source);
}


void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  push(Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  cmpq(with, Operand(kRootRegister, index << kPointerSizeLog2));
}


void MacroAssembler::CompareRoot(Operand with, Heap::RootListIndex index) {
  LoadRoot(kScratchRegister, index);
  cmpq(with, kScratchRegister);
}


void MacroAssembler::StackLimitCheck(Label* on_stack_overflow) {
  CompareRoot(rsp, Heap::kStackLimitRootIndex);
  j(below, on_stack_overflow);
}


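// Write-barrier helper: given a heap object and the address of a slot inside
// it, sets the remembered-set bit that corresponds to that slot on the
// object's page (including the extra remembered set used for large objects).
// Note that 'object' and 'addr' are clobbered: they end up holding the page
// start and the bit index, respectively.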
void MacroAssembler::RecordWriteHelper(Register object,
                                       Register addr,
                                       Register scratch) {
  if (FLAG_debug_code) {
    // Check that the object is not in new space.
    Label not_in_new_space;
    InNewSpace(object, scratch, not_equal, &not_in_new_space);
    Abort("new-space object passed to RecordWriteHelper");
    bind(&not_in_new_space);
  }

  Label fast;

  // Compute the page start address from the heap object pointer, and reuse
  // the 'object' register for it.
  ASSERT(is_int32(~Page::kPageAlignmentMask));
  and_(object,
       Immediate(static_cast<int32_t>(~Page::kPageAlignmentMask)));
  Register page_start = object;

  // Compute the bit addr in the remembered set/index of the pointer in the
  // page. Reuse 'addr' as pointer_offset.
  subq(addr, page_start);
  shr(addr, Immediate(kPointerSizeLog2));
  Register pointer_offset = addr;

  // If the bit offset lies beyond the normal remembered set range, it is in
  // the extra remembered set area of a large object.
  cmpq(pointer_offset, Immediate(Page::kPageSize / kPointerSize));
  j(below, &fast);

  // We have a large object containing pointers. It must be a FixedArray.

  // Adjust 'page_start' so that addressing using 'pointer_offset' hits the
  // extra remembered set after the large object.

  // Load the array length into 'scratch'.
  movl(scratch,
       Operand(page_start,
               Page::kObjectStartOffset + FixedArray::kLengthOffset));
  Register array_length = scratch;

  // Extra remembered set starts right after the large object (a FixedArray), at
  //   page_start + kObjectStartOffset + objectSize
  // where objectSize is FixedArray::kHeaderSize + kPointerSize * array_length.
  // Add the delta between the end of the normal RSet and the start of the
  // extra RSet to 'page_start', so that addressing the bit using
  // 'pointer_offset' hits the extra RSet words.
  lea(page_start,
      Operand(page_start, array_length, times_pointer_size,
              Page::kObjectStartOffset + FixedArray::kHeaderSize
              - Page::kRSetEndOffset));

  // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
  // to limit code size. We should probably evaluate this decision by
  // measuring the performance of an equivalent implementation using
  // "simpler" instructions
  bind(&fast);
  bts(Operand(page_start, Page::kRSetOffset), pointer_offset);
}


// Set the remembered set bit for [object+offset].
// object is the object being stored into, value is the object being stored.
// If offset is zero, then the smi_index register contains the array index into
// the elements array represented as a smi. Otherwise it can be used as a
// scratch register.
// All registers are clobbered by the operation.
void MacroAssembler::RecordWrite(Register object,
                                 int offset,
                                 Register value,
                                 Register smi_index) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are rsi.
  ASSERT(!object.is(rsi) && !value.is(rsi) && !smi_index.is(rsi));

  // First, check if a remembered set write is even needed. The tests below
  // catch stores of Smis and stores into young gen (which does not have space
  // for the remembered set bits).
  Label done;
  JumpIfSmi(value, &done);

  RecordWriteNonSmi(object, offset, value, smi_index);
  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors. This clobbering repeats the
  // clobbering done inside RecordWriteNonSmi but it's necessary to
  // avoid having the fast case for smis leave the registers
  // unchanged.
  if (FLAG_debug_code) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(smi_index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::RecordWriteNonSmi(Register object,
                                       int offset,
                                       Register scratch,
                                       Register smi_index) {
  Label done;

  if (FLAG_debug_code) {
    Label okay;
    JumpIfNotSmi(object, &okay);
    Abort("MacroAssembler::RecordWriteNonSmi cannot deal with smis");
    bind(&okay);
  }

  // Test that the object address is not in the new space. We cannot
  // set remembered set bits in the new space.
  InNewSpace(object, scratch, equal, &done);

  // The offset is relative to a tagged or untagged HeapObject pointer,
  // so either offset or offset + kHeapObjectTag must be a
  // multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize) ||
         IsAligned(offset + kHeapObjectTag, kPointerSize));

  // We use optimized write barrier code if the word being written to is not in
  // a large object page, or is in the first "page" of a large object page.
  // We make sure that an offset is inside the right limits whether it is
  // tagged or untagged.
  if ((offset > 0) && (offset < Page::kMaxHeapObjectSize - kHeapObjectTag)) {
    // Compute the bit offset in the remembered set, leave it in 'scratch'.
    lea(scratch, Operand(object, offset));
    ASSERT(is_int32(Page::kPageAlignmentMask));
    and_(scratch, Immediate(static_cast<int32_t>(Page::kPageAlignmentMask)));
    shr(scratch, Immediate(kPointerSizeLog2));

    // Compute the page address from the heap object pointer, leave it in
    // 'object' (immediate value is sign extended).
    and_(object, Immediate(~Page::kPageAlignmentMask));

    // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
    // to limit code size. We should probably evaluate this decision by
    // measuring the performance of an equivalent implementation using
    // "simpler" instructions
    bts(Operand(object, Page::kRSetOffset), scratch);
  } else {
    Register dst = smi_index;
    if (offset != 0) {
      lea(dst, Operand(object, offset));
    } else {
      // array access: calculate the destination address in the same manner as
      // KeyedStoreIC::GenerateGeneric.
      SmiIndex index = SmiToIndex(smi_index, smi_index, kPointerSizeLog2);
      lea(dst, FieldOperand(object,
                            index.reg,
                            index.scale,
                            FixedArray::kHeaderSize));
    }
    // If we are already generating a shared stub, not inlining the
    // record write code isn't going to save us any memory.
    if (generating_stub()) {
      RecordWriteHelper(object, dst, scratch);
    } else {
      RecordWriteStub stub(object, dst, scratch);
      CallStub(&stub);
    }
  }

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(scratch, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(smi_index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


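// Tests whether 'object' lies in new space by masking its address with the
// new-space mask and comparing against the new-space start, then jumps to
// 'branch' when the comparison satisfies 'cc'. 'scratch' may alias 'object'
// and is clobbered either way.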
void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                Label* branch) {
  if (Serializer::enabled()) {
    // Can't do arithmetic on external references if it might get serialized.
    // The mask isn't really an address. We load it as an external reference in
    // case the size of the new space is different between the snapshot maker
    // and the running system.
    if (scratch.is(object)) {
      movq(kScratchRegister, ExternalReference::new_space_mask());
      and_(scratch, kScratchRegister);
    } else {
      movq(scratch, ExternalReference::new_space_mask());
      and_(scratch, object);
    }
    movq(kScratchRegister, ExternalReference::new_space_start());
    cmpq(scratch, kScratchRegister);
    j(cc, branch);
  } else {
    ASSERT(is_int32(static_cast<int64_t>(Heap::NewSpaceMask())));
    intptr_t new_space_start =
        reinterpret_cast<intptr_t>(Heap::NewSpaceStart());
    movq(kScratchRegister, -new_space_start, RelocInfo::NONE);
    if (scratch.is(object)) {
      addq(scratch, kScratchRegister);
    } else {
      lea(scratch, Operand(object, kScratchRegister, times_1, 0));
    }
    and_(scratch, Immediate(static_cast<int32_t>(Heap::NewSpaceMask())));
    j(cc, branch);
  }
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code) Check(cc, msg);
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L);
  Abort(msg);
  // will not return here
  bind(&L);
}


void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    Label alignment_as_expected;
    testq(rsp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  testl(result, result);
  j(not_zero, &ok);
  testl(op, op);
  j(sign, then_label);
  bind(&ok);
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  // Note: p0 might not be a valid Smi *value*, but it has a valid Smi tag.
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  set_allow_stub_calls(true);

  push(rax);
  movq(kScratchRegister, p0, RelocInfo::NONE);
  push(kScratchRegister);
  movq(kScratchRegister,
       reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(p1 - p0))),
       RelocInfo::NONE);
  push(kScratchRegister);
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
  int3();
}


void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // calls are not allowed in some stubs
  Call(stub->GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // calls are not allowed in some stubs
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    addq(rsp, Immediate(num_arguments * kPointerSize));
  }
  LoadRoot(rax, Heap::kUndefinedValueRootIndex);
}


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  movq(rax, Immediate(num_arguments));
  movq(rbx, ExternalReference(f));
  CEntryStub ces(f->result_size);
  CallStub(&ces);
}


void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments) {
  movq(rax, Immediate(num_arguments));
  movq(rbx, ext);

  CEntryStub stub(1);
  CallStub(&stub);
}


void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // ----------- S t a t e -------------
  //  -- rsp[0] : return address
  //  -- rsp[8] : argument num_arguments - 1
  //  ...
  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
  // -----------------------------------

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  movq(rax, Immediate(num_arguments));
  JumpToExternalReference(ext, result_size);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid), num_arguments, result_size);
}


void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
                                             int result_size) {
  // Set the entry point and jump to the C entry runtime stub.
  movq(rbx, ext);
  CEntryStub ces(result_size);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments matches the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinEntry(rdx, id);
  InvokeCode(rdx, expected, expected, flag);
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(rdi));

  // Load the builtins object into target register.
  movq(target, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  movq(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));

  // Load the JavaScript builtin function from the builtins object.
  movq(rdi, FieldOperand(target, JSBuiltinsObject::OffsetOfFunctionWithId(id)));

  // Load the code entry point from the builtins object.
  movq(target, FieldOperand(target, JSBuiltinsObject::OffsetOfCodeWithId(id)));
  if (FLAG_debug_code) {
    // Make sure the code objects in the builtins object and in the
    // builtin function are the same.
    push(target);
    movq(target, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    movq(target, FieldOperand(target, SharedFunctionInfo::kCodeOffset));
    cmpq(target, Operand(rsp, 0));
    Assert(equal, "Builtin code object changed");
    pop(target);
  }
  lea(target, FieldOperand(target, Code::kHeaderSize));
}


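// Loads a 64-bit constant into a register or memory operand, using the
// shortest instruction that still reproduces the full value (xor for zero,
// a sign- or zero-extended 32-bit immediate when possible, otherwise a full
// 64-bit move).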
void MacroAssembler::Set(Register dst, int64_t x) {
  if (x == 0) {
    xor_(dst, dst);
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else if (is_uint32(x)) {
    movl(dst, Immediate(static_cast<uint32_t>(x)));
  } else {
    movq(dst, x, RelocInfo::NONE);
  }
}


void MacroAssembler::Set(const Operand& dst, int64_t x) {
  if (x == 0) {
    xor_(kScratchRegister, kScratchRegister);
    movq(dst, kScratchRegister);
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else if (is_uint32(x)) {
    movl(dst, Immediate(static_cast<uint32_t>(x)));
  } else {
    movq(kScratchRegister, x, RelocInfo::NONE);
    movq(dst, kScratchRegister);
  }
}

// ----------------------------------------------------------------------------
// Smi tagging, untagging and tag detection.

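// In this representation a smi keeps its 32-bit payload in the upper half of
// the word while the tag and padding bits in the lower half are all zero, so
// kSmiShift below works out to 32 (kSmiTagSize + kSmiShiftSize).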
static int kSmiShift = kSmiTagSize + kSmiShiftSize;

void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movl(dst, src);
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::Integer32ToSmi(Register dst,
                                    Register src,
                                    Label* on_overflow) {
  ASSERT_EQ(0, kSmiTag);
  // A 32-bit integer always fits in a long smi.
  if (!dst.is(src)) {
    movl(dst, src);
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
                                                Register src,
                                                int constant) {
  if (dst.is(src)) {
    addq(dst, Immediate(constant));
  } else {
    lea(dst, Operand(src, constant));
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  shr(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger64(Register dst, Register src) {
  ASSERT_EQ(0, kSmiTag);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  sar(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiTest(Register src) {
  testq(src, src);
}


void MacroAssembler::SmiCompare(Register dst, Register src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(Register dst, Smi* src) {
  ASSERT(!dst.is(kScratchRegister));
  if (src->value() == 0) {
    testq(dst, dst);
  } else {
    Move(kScratchRegister, src);
    cmpq(dst, kScratchRegister);
  }
}


void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
  if (src->value() == 0) {
    // Zero is the only long smi whose tagged representation fits in 32 bits.
    cmpq(dst, Immediate(0));
  } else {
    Move(kScratchRegister, src);
    cmpq(dst, kScratchRegister);
  }
}


void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
                                                           Register src,
                                                           int power) {
  ASSERT(power >= 0);
  ASSERT(power < 64);
  if (power == 0) {
    SmiToInteger64(dst, src);
    return;
  }
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (power < kSmiShift) {
    sar(dst, Immediate(kSmiShift - power));
  } else if (power > kSmiShift) {
    shl(dst, Immediate(power - kSmiShift));
  }
}


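// kSmiTag is zero, so a value is a smi exactly when its low tag bits are
// clear; these checks therefore only need to examine the low byte.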
Condition MacroAssembler::CheckSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}


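// Rotating the value left by one moves the sign bit into bit 0 and the smi
// tag into bit 1, so a single test against 0x03 checks both "is a smi" and
// "is non-negative" at once.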
Condition MacroAssembler::CheckPositiveSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);
  movq(kScratchRegister, src);
  rol(kScratchRegister, Immediate(1));
  testl(kScratchRegister, Immediate(0x03));
  return zero;
}


Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  movl(kScratchRegister, first);
  orl(kScratchRegister, second);
  testb(kScratchRegister, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckBothPositiveSmi(Register first,
                                               Register second) {
  if (first.is(second)) {
    return CheckPositiveSmi(first);
  }
  movl(kScratchRegister, first);
  orl(kScratchRegister, second);
  rol(kScratchRegister, Immediate(1));
  testl(kScratchRegister, Immediate(0x03));
  return zero;
}


Condition MacroAssembler::CheckEitherSmi(Register first, Register second) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  movl(kScratchRegister, first);
  andl(kScratchRegister, second);
  testb(kScratchRegister, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckIsMinSmi(Register src) {
  ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  movq(kScratchRegister, src);
  rol(kScratchRegister, Immediate(1));
  cmpq(kScratchRegister, Immediate(1));
  return equal;
}


Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {
  // A 32-bit integer value can always be converted to a smi.
  return always;
}


Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
  // An unsigned 32-bit integer value is valid as long as the high bit
  // is not set.
  testq(src, Immediate(0x80000000));
  return zero;
}


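// Negation fails to produce a smi only for 0 (the result would have to be -0,
// which is not a smi) and for Smi::kMinValue (which overflows); both of these
// negate to themselves, which is what the comparison against the original
// value detects.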
void MacroAssembler::SmiNeg(Register dst, Register src, Label* on_smi_result) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    movq(kScratchRegister, src);
    neg(dst);  // Low 32 bits are retained as zero by negation.
    // Test if result is zero or Smi::kMinValue.
    cmpq(dst, kScratchRegister);
    j(not_equal, on_smi_result);
    movq(src, kScratchRegister);
  } else {
    movq(dst, src);
    neg(dst);
    cmpq(dst, src);
    // If the result is zero or Smi::kMinValue, negation failed to create a smi.
    j(not_equal, on_smi_result);
  }
}


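// Addition with an overflow check. If the sum does not fit in a smi, the
// operands must still hold their original values when control reaches
// 'on_not_smi_result', so the in-place case undoes the addition before
// jumping there.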
void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  if (on_not_smi_result == NULL) {
    // No overflow checking. Use only when it's known that
    // overflowing is impossible.
    if (dst.is(src1)) {
      addq(dst, src2);
    } else {
      movq(dst, src1);
      addq(dst, src2);
    }
    Assert(no_overflow, "Smi addition overflow");
  } else if (dst.is(src1)) {
    addq(dst, src2);
    Label smi_result;
    j(no_overflow, &smi_result);
    // Restore src1.
    subq(src1, src2);
    jmp(on_not_smi_result);
    bind(&smi_result);
  } else {
    movq(dst, src1);
    addq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  if (on_not_smi_result == NULL) {
    // No overflow checking. Use only when it's known that
    // overflowing is impossible (e.g., subtracting two positive smis).
    if (dst.is(src1)) {
      subq(dst, src2);
    } else {
      movq(dst, src1);
      subq(dst, src2);
    }
    Assert(no_overflow, "Smi subtraction overflow");
  } else if (dst.is(src1)) {
    subq(dst, src2);
    Label smi_result;
    j(no_overflow, &smi_result);
    // Restore src1.
    addq(src1, src2);
    jmp(on_not_smi_result);
    bind(&smi_result);
  } else {
    movq(dst, src1);
    subq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            const Operand& src2,
                            Label* on_not_smi_result) {
  if (on_not_smi_result == NULL) {
    // No overflow checking. Use only when it's known that
    // overflowing is impossible (e.g., subtracting two positive smis).
    if (dst.is(src1)) {
      subq(dst, src2);
    } else {
      movq(dst, src1);
      subq(dst, src2);
    }
    Assert(no_overflow, "Smi subtraction overflow");
  } else if (dst.is(src1)) {
    subq(dst, src2);
    Label smi_result;
    j(no_overflow, &smi_result);
    // Restore src1.
    addq(src1, src2);
    jmp(on_not_smi_result);
    bind(&smi_result);
  } else {
    movq(dst, src1);
    subq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}

void MacroAssembler::SmiMul(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));

  if (dst.is(src1)) {
    Label failure, zero_correct_result;
    movq(kScratchRegister, src1);  // Create backup for later testing.
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, &failure);

    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    Label correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result);

    movq(dst, kScratchRegister);
    xor_(dst, src2);
    j(positive, &zero_correct_result);  // Result was positive zero.

    bind(&failure);  // Reused failure exit, restores src1.
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);

    bind(&zero_correct_result);
    xor_(dst, dst);

    bind(&correct_result);
  } else {
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, on_not_smi_result);
    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    Label correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result);
    // One of src1 and src2 is zero, so check whether the other one
    // is negative.
    movq(kScratchRegister, src1);
    xor_(kScratchRegister, src2);
    j(negative, on_not_smi_result);
    bind(&correct_result);
  }
}


void MacroAssembler::SmiTryAddConstant(Register dst,
                                       Register src,
                                       Smi* constant,
                                       Label* on_not_smi_result) {
  // Does not assume that src is a smi.
  ASSERT_EQ(static_cast<int>(1), static_cast<int>(kSmiTagMask));
  ASSERT_EQ(0, kSmiTag);
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));

  JumpIfNotSmi(src, on_not_smi_result);
  Register tmp = (dst.is(src) ? kScratchRegister : dst);
  Move(tmp, constant);
  addq(tmp, src);
  j(overflow, on_not_smi_result);
  if (dst.is(src)) {
    movq(dst, tmp);
  }
}


void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    Move(kScratchRegister, constant);
    addq(dst, kScratchRegister);
  } else {
    Move(dst, constant);
    addq(dst, src);
  }
}


void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
  if (constant->value() != 0) {
    Move(kScratchRegister, constant);
    addq(dst, kScratchRegister);
  }
}


void MacroAssembler::SmiAddConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    Label* on_not_smi_result) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    Move(kScratchRegister, constant);
    addq(dst, kScratchRegister);
    Label result_ok;
    j(no_overflow, &result_ok);
    subq(dst, kScratchRegister);
    jmp(on_not_smi_result);
    bind(&result_ok);
  } else {
    Move(dst, constant);
    addq(dst, src);
    j(overflow, on_not_smi_result);
  }
}


void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    Move(kScratchRegister, constant);
    subq(dst, kScratchRegister);
  } else {
    // Subtract by adding the negative, to do it in two operations.
    if (constant->value() == Smi::kMinValue) {
      Move(kScratchRegister, constant);
      movq(dst, src);
      subq(dst, kScratchRegister);
    } else {
      Move(dst, Smi::FromInt(-constant->value()));
      addq(dst, src);
    }
  }
}


void MacroAssembler::SmiSubConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    Label* on_not_smi_result) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    Move(kScratchRegister, constant);
    subq(dst, kScratchRegister);
    Label sub_success;
    j(no_overflow, &sub_success);
    addq(src, kScratchRegister);
    jmp(on_not_smi_result);
    bind(&sub_success);
  } else {
    if (constant->value() == Smi::kMinValue) {
      Move(kScratchRegister, constant);
      movq(dst, src);
      subq(dst, kScratchRegister);
      j(overflow, on_not_smi_result);
    } else {
      Move(dst, Smi::FromInt(-(constant->value())));
      addq(dst, src);
      j(overflow, on_not_smi_result);
    }
  }
}


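// Division goes through idivl, which takes its dividend in edx:eax; rax and
// rdx are therefore dedicated registers here, and the asserts require src2 to
// stay out of both.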
void MacroAssembler::SmiDiv(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src2.is(rax));
  ASSERT(!src2.is(rdx));
  ASSERT(!src1.is(rdx));

  // Check for 0 divisor (result is +/-Infinity).
  Label positive_divisor;
  testq(src2, src2);
  j(zero, on_not_smi_result);

  if (src1.is(rax)) {
    movq(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  // We need to rule out dividing Smi::kMinValue by -1, since that would
  // overflow in idiv and raise an exception.
  // We combine this with negative zero test (negative zero only happens
  // when dividing zero by a negative number).

  // We overshoot a little and go to slow case if we divide min-value
  // by any negative value, not just -1.
  Label safe_div;
  testl(rax, Immediate(0x7fffffff));
  j(not_zero, &safe_div);
  testq(src2, src2);
  if (src1.is(rax)) {
    j(positive, &safe_div);
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);
  } else {
    j(negative, on_not_smi_result);
  }
  bind(&safe_div);

  SmiToInteger32(src2, src2);
  // Sign extend src1 into edx:eax.
  cdq();
  idivl(src2);
  Integer32ToSmi(src2, src2);
  // Check that the remainder is zero.
  testl(rdx, rdx);
  if (src1.is(rax)) {
    Label smi_result;
    j(zero, &smi_result);
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);
    bind(&smi_result);
  } else {
    j(not_zero, on_not_smi_result);
  }
  if (!dst.is(src1) && src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  Integer32ToSmi(dst, rax);
}


void MacroAssembler::SmiMod(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!src2.is(rax));
  ASSERT(!src2.is(rdx));
  ASSERT(!src1.is(rdx));
  ASSERT(!src1.is(src2));

  testq(src2, src2);
  j(zero, on_not_smi_result);

  if (src1.is(rax)) {
    movq(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  SmiToInteger32(src2, src2);

  // Test for the edge case of dividing Smi::kMinValue by -1 (will overflow).
  Label safe_div;
  cmpl(rax, Immediate(Smi::kMinValue));
  j(not_equal, &safe_div);
  cmpl(src2, Immediate(-1));
  j(not_equal, &safe_div);
  // Retag inputs and go slow case.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  jmp(on_not_smi_result);
  bind(&safe_div);

  // Sign extend eax into edx:eax.
  cdq();
  idivl(src2);
  // Restore smi tags on inputs.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  // Check for a negative zero result. If the result is zero, and the
  // dividend is negative, go slow to return a floating point negative zero.
  Label smi_result;
  testl(rdx, rdx);
  j(not_zero, &smi_result);
  testq(src1, src1);
  j(negative, on_not_smi_result);
  bind(&smi_result);
  Integer32ToSmi(dst, rdx);
}


void MacroAssembler::SmiNot(Register dst, Register src) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));
  // Set tag and padding bits before negating, so that they are zero afterwards.
  movl(kScratchRegister, Immediate(~0));
  if (dst.is(src)) {
    xor_(dst, kScratchRegister);
  } else {
    lea(dst, Operand(src, kScratchRegister, times_1, 0));
  }
  not_(dst);
}


void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
  ASSERT(!dst.is(src2));
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  and_(dst, src2);
}


void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    xor_(dst, dst);
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Move(kScratchRegister, constant);
    and_(dst, kScratchRegister);
  } else {
    Move(dst, constant);
    and_(dst, src);
  }
}


void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  or_(dst, src2);
}


void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Move(kScratchRegister, constant);
    or_(dst, kScratchRegister);
  } else {
    Move(dst, constant);
    or_(dst, src);
  }
}


void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  xor_(dst, src2);
}


void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Move(kScratchRegister, constant);
    xor_(dst, kScratchRegister);
  } else {
    Move(dst, constant);
    xor_(dst, src);
  }
}


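// The constant right shifts below operate on the tagged value directly:
// shifting right by (shift_value + kSmiShift) untags and shifts in one step,
// and the final shl(kSmiShift) re-tags the result.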
void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
                                                     Register src,
                                                     int shift_value) {
  ASSERT(is_uint5(shift_value));
  if (shift_value > 0) {
    if (dst.is(src)) {
      sar(dst, Immediate(shift_value + kSmiShift));
      shl(dst, Immediate(kSmiShift));
    } else {
      UNIMPLEMENTED();  // Not used.
    }
  }
}


void MacroAssembler::SmiShiftLogicalRightConstant(Register dst,
                                                  Register src,
                                                  int shift_value,
                                                  Label* on_not_smi_result) {
  // Logical right shift interprets its result as an *unsigned* number.
  if (dst.is(src)) {
    UNIMPLEMENTED();  // Not used.
  } else {
    movq(dst, src);
    if (shift_value == 0) {
      testq(dst, dst);
      j(negative, on_not_smi_result);
    }
    shr(dst, Immediate(shift_value + kSmiShift));
    shl(dst, Immediate(kSmiShift));
  }
}


void MacroAssembler::SmiShiftLeftConstant(Register dst,
                                          Register src,
                                          int shift_value) {
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (shift_value > 0) {
    shl(dst, Immediate(shift_value));
  }
}


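// A left shift needs no smi check in this representation: the payload sits in
// the upper 32 bits and the lower 32 bits stay zero, so the shifted value is
// always a valid (if possibly wrapped) smi bit pattern.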
void MacroAssembler::SmiShiftLeft(Register dst,
                                  Register src1,
                                  Register src2) {
  ASSERT(!dst.is(rcx));
  Label result_ok;
  // Untag shift amount.
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  // Shift amount is masked to the lower 5 bits (the 64-bit shl would use six).
  and_(rcx, Immediate(0x1f));
  shl_cl(dst);
}


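// The variable right shifts below fold untagging into the shift itself:
// or-ing kSmiShift (32) into the count in rcx makes the hardware shift by
// 32 plus the low five bits of the original count, and the result is
// re-tagged with shl afterwards.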
1271void MacroAssembler::SmiShiftLogicalRight(Register dst,
1272 Register src1,
1273 Register src2,
1274 Label* on_not_smi_result) {
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001275 ASSERT(!dst.is(kScratchRegister));
1276 ASSERT(!src1.is(kScratchRegister));
1277 ASSERT(!src2.is(kScratchRegister));
ager@chromium.org4af710e2009-09-15 12:20:11 +00001278 ASSERT(!dst.is(rcx));
1279 Label result_ok;
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001280 if (src1.is(rcx) || src2.is(rcx)) {
1281 movq(kScratchRegister, rcx);
ager@chromium.org4af710e2009-09-15 12:20:11 +00001282 }
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001283 if (!dst.is(src1)) {
1284 movq(dst, src1);
1285 }
1286 SmiToInteger32(rcx, src2);
1287 orl(rcx, Immediate(kSmiShift));
ager@chromium.orgc4c92722009-11-18 14:12:51 +00001288 shr_cl(dst); // Shift is rcx modulo 0x1f + 32.
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001289 shl(dst, Immediate(kSmiShift));
1290 testq(dst, dst);
1291 if (src1.is(rcx) || src2.is(rcx)) {
1292 Label positive_result;
1293 j(positive, &positive_result);
1294 if (src1.is(rcx)) {
1295 movq(src1, kScratchRegister);
1296 } else {
1297 movq(src2, kScratchRegister);
1298 }
1299 jmp(on_not_smi_result);
1300 bind(&positive_result);
1301 } else {
1302 j(negative, on_not_smi_result); // src2 was zero and src1 negative.
1303 }
ager@chromium.org4af710e2009-09-15 12:20:11 +00001304}
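// The orl(rcx, Immediate(kSmiShift)) above folds untagging into the shift:
// rcx holds the untagged count s, and or'ing in 32 makes the hardware shift
// amount 32 + (s & 0x1f), so the tagged source is untagged, shifted and left
// ready for re-tagging by a single shr. A sketch of one path, assuming that
// layout:
//
//   src1 = Smi(-8), src2 = Smi(1)
//   rcx = 1 | 32 = 33
//   shr_cl(dst)   ->  0x7ffffffc        (0xfffffff8 >> 1, unsigned)
//   shl(dst, 32)  ->  Smi(2147483644), positive, so no bailout
//
// The bailout is only reachable when the count is zero and src1 is negative,
// which is why a clobbered rcx is restored before jumping to
// on_not_smi_result.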
1305
1306
1307void MacroAssembler::SmiShiftArithmeticRight(Register dst,
1308 Register src1,
1309 Register src2) {
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001310 ASSERT(!dst.is(kScratchRegister));
1311 ASSERT(!src1.is(kScratchRegister));
1312 ASSERT(!src2.is(kScratchRegister));
ager@chromium.org4af710e2009-09-15 12:20:11 +00001313 ASSERT(!dst.is(rcx));
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001314 if (src1.is(rcx)) {
1315 movq(kScratchRegister, src1);
1316 } else if (src2.is(rcx)) {
1317 movq(kScratchRegister, src2);
1318 }
1319 if (!dst.is(src1)) {
1320 movq(dst, src1);
1321 }
ager@chromium.org4af710e2009-09-15 12:20:11 +00001322 SmiToInteger32(rcx, src2);
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001323 orl(rcx, Immediate(kSmiShift));
ager@chromium.orgc4c92722009-11-18 14:12:51 +00001324 sar_cl(dst); // Shift amount is (original rcx & 0x1f) + 32.
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001325 shl(dst, Immediate(kSmiShift));
1326 if (src1.is(rcx)) {
1327 movq(src1, kScratchRegister);
1328 } else if (src2.is(rcx)) {
1329 movq(src2, kScratchRegister);
1330 }
ager@chromium.org4af710e2009-09-15 12:20:11 +00001331}
1332
1333
1334void MacroAssembler::SelectNonSmi(Register dst,
1335 Register src1,
1336 Register src2,
1337 Label* on_not_smis) {
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001338 ASSERT(!dst.is(kScratchRegister));
1339 ASSERT(!src1.is(kScratchRegister));
1340 ASSERT(!src2.is(kScratchRegister));
ager@chromium.org4af710e2009-09-15 12:20:11 +00001341 ASSERT(!dst.is(src1));
1342 ASSERT(!dst.is(src2));
1343 // The operands must not both be smis.
1344#ifdef DEBUG
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001345 if (allow_stub_calls()) { // Check contains a stub call.
1346 Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
1347 Check(not_both_smis, "Both registers were smis in SelectNonSmi.");
1348 }
ager@chromium.org4af710e2009-09-15 12:20:11 +00001349#endif
1350 ASSERT_EQ(0, kSmiTag);
1351 ASSERT_EQ(0, Smi::FromInt(0));
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001352 movl(kScratchRegister, Immediate(kSmiTagMask));
ager@chromium.org4af710e2009-09-15 12:20:11 +00001353 and_(kScratchRegister, src1);
1354 testl(kScratchRegister, src2);
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001355 // If non-zero, then neither operand is a smi.
ager@chromium.org4af710e2009-09-15 12:20:11 +00001356 j(not_zero, on_not_smis);
ager@chromium.org4af710e2009-09-15 12:20:11 +00001357
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001358 // Exactly one operand is a smi.
ager@chromium.org4af710e2009-09-15 12:20:11 +00001359 ASSERT_EQ(1, static_cast<int>(kSmiTagMask));
1360 // kScratchRegister still holds src1 & kSmiTagMask, which is either zero or one.
1361 subq(kScratchRegister, Immediate(1));
1362 // If src1 is a smi, then scratch register all 1s, else it is all 0s.
1363 movq(dst, src1);
1364 xor_(dst, src2);
1365 and_(dst, kScratchRegister);
1366 // If src1 is a smi, dst holds src1 ^ src2, else it is zero.
1367 xor_(dst, src1);
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001368 // If src1 is a smi, dst is src2, else it is src1, i.e., the non-smi.
ager@chromium.org4af710e2009-09-15 12:20:11 +00001369}
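// A worked pass through the branch-free selection above (hypothetical
// operands, assuming kSmiTag == 0 and kSmiTagMask == 1):
//
//   src1 = a smi (low bit 0), src2 = a heap object (low bit 1)
//     kScratchRegister = src1 & 1 = 0;  after subq(..., 1) it is all ones.
//     dst = (src1 ^ src2) & ~0;  dst ^= src1  ->  src2.
//
//   src1 = a heap object, src2 = a smi
//     kScratchRegister = 1;  after subq it is 0.
//     dst = (src1 ^ src2) & 0 = 0;  dst ^= src1  ->  src1.
//
// Either way dst ends up holding the operand that is not a smi.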
1370
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001371SmiIndex MacroAssembler::SmiToIndex(Register dst,
1372 Register src,
1373 int shift) {
ager@chromium.org4af710e2009-09-15 12:20:11 +00001374 ASSERT(is_uint6(shift));
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001375 // There is a possible optimization if shift is in the range 60-63, but that
1376 // will (and must) never happen.
1377 if (!dst.is(src)) {
1378 movq(dst, src);
ager@chromium.org4af710e2009-09-15 12:20:11 +00001379 }
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001380 if (shift < kSmiShift) {
1381 sar(dst, Immediate(kSmiShift - shift));
1382 } else {
1383 shl(dst, Immediate(shift - kSmiShift));
ager@chromium.org4af710e2009-09-15 12:20:11 +00001384 }
ager@chromium.org4af710e2009-09-15 12:20:11 +00001385 return SmiIndex(dst, times_1);
1386}
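// Example of the combined untag-and-scale (a sketch, assuming kSmiShift ==
// 32): indexing an array of 8-byte elements uses shift == 3, so
//
//   src = Smi(10)          bits: 10 << 32
//   sar(dst, 32 - 3)   ->  10 << 3 == 80
//
// i.e. dst already holds the byte offset 10 * 8, and the returned SmiIndex
// therefore uses times_1 scaling.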
1387
ager@chromium.org4af710e2009-09-15 12:20:11 +00001388SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
1389 Register src,
1390 int shift) {
1391 // Register src holds a positive smi.
1392 ASSERT(is_uint6(shift));
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001393 if (!dst.is(src)) {
1394 movq(dst, src);
ager@chromium.org4af710e2009-09-15 12:20:11 +00001395 }
ager@chromium.org4af710e2009-09-15 12:20:11 +00001396 neg(dst);
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001397 if (shift < kSmiShift) {
1398 sar(dst, Immediate(kSmiShift - shift));
1399 } else {
1400 shl(dst, Immediate(shift - kSmiShift));
1401 }
ager@chromium.org4af710e2009-09-15 12:20:11 +00001402 return SmiIndex(dst, times_1);
1403}
1404
1405
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001406void MacroAssembler::JumpIfSmi(Register src, Label* on_smi) {
1407 ASSERT_EQ(0, kSmiTag);
1408 Condition smi = CheckSmi(src);
1409 j(smi, on_smi);
1410}
1411
1412
1413void MacroAssembler::JumpIfNotSmi(Register src, Label* on_not_smi) {
1414 Condition smi = CheckSmi(src);
1415 j(NegateCondition(smi), on_not_smi);
1416}
1417
1418
1419void MacroAssembler::JumpIfNotPositiveSmi(Register src,
1420 Label* on_not_positive_smi) {
1421 Condition positive_smi = CheckPositiveSmi(src);
1422 j(NegateCondition(positive_smi), on_not_positive_smi);
1423}
1424
1425
1426void MacroAssembler::JumpIfSmiEqualsConstant(Register src,
1427 Smi* constant,
1428 Label* on_equals) {
1429 SmiCompare(src, constant);
1430 j(equal, on_equals);
1431}
1432
1433
1434void MacroAssembler::JumpIfNotValidSmiValue(Register src, Label* on_invalid) {
1435 Condition is_valid = CheckInteger32ValidSmiValue(src);
1436 j(NegateCondition(is_valid), on_invalid);
1437}
1438
1439
ager@chromium.org3811b432009-10-28 14:53:37 +00001440void MacroAssembler::JumpIfUIntNotValidSmiValue(Register src,
1441 Label* on_invalid) {
1442 Condition is_valid = CheckUInteger32ValidSmiValue(src);
1443 j(NegateCondition(is_valid), on_invalid);
1444}
1445
1446
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001447void MacroAssembler::JumpIfNotBothSmi(Register src1, Register src2,
1448 Label* on_not_both_smi) {
1449 Condition both_smi = CheckBothSmi(src1, src2);
1450 j(NegateCondition(both_smi), on_not_both_smi);
1451}
ager@chromium.org4af710e2009-09-15 12:20:11 +00001452
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001453
sgjesse@chromium.orgb302e562010-02-03 11:26:59 +00001454void MacroAssembler::JumpIfNotBothPositiveSmi(Register src1, Register src2,
1455 Label* on_not_both_smi) {
1456 Condition both_smi = CheckBothPositiveSmi(src1, src2);
1457 j(NegateCondition(both_smi), on_not_both_smi);
1458}
1459
1460
1461
1462void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register first_object,
1463 Register second_object,
1464 Register scratch1,
1465 Register scratch2,
1466 Label* on_fail) {
1467 // Check that neither object is a smi.
1468 Condition either_smi = CheckEitherSmi(first_object, second_object);
1469 j(either_smi, on_fail);
1470
1471 // Load instance type for both strings.
1472 movq(scratch1, FieldOperand(first_object, HeapObject::kMapOffset));
1473 movq(scratch2, FieldOperand(second_object, HeapObject::kMapOffset));
1474 movzxbl(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
1475 movzxbl(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));
1476
1477 // Check that both are flat ascii strings.
1478 ASSERT(kNotStringTag != 0);
1479 const int kFlatAsciiStringMask =
1480 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
1481 const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
1482
1483 andl(scratch1, Immediate(kFlatAsciiStringMask));
1484 andl(scratch2, Immediate(kFlatAsciiStringMask));
1485 // Interleave the bits to check both scratch1 and scratch2 in one test.
1486 ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
1487 lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
1488 cmpl(scratch1,
1489 Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
1490 j(not_equal, on_fail);
1491}
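// How one cmpl covers both strings: after the andl's each scratch register
// holds at most the bits of kFlatAsciiStringMask, and the ASSERT guarantees
// that the mask does not overlap itself shifted left by three. The lea
// therefore packs scratch1 and scratch2 * 8 into a single word without
// carries, so one comparison against kFlatAsciiStringTag + (tag << 3) checks
// both instance types. With hypothetical constants (mask 0x7, tag 0x4), two
// flat ascii strings give
//
//   scratch1 == 0x4, scratch2 == 0x4
//   lea  ->  0x4 + 0x4 * 8 == 0x24 == 0x4 + (0x4 << 3)   -> equal, no jump.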
1492
1493
ager@chromium.orgce5e87b2010-03-10 10:24:18 +00001494void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
1495 Register instance_type,
1496 Register scratch,
1497 Label* failure) {
1498 if (!scratch.is(instance_type)) {
1499 movl(scratch, instance_type);
1500 }
1501
1502 const int kFlatAsciiStringMask =
1503 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
1504
1505 andl(scratch, Immediate(kFlatAsciiStringMask));
1506 cmpl(scratch, Immediate(kStringTag | kSeqStringTag | kAsciiStringTag));
1507 j(not_equal, failure);
1508}
1509
1510
1511void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialAscii(
1512 Register first_object_instance_type,
1513 Register second_object_instance_type,
1514 Register scratch1,
1515 Register scratch2,
1516 Label* on_fail) {
1517 // Load instance type for both strings.
1518 movq(scratch1, first_object_instance_type);
1519 movq(scratch2, second_object_instance_type);
1520
1521 // Check that both are flat ascii strings.
1522 ASSERT(kNotStringTag != 0);
1523 const int kFlatAsciiStringMask =
1524 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
1525 const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
1526
1527 andl(scratch1, Immediate(kFlatAsciiStringMask));
1528 andl(scratch2, Immediate(kFlatAsciiStringMask));
1529 // Interleave the bits to check both scratch1 and scratch2 in one test.
1530 ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
1531 lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
1532 cmpl(scratch1,
1533 Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
1534 j(not_equal, on_fail);
1535}
1536
1537
ager@chromium.org5aa501c2009-06-23 07:57:28 +00001538void MacroAssembler::Move(Register dst, Handle<Object> source) {
kasperl@chromium.org68ac0092009-07-09 06:00:35 +00001539 ASSERT(!source->IsFailure());
ager@chromium.org5aa501c2009-06-23 07:57:28 +00001540 if (source->IsSmi()) {
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001541 Move(dst, Smi::cast(*source));
ager@chromium.org5aa501c2009-06-23 07:57:28 +00001542 } else {
1543 movq(dst, source, RelocInfo::EMBEDDED_OBJECT);
1544 }
1545}
1546
1547
1548void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001549 ASSERT(!source->IsFailure());
kasperl@chromium.org68ac0092009-07-09 06:00:35 +00001550 if (source->IsSmi()) {
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001551 Move(dst, Smi::cast(*source));
kasperl@chromium.org68ac0092009-07-09 06:00:35 +00001552 } else {
1553 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1554 movq(dst, kScratchRegister);
1555 }
ager@chromium.org5aa501c2009-06-23 07:57:28 +00001556}
1557
1558
1559void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001560 if (source->IsSmi()) {
1561 SmiCompare(dst, Smi::cast(*source));
1562 } else {
1563 Move(kScratchRegister, source);
1564 cmpq(dst, kScratchRegister);
1565 }
ager@chromium.org5aa501c2009-06-23 07:57:28 +00001566}
1567
1568
ager@chromium.org3e875802009-06-29 08:26:34 +00001569void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
kasperl@chromium.org68ac0092009-07-09 06:00:35 +00001570 if (source->IsSmi()) {
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001571 SmiCompare(dst, Smi::cast(*source));
kasperl@chromium.org68ac0092009-07-09 06:00:35 +00001572 } else {
1573 ASSERT(source->IsHeapObject());
1574 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1575 cmpq(dst, kScratchRegister);
1576 }
ager@chromium.org3e875802009-06-29 08:26:34 +00001577}
1578
1579
ager@chromium.org5aa501c2009-06-23 07:57:28 +00001580void MacroAssembler::Push(Handle<Object> source) {
kasperl@chromium.org68ac0092009-07-09 06:00:35 +00001581 if (source->IsSmi()) {
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001582 Push(Smi::cast(*source));
kasperl@chromium.org68ac0092009-07-09 06:00:35 +00001583 } else {
1584 ASSERT(source->IsHeapObject());
1585 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1586 push(kScratchRegister);
1587 }
ager@chromium.org5aa501c2009-06-23 07:57:28 +00001588}
1589
1590
sgjesse@chromium.org0b6db592009-07-30 14:48:31 +00001591void MacroAssembler::Push(Smi* source) {
ager@chromium.org3811b432009-10-28 14:53:37 +00001592 intptr_t smi = reinterpret_cast<intptr_t>(source);
1593 if (is_int32(smi)) {
1594 push(Immediate(static_cast<int32_t>(smi)));
sgjesse@chromium.org0b6db592009-07-30 14:48:31 +00001595 } else {
ager@chromium.org3811b432009-10-28 14:53:37 +00001596 Set(kScratchRegister, smi);
1597 push(kScratchRegister);
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001598 }
1599}
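// A note on the is_int32 test above (a sketch, assuming kSmiShift == 32):
// the tagged bits of Smi::FromInt(v) are v << 32, so only Smi 0 fits in a
// 32-bit immediate and takes the short push; every other smi goes through
// kScratchRegister.
//
//   Push(Smi::FromInt(0));  // emits push(Immediate(0))
//   Push(Smi::FromInt(1));  // bits 1 << 32 do not fit: Set + push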
1600
1601
kmillikin@chromium.org13bd2942009-12-16 15:36:05 +00001602void MacroAssembler::Drop(int stack_elements) {
1603 if (stack_elements > 0) {
1604 addq(rsp, Immediate(stack_elements * kPointerSize));
1605 }
1606}
1607
1608
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001609void MacroAssembler::Test(const Operand& src, Smi* source) {
ager@chromium.org3811b432009-10-28 14:53:37 +00001610 intptr_t smi = reinterpret_cast<intptr_t>(source);
1611 if (is_int32(smi)) {
1612 testl(src, Immediate(static_cast<int32_t>(smi)));
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001613 } else {
ager@chromium.org3811b432009-10-28 14:53:37 +00001614 Move(kScratchRegister, source);
1615 testq(src, kScratchRegister);
sgjesse@chromium.org0b6db592009-07-30 14:48:31 +00001616 }
1617}
1618
1619
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001620void MacroAssembler::Jump(ExternalReference ext) {
1621 movq(kScratchRegister, ext);
1622 jmp(kScratchRegister);
1623}
1624
1625
1626void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
1627 movq(kScratchRegister, destination, rmode);
1628 jmp(kScratchRegister);
1629}
1630
1631
ager@chromium.org5aa501c2009-06-23 07:57:28 +00001632void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
sgjesse@chromium.orgc5145742009-10-07 09:00:33 +00001633 // TODO(X64): Inline this
1634 jmp(code_object, rmode);
ager@chromium.org5aa501c2009-06-23 07:57:28 +00001635}
1636
1637
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001638void MacroAssembler::Call(ExternalReference ext) {
1639 movq(kScratchRegister, ext);
1640 call(kScratchRegister);
1641}
1642
1643
1644void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
1645 movq(kScratchRegister, destination, rmode);
1646 call(kScratchRegister);
1647}
1648
1649
ager@chromium.org5aa501c2009-06-23 07:57:28 +00001650void MacroAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
ager@chromium.org5aa501c2009-06-23 07:57:28 +00001651 ASSERT(RelocInfo::IsCodeTarget(rmode));
sgjesse@chromium.org911335c2009-08-19 12:59:44 +00001652 WriteRecordedPositions();
sgjesse@chromium.orgc5145742009-10-07 09:00:33 +00001653 call(code_object, rmode);
ager@chromium.org5aa501c2009-06-23 07:57:28 +00001654}
1655
1656
ager@chromium.orge2902be2009-06-08 12:21:35 +00001657void MacroAssembler::PushTryHandler(CodeLocation try_location,
1658 HandlerType type) {
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001659 // Adjust this code if not the case.
1660 ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
1661
1662 // The pc (return address) is already on TOS. This code pushes state,
1663 // frame pointer and current handler. Check that they are expected
1664 // next on the stack, in that order.
ager@chromium.orge2902be2009-06-08 12:21:35 +00001665 ASSERT_EQ(StackHandlerConstants::kStateOffset,
1666 StackHandlerConstants::kPCOffset - kPointerSize);
ager@chromium.orge2902be2009-06-08 12:21:35 +00001667 ASSERT_EQ(StackHandlerConstants::kFPOffset,
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001668 StackHandlerConstants::kStateOffset - kPointerSize);
1669 ASSERT_EQ(StackHandlerConstants::kNextOffset,
ager@chromium.orge2902be2009-06-08 12:21:35 +00001670 StackHandlerConstants::kFPOffset - kPointerSize);
1671
1672 if (try_location == IN_JAVASCRIPT) {
1673 if (type == TRY_CATCH_HANDLER) {
1674 push(Immediate(StackHandler::TRY_CATCH));
1675 } else {
1676 push(Immediate(StackHandler::TRY_FINALLY));
1677 }
ager@chromium.orge2902be2009-06-08 12:21:35 +00001678 push(rbp);
ager@chromium.orge2902be2009-06-08 12:21:35 +00001679 } else {
1680 ASSERT(try_location == IN_JS_ENTRY);
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001681 // The frame pointer does not point to a JS frame so we save NULL
1682 // for rbp. We expect the code throwing an exception to check rbp
1683 // before dereferencing it to restore the context.
ager@chromium.orge2902be2009-06-08 12:21:35 +00001684 push(Immediate(StackHandler::ENTRY));
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001685 push(Immediate(0)); // NULL frame pointer.
ager@chromium.orge2902be2009-06-08 12:21:35 +00001686 }
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001687 // Save the current handler.
ager@chromium.orge2902be2009-06-08 12:21:35 +00001688 movq(kScratchRegister, ExternalReference(Top::k_handler_address));
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001689 push(Operand(kScratchRegister, 0));
ager@chromium.orge2902be2009-06-08 12:21:35 +00001690 // Link this handler.
1691 movq(Operand(kScratchRegister, 0), rsp);
1692}
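// Resulting handler layout (a sketch derived from the asserts above; the
// return address was already on the stack when PushTryHandler was called):
//
//   rsp + 24 : return address     <- StackHandlerConstants::kPCOffset
//   rsp + 16 : state              <- kStateOffset (TRY_CATCH/TRY_FINALLY/ENTRY)
//   rsp +  8 : saved rbp          <- kFPOffset (0 for JS entry frames)
//   rsp +  0 : previous handler   <- kNextOffset
//
// Top::k_handler_address now points at this record, i.e. holds rsp.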
1693
1694
kmillikin@chromium.org13bd2942009-12-16 15:36:05 +00001695void MacroAssembler::PopTryHandler() {
1696 ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
1697 // Unlink this handler.
1698 movq(kScratchRegister, ExternalReference(Top::k_handler_address));
1699 pop(Operand(kScratchRegister, 0));
1700 // Remove the remaining fields.
1701 addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
1702}
1703
1704
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001705void MacroAssembler::Ret() {
1706 ret(0);
1707}
1708
1709
ager@chromium.org3e875802009-06-29 08:26:34 +00001710void MacroAssembler::FCmp() {
ager@chromium.org3811b432009-10-28 14:53:37 +00001711 fucomip();
1712 ffree(0);
1713 fincstp();
ager@chromium.org3e875802009-06-29 08:26:34 +00001714}
1715
1716
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001717void MacroAssembler::CmpObjectType(Register heap_object,
1718 InstanceType type,
1719 Register map) {
1720 movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
1721 CmpInstanceType(map, type);
1722}
1723
1724
1725void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
1726 cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
1727 Immediate(static_cast<int8_t>(type)));
1728}
1729
1730
ager@chromium.org5c838252010-02-19 08:53:10 +00001731void MacroAssembler::CheckMap(Register obj,
1732 Handle<Map> map,
1733 Label* fail,
1734 bool is_heap_object) {
1735 if (!is_heap_object) {
1736 JumpIfSmi(obj, fail);
1737 }
1738 Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
1739 j(not_equal, fail);
1740}
1741
1742
erik.corry@gmail.com9dfbea42010-05-21 12:58:28 +00001743void MacroAssembler::AbortIfNotNumber(Register object) {
ager@chromium.org5c838252010-02-19 08:53:10 +00001744 Label ok;
1745 Condition is_smi = CheckSmi(object);
1746 j(is_smi, &ok);
1747 Cmp(FieldOperand(object, HeapObject::kMapOffset),
1748 Factory::heap_number_map());
erik.corry@gmail.com9dfbea42010-05-21 12:58:28 +00001749 Assert(equal, "Operand not a number");
ager@chromium.org5c838252010-02-19 08:53:10 +00001750 bind(&ok);
1751}
1752
1753
erik.corry@gmail.com9dfbea42010-05-21 12:58:28 +00001754void MacroAssembler::AbortIfNotSmi(Register object) {
lrn@chromium.org25156de2010-04-06 13:10:27 +00001755 Label ok;
1756 Condition is_smi = CheckSmi(object);
erik.corry@gmail.com9dfbea42010-05-21 12:58:28 +00001757 Assert(is_smi, "Operand not a smi");
lrn@chromium.org25156de2010-04-06 13:10:27 +00001758}
1759
1760
sgjesse@chromium.orgb302e562010-02-03 11:26:59 +00001761Condition MacroAssembler::IsObjectStringType(Register heap_object,
1762 Register map,
1763 Register instance_type) {
1764 movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
1765 movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
1766 ASSERT(kNotStringTag != 0);
1767 testb(instance_type, Immediate(kIsNotStringMask));
1768 return zero;
1769}
1770
1771
kasperl@chromium.org86f77b72009-07-06 08:21:57 +00001772void MacroAssembler::TryGetFunctionPrototype(Register function,
1773 Register result,
1774 Label* miss) {
1775 // Check that the function isn't a smi.
1776 testl(function, Immediate(kSmiTagMask));
1777 j(zero, miss);
1778
1779 // Check that the function really is a function.
1780 CmpObjectType(function, JS_FUNCTION_TYPE, result);
1781 j(not_equal, miss);
1782
1783 // Make sure that the function has an instance prototype.
1784 Label non_instance;
1785 testb(FieldOperand(result, Map::kBitFieldOffset),
1786 Immediate(1 << Map::kHasNonInstancePrototype));
1787 j(not_zero, &non_instance);
1788
1789 // Get the prototype or initial map from the function.
1790 movq(result,
1791 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
1792
1793 // If the prototype or initial map is the hole, don't return it and
1794 // simply miss the cache instead. This will allow us to allocate a
1795 // prototype object on-demand in the runtime system.
ager@chromium.org18ad94b2009-09-02 08:22:29 +00001796 CompareRoot(result, Heap::kTheHoleValueRootIndex);
kasperl@chromium.org86f77b72009-07-06 08:21:57 +00001797 j(equal, miss);
1798
1799 // If the function does not have an initial map, we're done.
1800 Label done;
1801 CmpObjectType(result, MAP_TYPE, kScratchRegister);
1802 j(not_equal, &done);
1803
1804 // Get the prototype from the initial map.
1805 movq(result, FieldOperand(result, Map::kPrototypeOffset));
1806 jmp(&done);
1807
1808 // Non-instance prototype: Fetch prototype from constructor field
1809 // in initial map.
1810 bind(&non_instance);
1811 movq(result, FieldOperand(result, Map::kConstructorOffset));
1812
1813 // All done.
1814 bind(&done);
1815}
1816
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001817
1818void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
1819 if (FLAG_native_code_counters && counter->Enabled()) {
1820 movq(kScratchRegister, ExternalReference(counter));
1821 movl(Operand(kScratchRegister, 0), Immediate(value));
1822 }
1823}
1824
1825
1826void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
1827 ASSERT(value > 0);
1828 if (FLAG_native_code_counters && counter->Enabled()) {
1829 movq(kScratchRegister, ExternalReference(counter));
1830 Operand operand(kScratchRegister, 0);
1831 if (value == 1) {
1832 incl(operand);
1833 } else {
1834 addl(operand, Immediate(value));
1835 }
1836 }
1837}
1838
1839
1840void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
1841 ASSERT(value > 0);
1842 if (FLAG_native_code_counters && counter->Enabled()) {
1843 movq(kScratchRegister, ExternalReference(counter));
1844 Operand operand(kScratchRegister, 0);
1845 if (value == 1) {
1846 decl(operand);
1847 } else {
1848 subl(operand, Immediate(value));
1849 }
1850 }
1851}
1852
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001853#ifdef ENABLE_DEBUGGER_SUPPORT
1854
1855void MacroAssembler::PushRegistersFromMemory(RegList regs) {
1856 ASSERT((regs & ~kJSCallerSaved) == 0);
1857 // Push the content of the memory location to the stack.
1858 for (int i = 0; i < kNumJSCallerSaved; i++) {
1859 int r = JSCallerSavedCode(i);
1860 if ((regs & (1 << r)) != 0) {
1861 ExternalReference reg_addr =
1862 ExternalReference(Debug_Address::Register(i));
1863 movq(kScratchRegister, reg_addr);
1864 push(Operand(kScratchRegister, 0));
1865 }
1866 }
1867}
1868
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001869
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001870void MacroAssembler::SaveRegistersToMemory(RegList regs) {
1871 ASSERT((regs & ~kJSCallerSaved) == 0);
1872 // Copy the content of registers to memory location.
1873 for (int i = 0; i < kNumJSCallerSaved; i++) {
1874 int r = JSCallerSavedCode(i);
1875 if ((regs & (1 << r)) != 0) {
1876 Register reg = { r };
1877 ExternalReference reg_addr =
1878 ExternalReference(Debug_Address::Register(i));
1879 movq(kScratchRegister, reg_addr);
1880 movq(Operand(kScratchRegister, 0), reg);
1881 }
1882 }
1883}
1884
1885
1886void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
1887 ASSERT((regs & ~kJSCallerSaved) == 0);
1888 // Copy the content of memory location to registers.
1889 for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
1890 int r = JSCallerSavedCode(i);
1891 if ((regs & (1 << r)) != 0) {
1892 Register reg = { r };
1893 ExternalReference reg_addr =
1894 ExternalReference(Debug_Address::Register(i));
1895 movq(kScratchRegister, reg_addr);
1896 movq(reg, Operand(kScratchRegister, 0));
1897 }
1898 }
1899}
1900
1901
1902void MacroAssembler::PopRegistersToMemory(RegList regs) {
1903 ASSERT((regs & ~kJSCallerSaved) == 0);
1904 // Pop the content from the stack to the memory location.
1905 for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
1906 int r = JSCallerSavedCode(i);
1907 if ((regs & (1 << r)) != 0) {
1908 ExternalReference reg_addr =
1909 ExternalReference(Debug_Address::Register(i));
1910 movq(kScratchRegister, reg_addr);
1911 pop(Operand(kScratchRegister, 0));
1912 }
1913 }
1914}
1915
1916
1917void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
1918 Register scratch,
1919 RegList regs) {
1920 ASSERT(!scratch.is(kScratchRegister));
1921 ASSERT(!base.is(kScratchRegister));
1922 ASSERT(!base.is(scratch));
1923 ASSERT((regs & ~kJSCallerSaved) == 0);
1924 // Copy the content of the stack to the memory location and adjust base.
1925 for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
1926 int r = JSCallerSavedCode(i);
1927 if ((regs & (1 << r)) != 0) {
1928 movq(scratch, Operand(base, 0));
1929 ExternalReference reg_addr =
1930 ExternalReference(Debug_Address::Register(i));
1931 movq(kScratchRegister, reg_addr);
1932 movq(Operand(kScratchRegister, 0), scratch);
1933 lea(base, Operand(base, kPointerSize));
1934 }
1935 }
1936}
1937
ager@chromium.org5c838252010-02-19 08:53:10 +00001938void MacroAssembler::DebugBreak() {
1939 ASSERT(allow_stub_calls());
1940 xor_(rax, rax); // no arguments
1941 movq(rbx, ExternalReference(Runtime::kDebugBreak));
1942 CEntryStub ces(1);
1943 Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
ager@chromium.org3e875802009-06-29 08:26:34 +00001944}
ager@chromium.org5c838252010-02-19 08:53:10 +00001945#endif // ENABLE_DEBUGGER_SUPPORT
ager@chromium.org3e875802009-06-29 08:26:34 +00001946
1947
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001948void MacroAssembler::InvokePrologue(const ParameterCount& expected,
1949 const ParameterCount& actual,
1950 Handle<Code> code_constant,
1951 Register code_register,
1952 Label* done,
1953 InvokeFlag flag) {
1954 bool definitely_matches = false;
1955 Label invoke;
1956 if (expected.is_immediate()) {
1957 ASSERT(actual.is_immediate());
1958 if (expected.immediate() == actual.immediate()) {
1959 definitely_matches = true;
1960 } else {
1961 movq(rax, Immediate(actual.immediate()));
1962 if (expected.immediate() ==
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00001963 SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
ager@chromium.orgeadaf222009-06-16 09:43:10 +00001964 // Don't worry about adapting arguments for built-ins that
1965 // don't want that done. Skip adaption code by making it look
1966 // like we have a match between expected and actual number of
1967 // arguments.
1968 definitely_matches = true;
1969 } else {
1970 movq(rbx, Immediate(expected.immediate()));
1971 }
1972 }
1973 } else {
1974 if (actual.is_immediate()) {
1975 // Expected is in register, actual is immediate. This is the
1976 // case when we invoke function values without going through the
1977 // IC mechanism.
1978 cmpq(expected.reg(), Immediate(actual.immediate()));
1979 j(equal, &invoke);
1980 ASSERT(expected.reg().is(rbx));
1981 movq(rax, Immediate(actual.immediate()));
1982 } else if (!expected.reg().is(actual.reg())) {
1983 // Both expected and actual are in (different) registers. This
1984 // is the case when we invoke functions using call and apply.
1985 cmpq(expected.reg(), actual.reg());
1986 j(equal, &invoke);
1987 ASSERT(actual.reg().is(rax));
1988 ASSERT(expected.reg().is(rbx));
1989 }
1990 }
1991
1992 if (!definitely_matches) {
1993 Handle<Code> adaptor =
1994 Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
1995 if (!code_constant.is_null()) {
1996 movq(rdx, code_constant, RelocInfo::EMBEDDED_OBJECT);
1997 addq(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
1998 } else if (!code_register.is(rdx)) {
1999 movq(rdx, code_register);
2000 }
2001
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002002 if (flag == CALL_FUNCTION) {
sgjesse@chromium.org911335c2009-08-19 12:59:44 +00002003 Call(adaptor, RelocInfo::CODE_TARGET);
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002004 jmp(done);
2005 } else {
sgjesse@chromium.org911335c2009-08-19 12:59:44 +00002006 Jump(adaptor, RelocInfo::CODE_TARGET);
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002007 }
2008 bind(&invoke);
2009 }
2010}
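// A sketch of the three cases handled above (hypothetical argument counts):
//
//   expected.immediate() == 2, actual.immediate() == 2
//     -> definitely_matches; fall straight through to the invoke.
//   expected.immediate() == 2, actual.immediate() == 3
//     -> rax = 3, rbx = 2, Call/Jump Builtins::ArgumentsAdaptorTrampoline.
//   expected in rbx, actual in rax
//     -> cmpq + j(equal, &invoke); only a mismatch reaches the adaptor.
//
// The register moves above set things up the way the adaptor trampoline
// expects them: actual count in rax, expected count in rbx, code to invoke
// in rdx.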
2011
2012
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002013void MacroAssembler::InvokeCode(Register code,
2014 const ParameterCount& expected,
2015 const ParameterCount& actual,
2016 InvokeFlag flag) {
2017 Label done;
2018 InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
2019 if (flag == CALL_FUNCTION) {
2020 call(code);
2021 } else {
2022 ASSERT(flag == JUMP_FUNCTION);
2023 jmp(code);
2024 }
2025 bind(&done);
2026}
2027
2028
2029void MacroAssembler::InvokeCode(Handle<Code> code,
2030 const ParameterCount& expected,
2031 const ParameterCount& actual,
2032 RelocInfo::Mode rmode,
2033 InvokeFlag flag) {
2034 Label done;
2035 Register dummy = rax;
2036 InvokePrologue(expected, actual, code, dummy, &done, flag);
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002037 if (flag == CALL_FUNCTION) {
ager@chromium.org3e875802009-06-29 08:26:34 +00002038 Call(code, rmode);
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002039 } else {
2040 ASSERT(flag == JUMP_FUNCTION);
ager@chromium.org3e875802009-06-29 08:26:34 +00002041 Jump(code, rmode);
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002042 }
2043 bind(&done);
2044}
2045
2046
2047void MacroAssembler::InvokeFunction(Register function,
2048 const ParameterCount& actual,
2049 InvokeFlag flag) {
2050 ASSERT(function.is(rdi));
2051 movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
2052 movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
ager@chromium.org3e875802009-06-29 08:26:34 +00002053 movsxlq(rbx,
2054 FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002055 movq(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset));
ager@chromium.org5aa501c2009-06-23 07:57:28 +00002056 // Advances rdx to the end of the Code object header, to the start of
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002057 // the executable code.
2058 lea(rdx, FieldOperand(rdx, Code::kHeaderSize));
2059
2060 ParameterCount expected(rbx);
2061 InvokeCode(rdx, expected, actual, flag);
2062}
2063
2064
ager@chromium.org5c838252010-02-19 08:53:10 +00002065void MacroAssembler::InvokeFunction(JSFunction* function,
2066 const ParameterCount& actual,
2067 InvokeFlag flag) {
2068 ASSERT(function->is_compiled());
2069 // Get the function and setup the context.
2070 Move(rdi, Handle<JSFunction>(function));
2071 movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
2072
2073 // Invoke the cached code.
2074 Handle<Code> code(function->code());
2075 ParameterCount expected(function->shared()->formal_parameter_count());
2076 InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag);
2077}
2078
2079
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002080void MacroAssembler::EnterFrame(StackFrame::Type type) {
2081 push(rbp);
2082 movq(rbp, rsp);
2083 push(rsi); // Context.
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00002084 Push(Smi::FromInt(type));
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002085 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
2086 push(kScratchRegister);
2087 if (FLAG_debug_code) {
2088 movq(kScratchRegister,
2089 Factory::undefined_value(),
2090 RelocInfo::EMBEDDED_OBJECT);
2091 cmpq(Operand(rsp, 0), kScratchRegister);
2092 Check(not_equal, "code object not properly patched");
2093 }
2094}
2095
2096
2097void MacroAssembler::LeaveFrame(StackFrame::Type type) {
2098 if (FLAG_debug_code) {
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00002099 Move(kScratchRegister, Smi::FromInt(type));
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002100 cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
2101 Check(equal, "stack frame types must match");
2102 }
2103 movq(rsp, rbp);
2104 pop(rbp);
2105}
2106
2107
ager@chromium.orgc4c92722009-11-18 14:12:51 +00002108void MacroAssembler::EnterExitFrame(ExitFrame::Mode mode, int result_size) {
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002109 // Setup the frame structure on the stack.
kasperl@chromium.org86f77b72009-07-06 08:21:57 +00002110 // All constants are relative to the frame pointer of the exit frame.
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002111 ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
2112 ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
2113 ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
2114 push(rbp);
2115 movq(rbp, rsp);
2116
2117 // Reserve room for the saved entry stack pointer and push the code object.
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00002118 ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
ager@chromium.org5c838252010-02-19 08:53:10 +00002119 push(Immediate(0)); // Saved entry sp, patched before call.
2120 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
2121 push(kScratchRegister); // Accessed from ExitFrame::code_slot.
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002122
2123 // Save the frame pointer and the context in top.
2124 ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
2125 ExternalReference context_address(Top::k_context_address);
kasperl@chromium.org86f77b72009-07-06 08:21:57 +00002126 movq(r14, rax); // Backup rax before we use it.
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002127
2128 movq(rax, rbp);
2129 store_rax(c_entry_fp_address);
2130 movq(rax, rsi);
2131 store_rax(context_address);
2132
2133 // Setup argv in callee-saved register r15. It is reused in LeaveExitFrame,
2134 // so it must be retained across the C-call.
2135 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
kasperl@chromium.org86f77b72009-07-06 08:21:57 +00002136 lea(r15, Operand(rbp, r14, times_pointer_size, offset));
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002137
2138#ifdef ENABLE_DEBUGGER_SUPPORT
2139 // Save the state of all registers to the stack from the memory
2140 // location. This is needed to allow nested break points.
ager@chromium.orgc4c92722009-11-18 14:12:51 +00002141 if (mode == ExitFrame::MODE_DEBUG) {
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002142 // TODO(1243899): This should be symmetric to
2143 // CopyRegistersFromStackToMemory() but it isn't! esp is assumed
2144 // correct here, but computed for the other call. Very error
2145 // prone! FIX THIS. Actually there are deeper problems with
2146 // register saving than this asymmetry (see the bug report
2147 // associated with this issue).
2148 PushRegistersFromMemory(kJSCallerSaved);
2149 }
2150#endif
2151
ager@chromium.orga1645e22009-09-09 19:27:10 +00002152#ifdef _WIN64
2153 // Reserve space on stack for result and argument structures, if necessary.
2154 int result_stack_space = (result_size < 2) ? 0 : result_size * kPointerSize;
2155 // Reserve space for the Arguments object. The Windows 64-bit ABI
2156 // requires us to pass this structure as a pointer to its location on
2157 // the stack. The structure contains 2 values.
2158 int argument_stack_space = 2 * kPointerSize;
2159 // We also need backing space for 4 parameters, even though
2160 // we only pass one or two parameters, and they are passed in registers.
2161 int argument_mirror_space = 4 * kPointerSize;
2162 int total_stack_space =
2163 argument_mirror_space + argument_stack_space + result_stack_space;
2164 subq(rsp, Immediate(total_stack_space));
2165#endif
2166
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002167 // Get the required frame alignment for the OS.
2168 static const int kFrameAlignment = OS::ActivationFrameAlignment();
2169 if (kFrameAlignment > 0) {
2170 ASSERT(IsPowerOf2(kFrameAlignment));
2171 movq(kScratchRegister, Immediate(-kFrameAlignment));
2172 and_(rsp, kScratchRegister);
2173 }
2174
2175 // Patch the saved entry sp.
2176 movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
2177}
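// Frame layout produced above (a sketch; the first four offsets follow the
// asserts at the top of the function, and the code-object slot is assumed to
// sit directly below kSPOffset):
//
//   rbp + 16 : first stack argument        <- kCallerSPDisplacement
//   rbp +  8 : return address              <- kCallerPCOffset
//   rbp +  0 : saved rbp                   <- kCallerFPOffset
//   rbp -  8 : saved entry sp (patched)    <- kSPOffset
//   rbp - 16 : code object
//   ...      : debugger register copies and Win64 home/result space
//   rsp      : aligned to OS::ActivationFrameAlignment()
//
// r14 holds the incoming rax (the argument count) and r15 the computed argv
// across the C call, so both must survive until LeaveExitFrame.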
2178
2179
ager@chromium.orgc4c92722009-11-18 14:12:51 +00002180void MacroAssembler::LeaveExitFrame(ExitFrame::Mode mode, int result_size) {
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002181 // Registers:
2182 // r15 : argv
2183#ifdef ENABLE_DEBUGGER_SUPPORT
2184 // Restore the memory copy of the registers by digging them out from
2185 // the stack. This is needed to allow nested break points.
ager@chromium.orgc4c92722009-11-18 14:12:51 +00002186 if (mode == ExitFrame::MODE_DEBUG) {
ager@chromium.orga1645e22009-09-09 19:27:10 +00002187 // It's okay to clobber register rbx below because we don't need
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002188 // the function pointer after this.
2189 const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
ager@chromium.orgc4c92722009-11-18 14:12:51 +00002190 int kOffset = ExitFrameConstants::kCodeOffset - kCallerSavedSize;
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002191 lea(rbx, Operand(rbp, kOffset));
2192 CopyRegistersFromStackToMemory(rbx, rcx, kJSCallerSaved);
2193 }
2194#endif
2195
2196 // Get the return address from the stack and restore the frame pointer.
2197 movq(rcx, Operand(rbp, 1 * kPointerSize));
2198 movq(rbp, Operand(rbp, 0 * kPointerSize));
2199
ager@chromium.orga1645e22009-09-09 19:27:10 +00002200 // Pop everything up to and including the arguments and the receiver
2201 // from the caller stack.
ager@chromium.orgeadaf222009-06-16 09:43:10 +00002202 lea(rsp, Operand(r15, 1 * kPointerSize));
2203
2204 // Restore current context from top and clear it in debug mode.
2205 ExternalReference context_address(Top::k_context_address);
2206 movq(kScratchRegister, context_address);
2207 movq(rsi, Operand(kScratchRegister, 0));
2208#ifdef DEBUG
2209 movq(Operand(kScratchRegister, 0), Immediate(0));
2210#endif
2211
2212 // Push the return address to get ready to return.
2213 push(rcx);
2214
2215 // Clear the top frame.
2216 ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
2217 movq(kScratchRegister, c_entry_fp_address);
2218 movq(Operand(kScratchRegister, 0), Immediate(0));
2219}
2220
2221
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00002222Register MacroAssembler::CheckMaps(JSObject* object,
2223 Register object_reg,
2224 JSObject* holder,
2225 Register holder_reg,
kasperl@chromium.orge959c182009-07-27 08:59:04 +00002226 Register scratch,
kmillikin@chromium.org4111b802010-05-03 10:34:42 +00002227 int save_at_depth,
kasperl@chromium.orge959c182009-07-27 08:59:04 +00002228 Label* miss) {
2229 // Make sure there's no overlap between scratch and the other
2230 // registers.
2231 ASSERT(!scratch.is(object_reg) && !scratch.is(holder_reg));
2232
2233 // Keep track of the current object in register reg. On the first
2234 // iteration, reg is an alias for object_reg, on later iterations,
2235 // it is an alias for holder_reg.
2236 Register reg = object_reg;
kmillikin@chromium.org4111b802010-05-03 10:34:42 +00002237 int depth = 0;
2238
2239 if (save_at_depth == depth) {
ager@chromium.orgac091b72010-05-05 07:34:42 +00002240 movq(Operand(rsp, kPointerSize), object_reg);
kmillikin@chromium.org4111b802010-05-03 10:34:42 +00002241 }
kasperl@chromium.orge959c182009-07-27 08:59:04 +00002242
2243 // Check the maps in the prototype chain.
2244 // Traverse the prototype chain from the object and do map checks.
2245 while (object != holder) {
2246 depth++;
2247
2248 // Only global objects and objects that do not require access
2249 // checks are allowed in stubs.
2250 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
2251
2252 JSObject* prototype = JSObject::cast(object->GetPrototype());
2253 if (Heap::InNewSpace(prototype)) {
2254 // Get the map of the current object.
2255 movq(scratch, FieldOperand(reg, HeapObject::kMapOffset));
2256 Cmp(scratch, Handle<Map>(object->map()));
2257 // Branch on the result of the map check.
2258 j(not_equal, miss);
2259 // Check access rights to the global object. This has to happen
2260 // after the map check so that we know that the object is
2261 // actually a global object.
2262 if (object->IsJSGlobalProxy()) {
2263 CheckAccessGlobalProxy(reg, scratch, miss);
2264
2265 // Restore scratch register to be the map of the object.
2266 // We load the prototype from the map in the scratch register.
2267 movq(scratch, FieldOperand(reg, HeapObject::kMapOffset));
2268 }
2269 // The prototype is in new space; we cannot store a reference
2270 // to it in the code. Load it from the map.
2271 reg = holder_reg; // from now the object is in holder_reg
2272 movq(reg, FieldOperand(scratch, Map::kPrototypeOffset));
2273
2274 } else {
2275 // Check the map of the current object.
2276 Cmp(FieldOperand(reg, HeapObject::kMapOffset),
2277 Handle<Map>(object->map()));
2278 // Branch on the result of the map check.
2279 j(not_equal, miss);
2280 // Check access rights to the global object. This has to happen
2281 // after the map check so that we know that the object is
2282 // actually a global object.
2283 if (object->IsJSGlobalProxy()) {
2284 CheckAccessGlobalProxy(reg, scratch, miss);
2285 }
2286 // The prototype is in old space; load it directly.
2287 reg = holder_reg; // From now on the object is in holder_reg.
2288 Move(reg, Handle<JSObject>(prototype));
2289 }
2290
kmillikin@chromium.org4111b802010-05-03 10:34:42 +00002291 if (save_at_depth == depth) {
2292 movq(Operand(rsp, kPointerSize), reg);
2293 }
2294
kasperl@chromium.orge959c182009-07-27 08:59:04 +00002295 // Go to the next object in the prototype chain.
2296 object = prototype;
2297 }
2298
2299 // Check the holder map.
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00002300 Cmp(FieldOperand(reg, HeapObject::kMapOffset), Handle<Map>(holder->map()));
kasperl@chromium.orge959c182009-07-27 08:59:04 +00002301 j(not_equal, miss);
2302
2303 // Log the check depth.
kmillikin@chromium.org4111b802010-05-03 10:34:42 +00002304 LOG(IntEvent("check-maps-depth", depth + 1));
kasperl@chromium.orge959c182009-07-27 08:59:04 +00002305
2306 // Perform security check for access to the global object and return
2307 // the holder register.
2308 ASSERT(object == holder);
2309 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
2310 if (object->IsJSGlobalProxy()) {
2311 CheckAccessGlobalProxy(reg, scratch, miss);
2312 }
2313 return reg;
2314}
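// A sketch of the code the loop above emits for a chain
// object -> proto -> holder with both prototypes in old space (hypothetical
// objects; the maps are embedded as compile-time handles):
//
//   Cmp(FieldOperand(object_reg, HeapObject::kMapOffset), object->map());
//   j(not_equal, miss);
//   Move(holder_reg, proto);                  // reg switches to holder_reg
//   Cmp(FieldOperand(holder_reg, HeapObject::kMapOffset), proto->map());
//   j(not_equal, miss);
//   Move(holder_reg, holder);
//   Cmp(FieldOperand(holder_reg, HeapObject::kMapOffset), holder->map());
//   j(not_equal, miss);
//
// Only a prototype in new space forces a run-time load of the prototype out
// of its map, because its address cannot be embedded in generated code.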
2315
2316
kasperl@chromium.orge959c182009-07-27 08:59:04 +00002317void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
2318 Register scratch,
2319 Label* miss) {
2320 Label same_contexts;
2321
2322 ASSERT(!holder_reg.is(scratch));
2323 ASSERT(!scratch.is(kScratchRegister));
2324 // Load current lexical context from the stack frame.
2325 movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));
2326
2327 // When generating debug code, make sure the lexical context is set.
2328 if (FLAG_debug_code) {
2329 cmpq(scratch, Immediate(0));
2330 Check(not_equal, "we should not have an empty lexical context");
2331 }
2332 // Load the global context of the current context.
2333 int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
2334 movq(scratch, FieldOperand(scratch, offset));
2335 movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));
2336
2337 // Check the context is a global context.
2338 if (FLAG_debug_code) {
2339 Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
2340 Factory::global_context_map());
2341 Check(equal, "JSGlobalObject::global_context should be a global context.");
2342 }
2343
2344 // Check if both contexts are the same.
2345 cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
2346 j(equal, &same_contexts);
2347
2348 // Compare security tokens.
2349 // Check that the security token in the calling global object is
2350 // compatible with the security token in the receiving global
2351 // object.
2352
2353 // Check the context is a global context.
2354 if (FLAG_debug_code) {
2355 // Preserve original value of holder_reg.
2356 push(holder_reg);
2357 movq(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002358 CompareRoot(holder_reg, Heap::kNullValueRootIndex);
kasperl@chromium.orge959c182009-07-27 08:59:04 +00002359 Check(not_equal, "JSGlobalProxy::context() should not be null.");
2360
2361 // Read the first word and compare to global_context_map(),
2362 movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002363 CompareRoot(holder_reg, Heap::kGlobalContextMapRootIndex);
kasperl@chromium.orge959c182009-07-27 08:59:04 +00002364 Check(equal, "JSGlobalObject::global_context should be a global context.");
2365 pop(holder_reg);
2366 }
2367
2368 movq(kScratchRegister,
2369 FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
christian.plesner.hansen@gmail.com9d58c2b2009-10-16 11:48:38 +00002370 int token_offset =
2371 Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
kasperl@chromium.orge959c182009-07-27 08:59:04 +00002372 movq(scratch, FieldOperand(scratch, token_offset));
2373 cmpq(scratch, FieldOperand(kScratchRegister, token_offset));
2374 j(not_equal, miss);
2375
2376 bind(&same_contexts);
2377}
2378
2379
ager@chromium.orga1645e22009-09-09 19:27:10 +00002380void MacroAssembler::LoadAllocationTopHelper(Register result,
2381 Register result_end,
2382 Register scratch,
2383 AllocationFlags flags) {
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002384 ExternalReference new_space_allocation_top =
2385 ExternalReference::new_space_allocation_top_address();
2386
2387 // Just return if allocation top is already known.
ager@chromium.orga1645e22009-09-09 19:27:10 +00002388 if ((flags & RESULT_CONTAINS_TOP) != 0) {
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002389 // No use of scratch if allocation top is provided.
ager@chromium.orgac091b72010-05-05 07:34:42 +00002390 ASSERT(!scratch.is_valid());
ager@chromium.orga1645e22009-09-09 19:27:10 +00002391#ifdef DEBUG
2392 // Assert that result actually contains top on entry.
2393 movq(kScratchRegister, new_space_allocation_top);
2394 cmpq(result, Operand(kScratchRegister, 0));
2395 Check(equal, "Unexpected allocation top");
2396#endif
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002397 return;
2398 }
2399
ager@chromium.orgac091b72010-05-05 07:34:42 +00002400 // Move address of new object to result. Use scratch register if available,
2401 // and keep address in scratch until call to UpdateAllocationTopHelper.
2402 if (scratch.is_valid()) {
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002403 ASSERT(!scratch.is(result_end));
2404 movq(scratch, new_space_allocation_top);
2405 movq(result, Operand(scratch, 0));
ager@chromium.orgac091b72010-05-05 07:34:42 +00002406 } else if (result.is(rax)) {
2407 load_rax(new_space_allocation_top);
2408 } else {
2409 movq(kScratchRegister, new_space_allocation_top);
2410 movq(result, Operand(kScratchRegister, 0));
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002411 }
2412}
2413
2414
2415void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
2416 Register scratch) {
sgjesse@chromium.orgac6aa172009-12-04 12:29:05 +00002417 if (FLAG_debug_code) {
2418 testq(result_end, Immediate(kObjectAlignmentMask));
2419 Check(zero, "Unaligned allocation in new space");
2420 }
2421
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002422 ExternalReference new_space_allocation_top =
2423 ExternalReference::new_space_allocation_top_address();
2424
2425 // Update new top.
2426 if (result_end.is(rax)) {
2427 // rax can be stored directly to a memory location.
2428 store_rax(new_space_allocation_top);
2429 } else {
2430 // Register required - use scratch provided if available.
ager@chromium.orgac091b72010-05-05 07:34:42 +00002431 if (scratch.is_valid()) {
2432 movq(Operand(scratch, 0), result_end);
2433 } else {
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002434 movq(kScratchRegister, new_space_allocation_top);
2435 movq(Operand(kScratchRegister, 0), result_end);
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002436 }
2437 }
2438}
2439
2440
sgjesse@chromium.orgc5145742009-10-07 09:00:33 +00002441void MacroAssembler::AllocateInNewSpace(int object_size,
2442 Register result,
2443 Register result_end,
2444 Register scratch,
2445 Label* gc_required,
2446 AllocationFlags flags) {
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002447 ASSERT(!result.is(result_end));
2448
2449 // Load address of new object into result.
ager@chromium.orga1645e22009-09-09 19:27:10 +00002450 LoadAllocationTopHelper(result, result_end, scratch, flags);
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002451
2452 // Calculate new top and bail out if new space is exhausted.
2453 ExternalReference new_space_allocation_limit =
2454 ExternalReference::new_space_allocation_limit_address();
ager@chromium.orgac091b72010-05-05 07:34:42 +00002455
2456 Register top_reg = result_end.is_valid() ? result_end : result;
2457
2458 if (top_reg.is(result)) {
2459 addq(top_reg, Immediate(object_size));
2460 } else {
2461 lea(top_reg, Operand(result, object_size));
2462 }
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002463 movq(kScratchRegister, new_space_allocation_limit);
ager@chromium.orgac091b72010-05-05 07:34:42 +00002464 cmpq(top_reg, Operand(kScratchRegister, 0));
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002465 j(above, gc_required);
2466
2467 // Update allocation top.
ager@chromium.orgac091b72010-05-05 07:34:42 +00002468 UpdateAllocationTopHelper(top_reg, scratch);
ager@chromium.orga1645e22009-09-09 19:27:10 +00002469
ager@chromium.orgac091b72010-05-05 07:34:42 +00002470 if (top_reg.is(result)) {
2471 if ((flags & TAG_OBJECT) != 0) {
2472 subq(result, Immediate(object_size - kHeapObjectTag));
2473 } else {
2474 subq(result, Immediate(object_size));
2475 }
2476 } else if ((flags & TAG_OBJECT) != 0) {
2477 // Tag the result if requested.
ager@chromium.orga1645e22009-09-09 19:27:10 +00002478 addq(result, Immediate(kHeapObjectTag));
2479 }
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002480}
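// The fast path above is a bump-pointer allocation; in C-like pseudocode
// (a sketch only, ignoring the RESULT_CONTAINS_TOP and scratch variations):
//
//   char* top   = *new_space_allocation_top;
//   char* limit = *new_space_allocation_limit;
//   if (top + object_size > limit) goto gc_required;
//   *new_space_allocation_top = top + object_size;
//   result = (flags & TAG_OBJECT) ? top + kHeapObjectTag : top;
//
// The top_reg.is(result) case lets callers pass no valid result_end
// register; the size is then subtracted again after the top update.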
2481
2482
sgjesse@chromium.orgc5145742009-10-07 09:00:33 +00002483void MacroAssembler::AllocateInNewSpace(int header_size,
2484 ScaleFactor element_size,
2485 Register element_count,
2486 Register result,
2487 Register result_end,
2488 Register scratch,
2489 Label* gc_required,
2490 AllocationFlags flags) {
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002491 ASSERT(!result.is(result_end));
2492
2493 // Load address of new object into result.
ager@chromium.orga1645e22009-09-09 19:27:10 +00002494 LoadAllocationTopHelper(result, result_end, scratch, flags);
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002495
2496 // Calculate new top and bail out if new space is exhausted.
2497 ExternalReference new_space_allocation_limit =
2498 ExternalReference::new_space_allocation_limit_address();
2499 lea(result_end, Operand(result, element_count, element_size, header_size));
2500 movq(kScratchRegister, new_space_allocation_limit);
2501 cmpq(result_end, Operand(kScratchRegister, 0));
2502 j(above, gc_required);
2503
2504 // Update allocation top.
2505 UpdateAllocationTopHelper(result_end, scratch);
ager@chromium.orga1645e22009-09-09 19:27:10 +00002506
2507 // Tag the result if requested.
2508 if ((flags & TAG_OBJECT) != 0) {
2509 addq(result, Immediate(kHeapObjectTag));
2510 }
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002511}
2512
2513
sgjesse@chromium.orgc5145742009-10-07 09:00:33 +00002514void MacroAssembler::AllocateInNewSpace(Register object_size,
2515 Register result,
2516 Register result_end,
2517 Register scratch,
2518 Label* gc_required,
2519 AllocationFlags flags) {
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002520 // Load address of new object into result.
ager@chromium.orga1645e22009-09-09 19:27:10 +00002521 LoadAllocationTopHelper(result, result_end, scratch, flags);
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002522
2523 // Calculate new top and bail out if new space is exhausted.
2524 ExternalReference new_space_allocation_limit =
2525 ExternalReference::new_space_allocation_limit_address();
2526 if (!object_size.is(result_end)) {
2527 movq(result_end, object_size);
2528 }
2529 addq(result_end, result);
2530 movq(kScratchRegister, new_space_allocation_limit);
2531 cmpq(result_end, Operand(kScratchRegister, 0));
2532 j(above, gc_required);
2533
2534 // Update allocation top.
2535 UpdateAllocationTopHelper(result_end, scratch);
ager@chromium.orga1645e22009-09-09 19:27:10 +00002536
2537 // Tag the result if requested.
2538 if ((flags & TAG_OBJECT) != 0) {
2539 addq(result, Immediate(kHeapObjectTag));
2540 }
ager@chromium.org18ad94b2009-09-02 08:22:29 +00002541}
2542
2543
2544void MacroAssembler::UndoAllocationInNewSpace(Register object) {
2545 ExternalReference new_space_allocation_top =
2546 ExternalReference::new_space_allocation_top_address();
2547
2548 // Make sure the object has no tag before resetting top.
2549 and_(object, Immediate(~kHeapObjectTagMask));
2550 movq(kScratchRegister, new_space_allocation_top);
2551#ifdef DEBUG
2552 cmpq(object, Operand(kScratchRegister, 0));
2553 Check(below, "Undo allocation of non allocated memory");
2554#endif
2555 movq(Operand(kScratchRegister, 0), object);
2556}
2557
2558
ager@chromium.org3811b432009-10-28 14:53:37 +00002559void MacroAssembler::AllocateHeapNumber(Register result,
2560 Register scratch,
2561 Label* gc_required) {
2562 // Allocate heap number in new space.
2563 AllocateInNewSpace(HeapNumber::kSize,
2564 result,
2565 scratch,
2566 no_reg,
2567 gc_required,
2568 TAG_OBJECT);
2569
2570 // Set the map.
2571 LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex);
2572 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2573}
2574
2575
kmillikin@chromium.org13bd2942009-12-16 15:36:05 +00002576void MacroAssembler::AllocateTwoByteString(Register result,
2577 Register length,
2578 Register scratch1,
2579 Register scratch2,
2580 Register scratch3,
2581 Label* gc_required) {
2582 // Calculate the number of bytes needed for the characters in the string
2583 // while observing object alignment.
ager@chromium.orgac091b72010-05-05 07:34:42 +00002584 const int kHeaderAlignment = SeqTwoByteString::kHeaderSize &
2585 kObjectAlignmentMask;
kmillikin@chromium.org13bd2942009-12-16 15:36:05 +00002586 ASSERT(kShortSize == 2);
2587 // scratch1 = length * 2 + kObjectAlignmentMask.
ager@chromium.orgac091b72010-05-05 07:34:42 +00002588 lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask +
2589 kHeaderAlignment));
kmillikin@chromium.org13bd2942009-12-16 15:36:05 +00002590 and_(scratch1, Immediate(~kObjectAlignmentMask));
ager@chromium.orgac091b72010-05-05 07:34:42 +00002591 if (kHeaderAlignment > 0) {
2592 subq(scratch1, Immediate(kHeaderAlignment));
2593 }
kmillikin@chromium.org13bd2942009-12-16 15:36:05 +00002594
2595 // Allocate two byte string in new space.
2596 AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
2597 times_1,
2598 scratch1,
2599 result,
2600 scratch2,
2601 scratch3,
2602 gc_required,
2603 TAG_OBJECT);
2604
2605 // Set the map, length and hash field.
2606 LoadRoot(kScratchRegister, Heap::kStringMapRootIndex);
2607 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
ager@chromium.orgac091b72010-05-05 07:34:42 +00002608 Integer32ToSmi(scratch1, length);
2609 movq(FieldOperand(result, String::kLengthOffset), scratch1);
kmillikin@chromium.org13bd2942009-12-16 15:36:05 +00002610 movl(FieldOperand(result, String::kHashFieldOffset),
2611 Immediate(String::kEmptyHashField));
2612}
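// Worked example of the size computation above, assuming the usual x64 value
// kObjectAlignmentMask == 7 and, for simplicity, kHeaderAlignment == 0: for a
// length of 5 the lea yields 5 * 2 + 7 == 17, the and_ rounds down to 16, so
// 10 bytes of character data are padded up to the next 8-byte boundary. The
// kHeaderAlignment term only shifts the rounding so that header plus character
// bytes, rather than the character bytes alone, ends up object-aligned.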
2613
2614
2615void MacroAssembler::AllocateAsciiString(Register result,
2616 Register length,
2617 Register scratch1,
2618 Register scratch2,
2619 Register scratch3,
2620 Label* gc_required) {
2621 // Calculate the number of bytes needed for the characters in the string
2622 // while observing object alignment.
ager@chromium.orgac091b72010-05-05 07:34:42 +00002623 const int kHeaderAlignment = SeqAsciiString::kHeaderSize &
2624 kObjectAlignmentMask;
kmillikin@chromium.org13bd2942009-12-16 15:36:05 +00002625 movl(scratch1, length);
2626 ASSERT(kCharSize == 1);
ager@chromium.orgac091b72010-05-05 07:34:42 +00002627 addq(scratch1, Immediate(kObjectAlignmentMask + kHeaderAlignment));
kmillikin@chromium.org13bd2942009-12-16 15:36:05 +00002628 and_(scratch1, Immediate(~kObjectAlignmentMask));
ager@chromium.orgac091b72010-05-05 07:34:42 +00002629 if (kHeaderAlignment > 0) {
2630 subq(scratch1, Immediate(kHeaderAlignment));
2631 }
kmillikin@chromium.org13bd2942009-12-16 15:36:05 +00002632
2633 // Allocate ascii string in new space.
2634 AllocateInNewSpace(SeqAsciiString::kHeaderSize,
2635 times_1,
2636 scratch1,
2637 result,
2638 scratch2,
2639 scratch3,
2640 gc_required,
2641 TAG_OBJECT);
2642
2643 // Set the map, length and hash field.
2644 LoadRoot(kScratchRegister, Heap::kAsciiStringMapRootIndex);
2645 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
ager@chromium.orgac091b72010-05-05 07:34:42 +00002646 Integer32ToSmi(scratch1, length);
2647 movq(FieldOperand(result, String::kLengthOffset), scratch1);
kmillikin@chromium.org13bd2942009-12-16 15:36:05 +00002648 movl(FieldOperand(result, String::kHashFieldOffset),
2649 Immediate(String::kEmptyHashField));
2650}
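// Same rounding scheme as the two-byte case above, but with one byte per
// character: e.g. (again assuming kHeaderAlignment == 0) a length-13 ASCII
// string needs (13 + 7) & ~7 == 16 bytes of character storage.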
2651
2652
2653void MacroAssembler::AllocateConsString(Register result,
2654 Register scratch1,
2655 Register scratch2,
2656 Label* gc_required) {
2657 // Allocate cons string in new space.
2658 AllocateInNewSpace(ConsString::kSize,
2659 result,
2660 scratch1,
2661 scratch2,
2662 gc_required,
2663 TAG_OBJECT);
2664
2665 // Set the map. The other fields are left uninitialized.
2666 LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex);
2667 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2668}
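// A cons string is the lazily concatenated form of two strings: it stores
// pointers to a first and a second part instead of flat character data, so a
// fixed ConsString::kSize allocation suffices regardless of the combined
// length. The caller is expected to fill in the length, hash field and the
// two parts afterwards; the ASCII variant below differs only in the map.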
2669
2670
2671void MacroAssembler::AllocateAsciiConsString(Register result,
2672 Register scratch1,
2673 Register scratch2,
2674 Label* gc_required) {
2675 // Allocate ASCII cons string in new space.
2676 AllocateInNewSpace(ConsString::kSize,
2677 result,
2678 scratch1,
2679 scratch2,
2680 gc_required,
2681 TAG_OBJECT);
2682
2683 // Set the map. The other fields are left uninitialized.
2684 LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex);
2685 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2686}
2687
2688
sgjesse@chromium.orgac6aa172009-12-04 12:29:05 +00002689void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
2690 if (context_chain_length > 0) {
2691 // Move up the chain of contexts to the context containing the slot.
2692 movq(dst, Operand(rsi, Context::SlotOffset(Context::CLOSURE_INDEX)));
2693 // Load the function context (which is the incoming, outer context).
lrn@chromium.orgd5649e32010-01-19 13:36:12 +00002694 movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
sgjesse@chromium.orgac6aa172009-12-04 12:29:05 +00002695 for (int i = 1; i < context_chain_length; i++) {
2696 movq(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
2697 movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
2698 }
2699 // The context may be an intermediate context, not a function context.
2700 movq(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
2701 } else { // context is the current function context.
2702 // The context may be an intermediate context, not a function context.
2703 movq(dst, Operand(rsi, Context::SlotOffset(Context::FCONTEXT_INDEX)));
2704 }
2705}
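// Illustrative only (register and chain length are arbitrary): a call such as
//
//   masm->LoadContext(rdi, 2);
//
// follows two closure links up from the current context in rsi and then reads
// the FCONTEXT slot, so rdi ends up holding the enclosing function context
// rather than a possible intermediate context. The chain length is a
// compile-time constant produced by scope analysis.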
2706
sgjesse@chromium.orgb302e562010-02-03 11:26:59 +00002707int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
2708 // On Windows stack slots are reserved by the caller for all arguments
2709 // including the ones passed in registers. On Linux 6 arguments are passed in
2710 // registers and the caller does not reserve stack slots for them.
2711 ASSERT(num_arguments >= 0);
2712#ifdef _WIN64
2713 static const int kArgumentsWithoutStackSlot = 0;
2714#else
2715 static const int kArgumentsWithoutStackSlot = 6;
2716#endif
2717 return num_arguments > kArgumentsWithoutStackSlot ?
2718 num_arguments - kArgumentsWithoutStackSlot : 0;
2719}
2720
2721void MacroAssembler::PrepareCallCFunction(int num_arguments) {
2722 int frame_alignment = OS::ActivationFrameAlignment();
2723 ASSERT(frame_alignment != 0);
2724 ASSERT(num_arguments >= 0);
2725 // Make stack end at alignment and allocate space for arguments and old rsp.
2726 movq(kScratchRegister, rsp);
2727 ASSERT(IsPowerOf2(frame_alignment));
2728 int argument_slots_on_stack =
2729 ArgumentStackSlotsForCFunctionCall(num_arguments);
2730 subq(rsp, Immediate((argument_slots_on_stack + 1) * kPointerSize));
2731 and_(rsp, Immediate(-frame_alignment));
2732 movq(Operand(rsp, argument_slots_on_stack * kPointerSize), kScratchRegister);
2733}
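// Resulting stack layout (sketch): rsp is aligned to the platform's activation
// frame alignment, the lowest argument_slots_on_stack slots are available for
// outgoing stack arguments, and the slot just above them holds the caller's
// original rsp so that CallCFunction can restore it after the call.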
2734
2735
2736void MacroAssembler::CallCFunction(ExternalReference function,
2737 int num_arguments) {
2738 movq(rax, function);
2739 CallCFunction(rax, num_arguments);
2740}
2741
2742
2743void MacroAssembler::CallCFunction(Register function, int num_arguments) {
ricow@chromium.orgc9c80822010-04-21 08:22:37 +00002744 // Check stack alignment.
2745 if (FLAG_debug_code) {
2746 CheckStackAlignment();
2747 }
2748
sgjesse@chromium.orgb302e562010-02-03 11:26:59 +00002749 call(function);
2750 ASSERT(OS::ActivationFrameAlignment() != 0);
2751 ASSERT(num_arguments >= 0);
2752 int argument_slots_on_stack =
2753 ArgumentStackSlotsForCFunctionCall(num_arguments);
2754 movq(rsp, Operand(rsp, argument_slots_on_stack * kPointerSize));
2755}
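// Minimal usage sketch. The external reference name is hypothetical and the
// argument registers depend on the target ABI (rdi/rsi on Linux, rcx/rdx on
// Windows); arguments beyond the register-passed ones go into the reserved
// stack slots.
//
//   masm->PrepareCallCFunction(2);
//   // ... load the two arguments into the C argument registers ...
//   masm->CallCFunction(ExternalReference::some_c_function(), 2);
//   // rsp is back at its value from before PrepareCallCFunction.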
2756
sgjesse@chromium.orgac6aa172009-12-04 12:29:05 +00002757
ager@chromium.org4af710e2009-09-15 12:20:11 +00002758CodePatcher::CodePatcher(byte* address, int size)
2759 : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
2760 // Create a new macro assembler pointing to the address of the code to patch.
2761 // The size is adjusted with kGap in order for the assembler to generate size
2762 // bytes of instructions without failing with buffer size constraints.
2763 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
2764}
2765
2766
2767CodePatcher::~CodePatcher() {
2768 // Indicate that code has changed.
2769 CPU::FlushICache(address_, size_);
2770
2771 // Check that the code was patched as expected.
2772 ASSERT(masm_.pc_ == address_ + size_);
2773 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
2774}
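// Typical (hypothetical) use, assuming the masm() accessor declared for this
// class in the corresponding header: construct a CodePatcher over the bytes
// to replace, emit exactly 'size' bytes of new instructions through it, and
// let the destructor flush the instruction cache and check the byte count.
//
//   CodePatcher patcher(address, 1);
//   patcher.masm()->int3();  // emit exactly one byte of replacement code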
2775
kasperl@chromium.org71affb52009-05-26 05:44:31 +00002776} } // namespace v8::internal
erik.corry@gmail.com9dfbea42010-05-21 12:58:28 +00002777
2778#endif // V8_TARGET_ARCH_X64