blob: a8ffca918c97b8aca1d3d8fab001ffab63d9fee6 [file] [log] [blame]
Steve Blocka7e24c12009-10-30 11:49:00 +00001// Copyright 2009 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#ifndef V8_X64_MACRO_ASSEMBLER_X64_H_
29#define V8_X64_MACRO_ASSEMBLER_X64_H_
30
31#include "assembler.h"
32
33namespace v8 {
34namespace internal {
35
// Flag bits controlling the behavior of the AllocateInNewSpace functions.
// Values may be combined with bitwise-or.
enum AllocationFlags {
  // Plain allocation: result register receives the untagged object address,
  // and the allocation top is loaded from memory.
  NO_ALLOCATION_FLAGS = 0,
  // Tag the returned pointer as a heap object before returning it.
  TAG_OBJECT = 1 << 0,
  // The result register already holds the new-space allocation top, so it
  // does not need to be (re)loaded.
  RESULT_CONTAINS_TOP = 1 << 1
};
46
// Default scratch register used by MacroAssembler (and other code that needs
// a spare register). The register isn't callee save, and not used by the
// function calling convention.
static const Register kScratchRegister = { 10 };  // r10.
// Register reserved to hold a cached smi constant; callee save, so its value
// survives calls that follow the C calling convention. The cached value is
// Smi::FromInt(kSmiConstantRegisterValue) — see InitializeSmiConstantRegister.
static const Register kSmiConstantRegister = { 15 };  // r15 (callee save).
// Register reserved for the roots array pointer (used by LoadRoot and
// friends); callee save.
static const Register kRootRegister = { 13 };  // r13 (callee save).
// Value of smi in kSmiConstantRegister.
static const int kSmiConstantRegisterValue = 1;
Steve Blocka7e24c12009-10-30 11:49:00 +000055
// Convenience for platform-independent signatures: code shared across
// architectures refers to MemOperand, which on x64 is simply Operand.
typedef Operand MemOperand;

// Forward declaration.
class JumpTarget;
61
// A (register, scale) pair describing a smi that has been converted into a
// form usable as a scaled index in a memory operand. Produced by
// MacroAssembler::SmiToIndex and SmiToNegativeIndex.
struct SmiIndex {
  SmiIndex(Register index_register, ScaleFactor scale)
      : reg(index_register),
        scale(scale) {}
  Register reg;       // Register holding the index value.
  ScaleFactor scale;  // Scale factor to apply when indexing.
};
69
70// MacroAssembler implements a collection of frequently used macros.
71class MacroAssembler: public Assembler {
72 public:
73 MacroAssembler(void* buffer, int size);
74
75 void LoadRoot(Register destination, Heap::RootListIndex index);
76 void CompareRoot(Register with, Heap::RootListIndex index);
77 void CompareRoot(Operand with, Heap::RootListIndex index);
78 void PushRoot(Heap::RootListIndex index);
Kristian Monsen25f61362010-05-21 11:50:48 +010079 void StoreRoot(Register source, Heap::RootListIndex index);
Steve Blocka7e24c12009-10-30 11:49:00 +000080
81 // ---------------------------------------------------------------------------
82 // GC Support
83
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010084 // For page containing |object| mark region covering |addr| dirty.
85 // RecordWriteHelper only works if the object is not in new
Steve Block6ded16b2010-05-10 14:33:55 +010086 // space.
87 void RecordWriteHelper(Register object,
88 Register addr,
89 Register scratch);
90
91 // Check if object is in new space. The condition cc can be equal or
  // not_equal. If it is equal a jump will be done if the object is in new
93 // space. The register scratch can be object itself, but it will be clobbered.
Kristian Monsen0d5e1162010-09-30 15:31:59 +010094 template <typename LabelType>
Steve Block6ded16b2010-05-10 14:33:55 +010095 void InNewSpace(Register object,
96 Register scratch,
97 Condition cc,
Kristian Monsen0d5e1162010-09-30 15:31:59 +010098 LabelType* branch);
Steve Block6ded16b2010-05-10 14:33:55 +010099
Steve Block8defd9f2010-07-08 12:39:36 +0100100 // For page containing |object| mark region covering [object+offset]
101 // dirty. |object| is the object being stored into, |value| is the
102 // object being stored. If |offset| is zero, then the |scratch|
103 // register contains the array index into the elements array
104 // represented as a Smi. All registers are clobbered by the
105 // operation. RecordWrite filters out smis so it does not update the
106 // write barrier if the value is a smi.
Steve Blocka7e24c12009-10-30 11:49:00 +0000107 void RecordWrite(Register object,
108 int offset,
109 Register value,
110 Register scratch);
111
Steve Block8defd9f2010-07-08 12:39:36 +0100112 // For page containing |object| mark region covering [address]
113 // dirty. |object| is the object being stored into, |value| is the
114 // object being stored. All registers are clobbered by the
115 // operation. RecordWrite filters out smis so it does not update
116 // the write barrier if the value is a smi.
117 void RecordWrite(Register object,
118 Register address,
119 Register value);
120
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100121 // For page containing |object| mark region covering [object+offset] dirty.
Steve Block3ce2e202009-11-05 08:53:23 +0000122 // The value is known to not be a smi.
123 // object is the object being stored into, value is the object being stored.
124 // If offset is zero, then the scratch register contains the array index into
125 // the elements array represented as a Smi.
126 // All registers are clobbered by the operation.
127 void RecordWriteNonSmi(Register object,
128 int offset,
129 Register value,
130 Register scratch);
131
Steve Blocka7e24c12009-10-30 11:49:00 +0000132#ifdef ENABLE_DEBUGGER_SUPPORT
133 // ---------------------------------------------------------------------------
134 // Debugger Support
135
Andrei Popescu402d9372010-02-26 13:31:12 +0000136 void DebugBreak();
Steve Blocka7e24c12009-10-30 11:49:00 +0000137#endif
138
139 // ---------------------------------------------------------------------------
Steve Blockd0582a62009-12-15 09:54:21 +0000140 // Stack limit support
141
142 // Do simple test for stack overflow. This doesn't handle an overflow.
143 void StackLimitCheck(Label* on_stack_limit_hit);
144
145 // ---------------------------------------------------------------------------
Steve Blocka7e24c12009-10-30 11:49:00 +0000146 // Activation frames
147
  // Enter/leave a stack frame of kind StackFrame::INTERNAL. Thin wrappers
  // around the generic EnterFrame/LeaveFrame helpers.
  void EnterInternalFrame() { EnterFrame(StackFrame::INTERNAL); }
  void LeaveInternalFrame() { LeaveFrame(StackFrame::INTERNAL); }

  // Enter/leave a stack frame of kind StackFrame::CONSTRUCT.
  void EnterConstructFrame() { EnterFrame(StackFrame::CONSTRUCT); }
  void LeaveConstructFrame() { LeaveFrame(StackFrame::CONSTRUCT); }
153
Steve Blockd0582a62009-12-15 09:54:21 +0000154 // Enter specific kind of exit frame; either in normal or
155 // debug mode. Expects the number of arguments in register rax and
Steve Blocka7e24c12009-10-30 11:49:00 +0000156 // sets up the number of arguments in register rdi and the pointer
157 // to the first argument in register rsi.
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100158 void EnterExitFrame(int result_size = 1);
Steve Blocka7e24c12009-10-30 11:49:00 +0000159
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100160 void EnterApiExitFrame(int stack_space,
Ben Murdochbb769b22010-08-11 14:56:33 +0100161 int argc,
162 int result_size = 1);
163
Steve Blocka7e24c12009-10-30 11:49:00 +0000164 // Leave the current exit frame. Expects/provides the return value in
165 // register rax:rdx (untouched) and the pointer to the first
166 // argument in register rsi.
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100167 void LeaveExitFrame(int result_size = 1);
Steve Blocka7e24c12009-10-30 11:49:00 +0000168
169
170 // ---------------------------------------------------------------------------
171 // JavaScript invokes
172
173 // Invoke the JavaScript function code by either calling or jumping.
174 void InvokeCode(Register code,
175 const ParameterCount& expected,
176 const ParameterCount& actual,
177 InvokeFlag flag);
178
179 void InvokeCode(Handle<Code> code,
180 const ParameterCount& expected,
181 const ParameterCount& actual,
182 RelocInfo::Mode rmode,
183 InvokeFlag flag);
184
185 // Invoke the JavaScript function in the given register. Changes the
186 // current context to the context in the function before invoking.
187 void InvokeFunction(Register function,
188 const ParameterCount& actual,
189 InvokeFlag flag);
190
Andrei Popescu402d9372010-02-26 13:31:12 +0000191 void InvokeFunction(JSFunction* function,
192 const ParameterCount& actual,
193 InvokeFlag flag);
194
Steve Blocka7e24c12009-10-30 11:49:00 +0000195 // Invoke specified builtin JavaScript function. Adds an entry to
196 // the unresolved list if the name does not resolve.
197 void InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag);
198
Steve Block791712a2010-08-27 10:21:07 +0100199 // Store the function for the given builtin in the target register.
200 void GetBuiltinFunction(Register target, Builtins::JavaScript id);
201
Steve Blocka7e24c12009-10-30 11:49:00 +0000202 // Store the code object for the given builtin in the target register.
203 void GetBuiltinEntry(Register target, Builtins::JavaScript id);
204
205
206 // ---------------------------------------------------------------------------
207 // Smi tagging, untagging and operations on tagged smis.
208
  // Load kSmiConstantRegister with the tagged smi value
  // Smi::FromInt(kSmiConstantRegisterValue). The smi pointer is materialized
  // as a raw 64-bit immediate with no relocation info (RelocInfo::NONE).
  void InitializeSmiConstantRegister() {
    movq(kSmiConstantRegister,
         reinterpret_cast<uint64_t>(Smi::FromInt(kSmiConstantRegisterValue)),
         RelocInfo::NONE);
  }
214
Steve Blocka7e24c12009-10-30 11:49:00 +0000215 // Conversions between tagged smi values and non-tagged integer values.
216
217 // Tag an integer value. The result must be known to be a valid smi value.
Leon Clarke4515c472010-02-03 11:58:03 +0000218 // Only uses the low 32 bits of the src register. Sets the N and Z flags
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100219 // based on the value of the resulting smi.
Steve Blocka7e24c12009-10-30 11:49:00 +0000220 void Integer32ToSmi(Register dst, Register src);
221
Kristian Monsen9dcf7e22010-06-28 14:14:28 +0100222 // Stores an integer32 value into a memory field that already holds a smi.
223 void Integer32ToSmiField(const Operand& dst, Register src);
224
Steve Blocka7e24c12009-10-30 11:49:00 +0000225 // Adds constant to src and tags the result as a smi.
226 // Result must be a valid smi.
Steve Block3ce2e202009-11-05 08:53:23 +0000227 void Integer64PlusConstantToSmi(Register dst, Register src, int constant);
Steve Blocka7e24c12009-10-30 11:49:00 +0000228
229 // Convert smi to 32-bit integer. I.e., not sign extended into
230 // high 32 bits of destination.
231 void SmiToInteger32(Register dst, Register src);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100232 void SmiToInteger32(Register dst, const Operand& src);
Steve Blocka7e24c12009-10-30 11:49:00 +0000233
234 // Convert smi to 64-bit integer (sign extended if necessary).
235 void SmiToInteger64(Register dst, Register src);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +0100236 void SmiToInteger64(Register dst, const Operand& src);
Steve Blocka7e24c12009-10-30 11:49:00 +0000237
238 // Multiply a positive smi's integer value by a power of two.
239 // Provides result as 64-bit integer value.
240 void PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
241 Register src,
242 int power);
243
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100244 // Divide a positive smi's integer value by a power of two.
245 // Provides result as 32-bit integer value.
246 void PositiveSmiDivPowerOfTwoToInteger32(Register dst,
247 Register src,
248 int power);
249
250
Steve Block3ce2e202009-11-05 08:53:23 +0000251 // Simple comparison of smis.
252 void SmiCompare(Register dst, Register src);
253 void SmiCompare(Register dst, Smi* src);
Steve Block6ded16b2010-05-10 14:33:55 +0100254 void SmiCompare(Register dst, const Operand& src);
Steve Block3ce2e202009-11-05 08:53:23 +0000255 void SmiCompare(const Operand& dst, Register src);
256 void SmiCompare(const Operand& dst, Smi* src);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +0100257 // Compare the int32 in src register to the value of the smi stored at dst.
258 void SmiCompareInteger32(const Operand& dst, Register src);
Steve Block3ce2e202009-11-05 08:53:23 +0000259 // Sets sign and zero flags depending on value of smi in register.
260 void SmiTest(Register src);
261
Steve Blocka7e24c12009-10-30 11:49:00 +0000262 // Functions performing a check on a known or potential smi. Returns
263 // a condition that is satisfied if the check is successful.
264
265 // Is the value a tagged smi.
266 Condition CheckSmi(Register src);
267
Steve Blocka7e24c12009-10-30 11:49:00 +0000268 // Is the value a positive tagged smi.
269 Condition CheckPositiveSmi(Register src);
270
Leon Clarkee46be812010-01-19 14:06:41 +0000271 // Are both values tagged smis.
Steve Blocka7e24c12009-10-30 11:49:00 +0000272 Condition CheckBothSmi(Register first, Register second);
273
  // Are both values positive tagged smis.
275 Condition CheckBothPositiveSmi(Register first, Register second);
276
Leon Clarkee46be812010-01-19 14:06:41 +0000277 // Are either value a tagged smi.
Ben Murdochbb769b22010-08-11 14:56:33 +0100278 Condition CheckEitherSmi(Register first,
279 Register second,
280 Register scratch = kScratchRegister);
Leon Clarkee46be812010-01-19 14:06:41 +0000281
Steve Blocka7e24c12009-10-30 11:49:00 +0000282 // Is the value the minimum smi value (since we are using
283 // two's complement numbers, negating the value is known to yield
284 // a non-smi value).
285 Condition CheckIsMinSmi(Register src);
286
  // Checks whether a 32-bit integer value is valid for conversion
288 // to a smi.
289 Condition CheckInteger32ValidSmiValue(Register src);
290
  // Checks whether a 32-bit unsigned integer value is valid for
292 // conversion to a smi.
293 Condition CheckUInteger32ValidSmiValue(Register src);
294
Steve Blocka7e24c12009-10-30 11:49:00 +0000295 // Test-and-jump functions. Typically combines a check function
296 // above with a conditional jump.
297
298 // Jump if the value cannot be represented by a smi.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100299 template <typename LabelType>
300 void JumpIfNotValidSmiValue(Register src, LabelType* on_invalid);
Steve Blocka7e24c12009-10-30 11:49:00 +0000301
Steve Block3ce2e202009-11-05 08:53:23 +0000302 // Jump if the unsigned integer value cannot be represented by a smi.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100303 template <typename LabelType>
304 void JumpIfUIntNotValidSmiValue(Register src, LabelType* on_invalid);
Steve Block3ce2e202009-11-05 08:53:23 +0000305
Steve Blocka7e24c12009-10-30 11:49:00 +0000306 // Jump to label if the value is a tagged smi.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100307 template <typename LabelType>
308 void JumpIfSmi(Register src, LabelType* on_smi);
Steve Blocka7e24c12009-10-30 11:49:00 +0000309
310 // Jump to label if the value is not a tagged smi.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100311 template <typename LabelType>
312 void JumpIfNotSmi(Register src, LabelType* on_not_smi);
Steve Blocka7e24c12009-10-30 11:49:00 +0000313
314 // Jump to label if the value is not a positive tagged smi.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100315 template <typename LabelType>
316 void JumpIfNotPositiveSmi(Register src, LabelType* on_not_smi);
Steve Blocka7e24c12009-10-30 11:49:00 +0000317
Steve Block3ce2e202009-11-05 08:53:23 +0000318 // Jump to label if the value, which must be a tagged smi, has value equal
Steve Blocka7e24c12009-10-30 11:49:00 +0000319 // to the constant.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100320 template <typename LabelType>
321 void JumpIfSmiEqualsConstant(Register src,
322 Smi* constant,
323 LabelType* on_equals);
Steve Blocka7e24c12009-10-30 11:49:00 +0000324
325 // Jump if either or both register are not smi values.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100326 template <typename LabelType>
327 void JumpIfNotBothSmi(Register src1,
328 Register src2,
329 LabelType* on_not_both_smi);
Steve Blocka7e24c12009-10-30 11:49:00 +0000330
Leon Clarked91b9f72010-01-27 17:25:45 +0000331 // Jump if either or both register are not positive smi values.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100332 template <typename LabelType>
Leon Clarked91b9f72010-01-27 17:25:45 +0000333 void JumpIfNotBothPositiveSmi(Register src1, Register src2,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100334 LabelType* on_not_both_smi);
Leon Clarked91b9f72010-01-27 17:25:45 +0000335
Steve Blocka7e24c12009-10-30 11:49:00 +0000336 // Operations on tagged smi values.
337
338 // Smis represent a subset of integers. The subset is always equivalent to
339 // a two's complement interpretation of a fixed number of bits.
340
341 // Optimistically adds an integer constant to a supposed smi.
342 // If the src is not a smi, or the result is not a smi, jump to
343 // the label.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100344 template <typename LabelType>
Steve Blocka7e24c12009-10-30 11:49:00 +0000345 void SmiTryAddConstant(Register dst,
346 Register src,
Steve Block3ce2e202009-11-05 08:53:23 +0000347 Smi* constant,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100348 LabelType* on_not_smi_result);
Steve Blocka7e24c12009-10-30 11:49:00 +0000349
Steve Block3ce2e202009-11-05 08:53:23 +0000350 // Add an integer constant to a tagged smi, giving a tagged smi as result.
351 // No overflow testing on the result is done.
352 void SmiAddConstant(Register dst, Register src, Smi* constant);
353
Leon Clarkef7060e22010-06-03 12:02:55 +0100354 // Add an integer constant to a tagged smi, giving a tagged smi as result.
355 // No overflow testing on the result is done.
356 void SmiAddConstant(const Operand& dst, Smi* constant);
357
Steve Blocka7e24c12009-10-30 11:49:00 +0000358 // Add an integer constant to a tagged smi, giving a tagged smi as result,
359 // or jumping to a label if the result cannot be represented by a smi.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100360 template <typename LabelType>
Steve Blocka7e24c12009-10-30 11:49:00 +0000361 void SmiAddConstant(Register dst,
362 Register src,
Steve Block3ce2e202009-11-05 08:53:23 +0000363 Smi* constant,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100364 LabelType* on_not_smi_result);
Steve Blocka7e24c12009-10-30 11:49:00 +0000365
366 // Subtract an integer constant from a tagged smi, giving a tagged smi as
Steve Block6ded16b2010-05-10 14:33:55 +0100367 // result. No testing on the result is done. Sets the N and Z flags
368 // based on the value of the resulting integer.
Steve Block3ce2e202009-11-05 08:53:23 +0000369 void SmiSubConstant(Register dst, Register src, Smi* constant);
370
371 // Subtract an integer constant from a tagged smi, giving a tagged smi as
Steve Blocka7e24c12009-10-30 11:49:00 +0000372 // result, or jumping to a label if the result cannot be represented by a smi.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100373 template <typename LabelType>
Steve Blocka7e24c12009-10-30 11:49:00 +0000374 void SmiSubConstant(Register dst,
375 Register src,
Steve Block3ce2e202009-11-05 08:53:23 +0000376 Smi* constant,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100377 LabelType* on_not_smi_result);
Steve Blocka7e24c12009-10-30 11:49:00 +0000378
379 // Negating a smi can give a negative zero or too large positive value.
Steve Block3ce2e202009-11-05 08:53:23 +0000380 // NOTICE: This operation jumps on success, not failure!
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100381 template <typename LabelType>
Steve Blocka7e24c12009-10-30 11:49:00 +0000382 void SmiNeg(Register dst,
383 Register src,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100384 LabelType* on_smi_result);
Steve Blocka7e24c12009-10-30 11:49:00 +0000385
386 // Adds smi values and return the result as a smi.
387 // If dst is src1, then src1 will be destroyed, even if
388 // the operation is unsuccessful.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100389 template <typename LabelType>
Steve Blocka7e24c12009-10-30 11:49:00 +0000390 void SmiAdd(Register dst,
391 Register src1,
392 Register src2,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100393 LabelType* on_not_smi_result);
394
395 void SmiAdd(Register dst,
396 Register src1,
397 Register src2);
Steve Blocka7e24c12009-10-30 11:49:00 +0000398
399 // Subtracts smi values and return the result as a smi.
400 // If dst is src1, then src1 will be destroyed, even if
401 // the operation is unsuccessful.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100402 template <typename LabelType>
Steve Blocka7e24c12009-10-30 11:49:00 +0000403 void SmiSub(Register dst,
404 Register src1,
405 Register src2,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100406 LabelType* on_not_smi_result);
Steve Blocka7e24c12009-10-30 11:49:00 +0000407
Steve Block6ded16b2010-05-10 14:33:55 +0100408 void SmiSub(Register dst,
409 Register src1,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100410 Register src2);
411
412 template <typename LabelType>
413 void SmiSub(Register dst,
414 Register src1,
Leon Clarkef7060e22010-06-03 12:02:55 +0100415 const Operand& src2,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100416 LabelType* on_not_smi_result);
417
418 void SmiSub(Register dst,
419 Register src1,
420 const Operand& src2);
Steve Block6ded16b2010-05-10 14:33:55 +0100421
Steve Blocka7e24c12009-10-30 11:49:00 +0000422 // Multiplies smi values and return the result as a smi,
423 // if possible.
424 // If dst is src1, then src1 will be destroyed, even if
425 // the operation is unsuccessful.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100426 template <typename LabelType>
Steve Blocka7e24c12009-10-30 11:49:00 +0000427 void SmiMul(Register dst,
428 Register src1,
429 Register src2,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100430 LabelType* on_not_smi_result);
Steve Blocka7e24c12009-10-30 11:49:00 +0000431
432 // Divides one smi by another and returns the quotient.
433 // Clobbers rax and rdx registers.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100434 template <typename LabelType>
Steve Blocka7e24c12009-10-30 11:49:00 +0000435 void SmiDiv(Register dst,
436 Register src1,
437 Register src2,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100438 LabelType* on_not_smi_result);
Steve Blocka7e24c12009-10-30 11:49:00 +0000439
440 // Divides one smi by another and returns the remainder.
441 // Clobbers rax and rdx registers.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100442 template <typename LabelType>
Steve Blocka7e24c12009-10-30 11:49:00 +0000443 void SmiMod(Register dst,
444 Register src1,
445 Register src2,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100446 LabelType* on_not_smi_result);
Steve Blocka7e24c12009-10-30 11:49:00 +0000447
448 // Bitwise operations.
449 void SmiNot(Register dst, Register src);
450 void SmiAnd(Register dst, Register src1, Register src2);
451 void SmiOr(Register dst, Register src1, Register src2);
452 void SmiXor(Register dst, Register src1, Register src2);
Steve Block3ce2e202009-11-05 08:53:23 +0000453 void SmiAndConstant(Register dst, Register src1, Smi* constant);
454 void SmiOrConstant(Register dst, Register src1, Smi* constant);
455 void SmiXorConstant(Register dst, Register src1, Smi* constant);
Steve Blocka7e24c12009-10-30 11:49:00 +0000456
457 void SmiShiftLeftConstant(Register dst,
458 Register src,
Kristian Monsen25f61362010-05-21 11:50:48 +0100459 int shift_value);
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100460 template <typename LabelType>
Steve Blocka7e24c12009-10-30 11:49:00 +0000461 void SmiShiftLogicalRightConstant(Register dst,
462 Register src,
463 int shift_value,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100464 LabelType* on_not_smi_result);
Steve Blocka7e24c12009-10-30 11:49:00 +0000465 void SmiShiftArithmeticRightConstant(Register dst,
466 Register src,
467 int shift_value);
468
469 // Shifts a smi value to the left, and returns the result if that is a smi.
470 // Uses and clobbers rcx, so dst may not be rcx.
471 void SmiShiftLeft(Register dst,
472 Register src1,
Kristian Monsen25f61362010-05-21 11:50:48 +0100473 Register src2);
Steve Blocka7e24c12009-10-30 11:49:00 +0000474 // Shifts a smi value to the right, shifting in zero bits at the top, and
  // returns the unsigned interpretation of the result if that is a smi.
476 // Uses and clobbers rcx, so dst may not be rcx.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100477 template <typename LabelType>
Steve Blocka7e24c12009-10-30 11:49:00 +0000478 void SmiShiftLogicalRight(Register dst,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100479 Register src1,
480 Register src2,
481 LabelType* on_not_smi_result);
Steve Blocka7e24c12009-10-30 11:49:00 +0000482 // Shifts a smi value to the right, sign extending the top, and
  // returns the signed interpretation of the result. That will always
484 // be a valid smi value, since it's numerically smaller than the
485 // original.
486 // Uses and clobbers rcx, so dst may not be rcx.
487 void SmiShiftArithmeticRight(Register dst,
488 Register src1,
489 Register src2);
490
491 // Specialized operations
492
493 // Select the non-smi register of two registers where exactly one is a
494 // smi. If neither are smis, jump to the failure label.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100495 template <typename LabelType>
Steve Blocka7e24c12009-10-30 11:49:00 +0000496 void SelectNonSmi(Register dst,
497 Register src1,
498 Register src2,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100499 LabelType* on_not_smis);
Steve Blocka7e24c12009-10-30 11:49:00 +0000500
501 // Converts, if necessary, a smi to a combination of number and
502 // multiplier to be used as a scaled index.
503 // The src register contains a *positive* smi value. The shift is the
504 // power of two to multiply the index value by (e.g.
505 // to index by smi-value * kPointerSize, pass the smi and kPointerSizeLog2).
506 // The returned index register may be either src or dst, depending
507 // on what is most efficient. If src and dst are different registers,
508 // src is always unchanged.
509 SmiIndex SmiToIndex(Register dst, Register src, int shift);
510
511 // Converts a positive smi to a negative index.
512 SmiIndex SmiToNegativeIndex(Register dst, Register src, int shift);
513
  // Basic Smi operations.

  // Load a register with a smi constant via the LoadSmiConstant helper.
  void Move(Register dst, Smi* source) {
    LoadSmiConstant(dst, source);
  }

  // Store a smi constant to a memory operand. Materializes the constant in a
  // register first (GetSmiConstant), then stores that register.
  // NOTE(review): presumably the register returned by GetSmiConstant may be a
  // scratch/constant register — confirm before relying on it afterwards.
  void Move(const Operand& dst, Smi* source) {
    Register constant = GetSmiConstant(source);
    movq(dst, constant);
  }
523
524 void Push(Smi* smi);
525 void Test(const Operand& dst, Smi* source);
526
Steve Blocka7e24c12009-10-30 11:49:00 +0000527 // ---------------------------------------------------------------------------
Leon Clarkee46be812010-01-19 14:06:41 +0000528 // String macros.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100529 template <typename LabelType>
Leon Clarkee46be812010-01-19 14:06:41 +0000530 void JumpIfNotBothSequentialAsciiStrings(Register first_object,
531 Register second_object,
532 Register scratch1,
533 Register scratch2,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100534 LabelType* on_not_both_flat_ascii);
Leon Clarkee46be812010-01-19 14:06:41 +0000535
Steve Block6ded16b2010-05-10 14:33:55 +0100536 // Check whether the instance type represents a flat ascii string. Jump to the
537 // label if not. If the instance type can be scratched specify same register
538 // for both instance type and scratch.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100539 template <typename LabelType>
540 void JumpIfInstanceTypeIsNotSequentialAscii(
541 Register instance_type,
542 Register scratch,
543 LabelType *on_not_flat_ascii_string);
Steve Block6ded16b2010-05-10 14:33:55 +0100544
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100545 template <typename LabelType>
Steve Block6ded16b2010-05-10 14:33:55 +0100546 void JumpIfBothInstanceTypesAreNotSequentialAscii(
547 Register first_object_instance_type,
548 Register second_object_instance_type,
549 Register scratch1,
550 Register scratch2,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100551 LabelType* on_fail);
Steve Block6ded16b2010-05-10 14:33:55 +0100552
Leon Clarkee46be812010-01-19 14:06:41 +0000553 // ---------------------------------------------------------------------------
554 // Macro instructions.
Steve Blocka7e24c12009-10-30 11:49:00 +0000555
Steve Block3ce2e202009-11-05 08:53:23 +0000556 // Load a register with a long value as efficiently as possible.
Steve Blocka7e24c12009-10-30 11:49:00 +0000557 void Set(Register dst, int64_t x);
558 void Set(const Operand& dst, int64_t x);
559
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100560 // Move if the registers are not identical.
561 void Move(Register target, Register source);
562
Steve Blocka7e24c12009-10-30 11:49:00 +0000563 // Handle support
Steve Blocka7e24c12009-10-30 11:49:00 +0000564 void Move(Register dst, Handle<Object> source);
565 void Move(const Operand& dst, Handle<Object> source);
566 void Cmp(Register dst, Handle<Object> source);
567 void Cmp(const Operand& dst, Handle<Object> source);
568 void Push(Handle<Object> source);
Steve Blocka7e24c12009-10-30 11:49:00 +0000569
Leon Clarkee46be812010-01-19 14:06:41 +0000570 // Emit code to discard a non-negative number of pointer-sized elements
571 // from the stack, clobbering only the rsp register.
572 void Drop(int stack_elements);
573
  // Macro-level alias for the assembler's call() taking a label target.
  void Call(Label* target) { call(target); }
575
// Control Flow

// Jump/Call to an absolute address, an external reference, or a code
// object, with the given relocation mode where applicable.
void Jump(Address destination, RelocInfo::Mode rmode);
void Jump(ExternalReference ext);
void Jump(Handle<Code> code_object, RelocInfo::Mode rmode);

void Call(Address destination, RelocInfo::Mode rmode);
void Call(ExternalReference ext);
void Call(Handle<Code> code_object, RelocInfo::Mode rmode);

// Compare object type for heap object.
// Always use unsigned comparisons: above and below, not less and greater.
// Incoming register is heap_object and outgoing register is map.
// They may be the same register, and may be kScratchRegister.
void CmpObjectType(Register heap_object, InstanceType type, Register map);

// Compare instance type for map.
// Always use unsigned comparisons: above and below, not less and greater.
void CmpInstanceType(Register map, InstanceType type);

// Check if the map of an object is equal to a specified map and
// branch to label if not. Skip the smi check if not required
// (object is known to be a heap object).
void CheckMap(Register obj,
              Handle<Map> map,
              Label* fail,
              bool is_heap_object);

// Check if the object in register heap_object is a string. Afterwards the
// register map contains the object map and the register instance_type
// contains the instance_type. The registers map and instance_type can be the
// same in which case it contains the instance type afterwards. Either of the
// registers map and instance_type can be the same as heap_object.
Condition IsObjectStringType(Register heap_object,
                             Register map,
                             Register instance_type);

// FCmp compares and pops the two values on top of the FPU stack.
// The flag results are similar to integer cmp, but requires unsigned
// jcc instructions (je, ja, jae, jb, jbe, je, and jz).
void FCmp();
616
// Abort execution if argument is not a number. Used in debug code.
void AbortIfNotNumber(Register object);

// Abort execution if argument is a smi. Used in debug code.
void AbortIfSmi(Register object);

// Abort execution if argument is not a smi. Used in debug code.
void AbortIfNotSmi(Register object);

// Abort execution (with the given message) if argument is not the root
// value with the given index. Used in debug code.
void AbortIfNotRootValue(Register src,
                         Heap::RootListIndex root_value_index,
                         const char* message);
630
// ---------------------------------------------------------------------------
// Exception handling

// Push a new try handler and link into try handler chain. The return
// address must be pushed before calling this helper.
void PushTryHandler(CodeLocation try_location, HandlerType type);

// Unlink the stack handler on top of the stack from the try handler chain.
void PopTryHandler();

// ---------------------------------------------------------------------------
// Inline caching support

// Generate code for checking access rights - used for security checks
// on access to global objects across environments. The holder register
// is left untouched, but the scratch register and kScratchRegister,
// which must be different, are clobbered.
void CheckAccessGlobalProxy(Register holder_reg,
                            Register scratch,
                            Label* miss);
651
652
// ---------------------------------------------------------------------------
// Allocation support

// Allocate an object in new space. If the new space is exhausted control
// continues at the gc_required label. The allocated object is returned in
// result and end of the new object is returned in result_end. The register
// scratch can be passed as no_reg in which case an additional object
// reference will be added to the reloc info. The returned pointers in result
// and result_end have not yet been tagged as heap objects. If
// result_contains_top_on_entry is true the content of result is known to be
// the allocation top on entry (could be result_end from a previous call to
// AllocateInNewSpace). If result_contains_top_on_entry is true scratch
// should be no_reg as it is never used.
void AllocateInNewSpace(int object_size,
                        Register result,
                        Register result_end,
                        Register scratch,
                        Label* gc_required,
                        AllocationFlags flags);

// As above, but for objects whose size is a fixed header plus a scaled
// element count held in a register.
void AllocateInNewSpace(int header_size,
                        ScaleFactor element_size,
                        Register element_count,
                        Register result,
                        Register result_end,
                        Register scratch,
                        Label* gc_required,
                        AllocationFlags flags);

// As above, but the total object size is held in a register.
void AllocateInNewSpace(Register object_size,
                        Register result,
                        Register result_end,
                        Register scratch,
                        Label* gc_required,
                        AllocationFlags flags);

// Undo allocation in new space. The object passed and objects allocated after
// it will no longer be allocated. Make sure that no pointers are left to the
// object(s) no longer allocated as they would be invalid when allocation is
// un-done.
void UndoAllocationInNewSpace(Register object);

// Allocate a heap number in new space with undefined value. Returns
// tagged pointer in result register, or jumps to gc_required if new
// space is full.
void AllocateHeapNumber(Register result,
                        Register scratch,
                        Label* gc_required);

// Allocate a sequential string. All the header fields of the string object
// are initialized.
void AllocateTwoByteString(Register result,
                           Register length,
                           Register scratch1,
                           Register scratch2,
                           Register scratch3,
                           Label* gc_required);
void AllocateAsciiString(Register result,
                         Register length,
                         Register scratch1,
                         Register scratch2,
                         Register scratch3,
                         Label* gc_required);

// Allocate a raw cons string object. Only the map field of the result is
// initialized.
void AllocateConsString(Register result,
                        Register scratch1,
                        Register scratch2,
                        Label* gc_required);
void AllocateAsciiConsString(Register result,
                             Register scratch1,
                             Register scratch2,
                             Label* gc_required);
727
Steve Blocka7e24c12009-10-30 11:49:00 +0000728 // ---------------------------------------------------------------------------
729 // Support functions.
730
731 // Check if result is zero and op is negative.
732 void NegativeZeroTest(Register result, Register op, Label* then_label);
733
734 // Check if result is zero and op is negative in code using jump targets.
735 void NegativeZeroTest(CodeGenerator* cgen,
736 Register result,
737 Register op,
738 JumpTarget* then_target);
739
740 // Check if result is zero and any of op1 and op2 are negative.
741 // Register scratch is destroyed, and it must be different from op2.
742 void NegativeZeroTest(Register result, Register op1, Register op2,
743 Register scratch, Label* then_label);
744
745 // Try to get function prototype of a function and puts the value in
746 // the result register. Checks that the function really is a
747 // function and jumps to the miss label if the fast checks fail. The
748 // function register will be untouched; the other register may be
749 // clobbered.
750 void TryGetFunctionPrototype(Register function,
751 Register result,
752 Label* miss);
753
754 // Generates code for reporting that an illegal operation has
755 // occurred.
756 void IllegalOperation(int num_arguments);
757
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100758 // Picks out an array index from the hash field.
759 // Register use:
760 // hash - holds the index's hash. Clobbered.
761 // index - holds the overwritten index on exit.
762 void IndexFromHash(Register hash, Register index);
763
Steve Blockd0582a62009-12-15 09:54:21 +0000764 // Find the function context up the context chain.
765 void LoadContext(Register dst, int context_chain_length);
766
Steve Blocka7e24c12009-10-30 11:49:00 +0000767 // ---------------------------------------------------------------------------
768 // Runtime calls
769
770 // Call a code stub.
771 void CallStub(CodeStub* stub);
772
Ben Murdochbb769b22010-08-11 14:56:33 +0100773 // Call a code stub and return the code object called. Try to generate
774 // the code if necessary. Do not perform a GC but instead return a retry
775 // after GC failure.
776 Object* TryCallStub(CodeStub* stub);
777
Leon Clarkee46be812010-01-19 14:06:41 +0000778 // Tail call a code stub (jump).
779 void TailCallStub(CodeStub* stub);
780
Ben Murdochbb769b22010-08-11 14:56:33 +0100781 // Tail call a code stub (jump) and return the code object called. Try to
782 // generate the code if necessary. Do not perform a GC but instead return
783 // a retry after GC failure.
784 Object* TryTailCallStub(CodeStub* stub);
785
Steve Blocka7e24c12009-10-30 11:49:00 +0000786 // Return from a code stub after popping its arguments.
787 void StubReturn(int argc);
788
789 // Call a runtime routine.
Steve Blocka7e24c12009-10-30 11:49:00 +0000790 void CallRuntime(Runtime::Function* f, int num_arguments);
791
Ben Murdochbb769b22010-08-11 14:56:33 +0100792 // Call a runtime function, returning the CodeStub object called.
793 // Try to generate the stub code if necessary. Do not perform a GC
794 // but instead return a retry after GC failure.
795 Object* TryCallRuntime(Runtime::Function* f, int num_arguments);
796
Steve Blocka7e24c12009-10-30 11:49:00 +0000797 // Convenience function: Same as above, but takes the fid instead.
798 void CallRuntime(Runtime::FunctionId id, int num_arguments);
799
Ben Murdochbb769b22010-08-11 14:56:33 +0100800 // Convenience function: Same as above, but takes the fid instead.
801 Object* TryCallRuntime(Runtime::FunctionId id, int num_arguments);
802
Andrei Popescu402d9372010-02-26 13:31:12 +0000803 // Convenience function: call an external reference.
804 void CallExternalReference(const ExternalReference& ext,
805 int num_arguments);
806
Steve Blocka7e24c12009-10-30 11:49:00 +0000807 // Tail call of a runtime routine (jump).
Steve Block6ded16b2010-05-10 14:33:55 +0100808 // Like JumpToExternalReference, but also takes care of passing the number
809 // of parameters.
810 void TailCallExternalReference(const ExternalReference& ext,
811 int num_arguments,
812 int result_size);
813
814 // Convenience function: tail call a runtime routine (jump).
815 void TailCallRuntime(Runtime::FunctionId fid,
Steve Blocka7e24c12009-10-30 11:49:00 +0000816 int num_arguments,
817 int result_size);
818
Ben Murdochbb769b22010-08-11 14:56:33 +0100819 void PushHandleScope(Register scratch);
820
821 // Pops a handle scope using the specified scratch register and
822 // ensuring that saved register is left unchanged.
823 void PopHandleScope(Register saved, Register scratch);
824
825 // As PopHandleScope, but does not perform a GC. Instead, returns a
826 // retry after GC failure object if GC is necessary.
827 Object* TryPopHandleScope(Register saved, Register scratch);
828
Steve Blocka7e24c12009-10-30 11:49:00 +0000829 // Jump to a runtime routine.
Steve Block6ded16b2010-05-10 14:33:55 +0100830 void JumpToExternalReference(const ExternalReference& ext, int result_size);
Steve Blocka7e24c12009-10-30 11:49:00 +0000831
Leon Clarke4515c472010-02-03 11:58:03 +0000832 // Before calling a C-function from generated code, align arguments on stack.
833 // After aligning the frame, arguments must be stored in esp[0], esp[4],
834 // etc., not pushed. The argument count assumes all arguments are word sized.
835 // The number of slots reserved for arguments depends on platform. On Windows
836 // stack slots are reserved for the arguments passed in registers. On other
837 // platforms stack slots are only reserved for the arguments actually passed
838 // on the stack.
839 void PrepareCallCFunction(int num_arguments);
840
841 // Calls a C function and cleans up the space for arguments allocated
842 // by PrepareCallCFunction. The called function is not allowed to trigger a
843 // garbage collection, since that might move the code and invalidate the
844 // return address (unless this is somehow accounted for by the called
845 // function).
846 void CallCFunction(ExternalReference function, int num_arguments);
847 void CallCFunction(Register function, int num_arguments);
848
849 // Calculate the number of stack slots to reserve for arguments when calling a
850 // C function.
851 int ArgumentStackSlotsForCFunctionCall(int num_arguments);
Steve Blocka7e24c12009-10-30 11:49:00 +0000852
// ---------------------------------------------------------------------------
// Utilities

void Ret();

// Returns the handle that will be patched with the code object on
// installation (see code_object_).
Handle<Object> CodeObject() { return code_object_; }


// ---------------------------------------------------------------------------
// StatsCounter support

void SetCounter(StatsCounter* counter, int value);
void IncrementCounter(StatsCounter* counter, int value);
void DecrementCounter(StatsCounter* counter, int value);


// ---------------------------------------------------------------------------
// Debugging

// Calls Abort(msg) if the condition cc is not satisfied.
// Use --debug_code to enable.
void Assert(Condition cc, const char* msg);

void AssertFastElements(Register elements);

// Like Assert(), but always enabled.
void Check(Condition cc, const char* msg);

// Print a message to stdout and abort execution.
void Abort(const char* msg);

// Check that the stack is aligned.
void CheckStackAlignment();

// Verify restrictions about code generated in stubs.
void set_generating_stub(bool value) { generating_stub_ = value; }
bool generating_stub() { return generating_stub_; }
void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
bool allow_stub_calls() { return allow_stub_calls_; }
892
private:
// Flags backing generating_stub()/allow_stub_calls() above.
bool generating_stub_;
bool allow_stub_calls_;

// Returns a register holding the smi value. The register MUST NOT be
// modified. It may be the "smi 1 constant" register.
Register GetSmiConstant(Smi* value);

// Moves the smi value to the destination register.
void LoadSmiConstant(Register dst, Smi* value);

// This handle will be patched with the code object on installation.
Handle<Object> code_object_;

// Helper functions for generating invokes.
template <typename LabelType>
void InvokePrologue(const ParameterCount& expected,
                    const ParameterCount& actual,
                    Handle<Code> code_constant,
                    Register code_register,
                    LabelType* done,
                    InvokeFlag flag);

// Activation support.
void EnterFrame(StackFrame::Type type);
void LeaveFrame(StackFrame::Type type);

// Exit-frame construction, split so callers can emit code in between.
void EnterExitFramePrologue(bool save_rax);
void EnterExitFrameEpilogue(int result_size, int argc);

// Allocation support helpers.
// Loads the top of new-space into the result register.
// If flags contains RESULT_CONTAINS_TOP then result_end is valid and
// already contains the top of new-space, and scratch is invalid.
// Otherwise the address of the new-space top is loaded into scratch (if
// scratch is valid), and the new-space top is loaded into result.
void LoadAllocationTopHelper(Register result,
                             Register result_end,
                             Register scratch,
                             AllocationFlags flags);
// Update allocation top with value in result_end register.
// If scratch is valid, it contains the address of the allocation top.
void UpdateAllocationTopHelper(Register result_end, Register scratch);

// Helper for PopHandleScope. Allowed to perform a GC and returns
// NULL if gc_allowed. Does not perform a GC if !gc_allowed, and
// possibly returns a failure object indicating an allocation failure.
Object* PopHandleScopeHelper(Register saved,
                             Register scratch,
                             bool gc_allowed);
};
944
945
// The code patcher is used to patch (typically) small parts of code e.g. for
// debugging and other types of instrumentation. When using the code patcher
// the exact number of bytes specified must be emitted. It is not legal to
// emit relocation information. If any of these constraints are violated it
// causes an assertion.
class CodePatcher {
 public:
  CodePatcher(byte* address, int size);
  virtual ~CodePatcher();

  // Macro assembler to emit code.
  MacroAssembler* masm() { return &masm_; }

 private:
  byte* address_;  // The address of the code being patched.
  int size_;  // Number of bytes of the expected patch size.
  MacroAssembler masm_;  // Macro assembler used to generate the code.
};
964
965
966// -----------------------------------------------------------------------------
967// Static helper functions.
968
969// Generate an Operand for loading a field from an object.
970static inline Operand FieldOperand(Register object, int offset) {
971 return Operand(object, offset - kHeapObjectTag);
972}
973
974
975// Generate an Operand for loading an indexed field from an object.
976static inline Operand FieldOperand(Register object,
977 Register index,
978 ScaleFactor scale,
979 int offset) {
980 return Operand(object, index, scale, offset - kHeapObjectTag);
981}
982
983
#ifdef GENERATED_CODE_COVERAGE
// Coverage builds wrap every ACCESS_MASM use so that each macro-assembler
// call site first logs its __FILE__:__LINE__ position.
extern void LogGeneratedCodeCoverage(const char* file_line);
#define CODE_COVERAGE_STRINGIFY(x) #x
#define CODE_COVERAGE_TOSTRING(x) CODE_COVERAGE_STRINGIFY(x)
#define __FILE_LINE__ __FILE__ ":" CODE_COVERAGE_TOSTRING(__LINE__)
// NOTE(review): pushfd/pushad/popad are ia32 mnemonics; confirm this
// coverage path still assembles in x64 builds.
#define ACCESS_MASM(masm) { \
    byte* x64_coverage_function = \
        reinterpret_cast<byte*>(FUNCTION_ADDR(LogGeneratedCodeCoverage)); \
    masm->pushfd(); \
    masm->pushad(); \
    masm->push(Immediate(reinterpret_cast<int>(&__FILE_LINE__))); \
    masm->call(x64_coverage_function, RelocInfo::RUNTIME_ENTRY); \
    masm->pop(rax); \
    masm->popad(); \
    masm->popfd(); \
  } \
  masm->
#else
#define ACCESS_MASM(masm) masm->
#endif
1004
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001005// -----------------------------------------------------------------------------
1006// Template implementations.
1007
1008static int kSmiShift = kSmiTagSize + kSmiShiftSize;
1009
1010
// Emits code to negate the smi in src into dst. Jumps to on_smi_result
// when the negation produced a valid smi; falls through (with src
// restored) when the value is 0 or Smi::kMinValue, whose negations are
// not representable as smis (they compare equal to the original).
template <typename LabelType>
void MacroAssembler::SmiNeg(Register dst,
                            Register src,
                            LabelType* on_smi_result) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    movq(kScratchRegister, src);  // Back up src for the fall-through path.
    neg(dst);  // Low 32 bits are retained as zero by negation.
    // Test if result is zero or Smi::kMinValue.
    cmpq(dst, kScratchRegister);
    j(not_equal, on_smi_result);
    movq(src, kScratchRegister);
  } else {
    movq(dst, src);
    neg(dst);
    cmpq(dst, src);
    // If the result is zero or Smi::kMinValue, negation failed to create a smi.
    j(not_equal, on_smi_result);
  }
}
1031
1032
1033template <typename LabelType>
1034void MacroAssembler::SmiAdd(Register dst,
1035 Register src1,
1036 Register src2,
1037 LabelType* on_not_smi_result) {
1038 ASSERT_NOT_NULL(on_not_smi_result);
1039 ASSERT(!dst.is(src2));
1040 if (dst.is(src1)) {
1041 movq(kScratchRegister, src1);
1042 addq(kScratchRegister, src2);
1043 j(overflow, on_not_smi_result);
1044 movq(dst, kScratchRegister);
1045 } else {
1046 movq(dst, src1);
1047 addq(dst, src2);
1048 j(overflow, on_not_smi_result);
1049 }
1050}
1051
1052
// Emits code subtracting the smi in src2 from the smi in src1 into dst.
// Jumps to on_not_smi_result on overflow; src1 is intact on that path.
template <typename LabelType>
void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            Register src2,
                            LabelType* on_not_smi_result) {
  ASSERT_NOT_NULL(on_not_smi_result);
  ASSERT(!dst.is(src2));
  if (dst.is(src1)) {
    // cmpq performs the same subtraction as subq and sets the overflow
    // flag identically, but without modifying dst - so dst still holds
    // the original value if we branch to the slow case.
    cmpq(dst, src2);
    j(overflow, on_not_smi_result);
    subq(dst, src2);
  } else {
    movq(dst, src1);
    subq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}
1070
1071
// As SmiSub above, but with the second operand in memory. Jumps to
// on_not_smi_result on overflow; src1 is intact on that path when it
// aliases dst (the overflow is detected with cmpq before subtracting).
template <typename LabelType>
void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            const Operand& src2,
                            LabelType* on_not_smi_result) {
  ASSERT_NOT_NULL(on_not_smi_result);
  if (dst.is(src1)) {
    movq(kScratchRegister, src2);
    cmpq(src1, kScratchRegister);  // Sets overflow without clobbering src1.
    j(overflow, on_not_smi_result);
    subq(src1, kScratchRegister);
  } else {
    movq(dst, src1);
    subq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}
1089
1090
// Emits code multiplying the smis in src1 and src2 into dst. Jumps to
// on_not_smi_result when the product overflows the smi range or would
// be a negative zero; when dst aliases src1, src1 is restored on the
// bailout path.
template <typename LabelType>
void MacroAssembler::SmiMul(Register dst,
                            Register src1,
                            Register src2,
                            LabelType* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));

  if (dst.is(src1)) {
    NearLabel failure, zero_correct_result;
    movq(kScratchRegister, src1);  // Create backup for later testing.
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, &failure);

    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    NearLabel correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result);

    // Product is zero: the sign bit of src1 ^ src2 tells whether either
    // factor was negative (which would make the true result -0).
    movq(dst, kScratchRegister);
    xor_(dst, src2);
    j(positive, &zero_correct_result);  // Result was positive zero.

    bind(&failure);  // Reused failure exit, restores src1.
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);

    bind(&zero_correct_result);
    xor_(dst, dst);

    bind(&correct_result);
  } else {
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, on_not_smi_result);
    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    NearLabel correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result);
    // One of src1 and src2 is zero, the check whether the other is
    // negative.
    movq(kScratchRegister, src1);
    xor_(kScratchRegister, src2);
    j(negative, on_not_smi_result);
    bind(&correct_result);
  }
}
1143
1144
1145template <typename LabelType>
1146void MacroAssembler::SmiTryAddConstant(Register dst,
1147 Register src,
1148 Smi* constant,
1149 LabelType* on_not_smi_result) {
1150 // Does not assume that src is a smi.
1151 ASSERT_EQ(static_cast<int>(1), static_cast<int>(kSmiTagMask));
1152 ASSERT_EQ(0, kSmiTag);
1153 ASSERT(!dst.is(kScratchRegister));
1154 ASSERT(!src.is(kScratchRegister));
1155
1156 JumpIfNotSmi(src, on_not_smi_result);
1157 Register tmp = (dst.is(src) ? kScratchRegister : dst);
1158 LoadSmiConstant(tmp, constant);
1159 addq(tmp, src);
1160 j(overflow, on_not_smi_result);
1161 if (dst.is(src)) {
1162 movq(dst, tmp);
1163 }
1164}
1165
1166
// Adds a constant smi to the smi in src, storing the result in dst.
// Jumps to on_not_smi_result on overflow; when dst aliases src, src is
// left unchanged on that path.
template <typename LabelType>
void MacroAssembler::SmiAddConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    LabelType* on_not_smi_result) {
  if (constant->value() == 0) {
    // Adding zero cannot overflow; just move if needed.
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    // Accumulate in the scratch register so src survives a bailout.
    LoadSmiConstant(kScratchRegister, constant);
    addq(kScratchRegister, src);
    j(overflow, on_not_smi_result);
    movq(dst, kScratchRegister);
  } else {
    LoadSmiConstant(dst, constant);
    addq(dst, src);
    j(overflow, on_not_smi_result);
  }
}
1189
1190
// Subtracts a constant smi from the smi in src, storing the result in
// dst. Jumps to on_not_smi_result on overflow; when dst aliases src,
// src is intact on that path. Smi::kMinValue needs special handling
// because its negation is not a smi, so "add the negation" cannot be
// used for it.
template <typename LabelType>
void MacroAssembler::SmiSubConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    LabelType* on_not_smi_result) {
  if (constant->value() == 0) {
    // Subtracting zero cannot overflow; just move if needed.
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    if (constant->value() == Smi::kMinValue) {
      // Subtracting min-value from any non-negative value will overflow.
      // We test the non-negativeness before doing the subtraction.
      testq(src, src);
      j(not_sign, on_not_smi_result);
      LoadSmiConstant(kScratchRegister, constant);
      subq(dst, kScratchRegister);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(kScratchRegister, Smi::FromInt(-constant->value()));
      addq(kScratchRegister, dst);
      j(overflow, on_not_smi_result);
      movq(dst, kScratchRegister);
    }
  } else {
    if (constant->value() == Smi::kMinValue) {
      // Subtracting min-value from any non-negative value will overflow.
      // We test the non-negativeness before doing the subtraction.
      testq(src, src);
      j(not_sign, on_not_smi_result);
      LoadSmiConstant(dst, constant);
      // Adding and subtracting the min-value gives the same result, it only
      // differs on the overflow bit, which we don't check here.
      addq(dst, src);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(dst, Smi::FromInt(-(constant->value())));
      addq(dst, src);
      j(overflow, on_not_smi_result);
    }
  }
}
1234
1235
1236template <typename LabelType>
1237void MacroAssembler::SmiDiv(Register dst,
1238 Register src1,
1239 Register src2,
1240 LabelType* on_not_smi_result) {
1241 ASSERT(!src1.is(kScratchRegister));
1242 ASSERT(!src2.is(kScratchRegister));
1243 ASSERT(!dst.is(kScratchRegister));
1244 ASSERT(!src2.is(rax));
1245 ASSERT(!src2.is(rdx));
1246 ASSERT(!src1.is(rdx));
1247
1248 // Check for 0 divisor (result is +/-Infinity).
1249 NearLabel positive_divisor;
1250 testq(src2, src2);
1251 j(zero, on_not_smi_result);
1252
1253 if (src1.is(rax)) {
1254 movq(kScratchRegister, src1);
1255 }
1256 SmiToInteger32(rax, src1);
1257 // We need to rule out dividing Smi::kMinValue by -1, since that would
1258 // overflow in idiv and raise an exception.
1259 // We combine this with negative zero test (negative zero only happens
1260 // when dividing zero by a negative number).
1261
1262 // We overshoot a little and go to slow case if we divide min-value
1263 // by any negative value, not just -1.
1264 NearLabel safe_div;
1265 testl(rax, Immediate(0x7fffffff));
1266 j(not_zero, &safe_div);
1267 testq(src2, src2);
1268 if (src1.is(rax)) {
1269 j(positive, &safe_div);
1270 movq(src1, kScratchRegister);
1271 jmp(on_not_smi_result);
1272 } else {
1273 j(negative, on_not_smi_result);
1274 }
1275 bind(&safe_div);
1276
1277 SmiToInteger32(src2, src2);
1278 // Sign extend src1 into edx:eax.
1279 cdq();
1280 idivl(src2);
1281 Integer32ToSmi(src2, src2);
1282 // Check that the remainder is zero.
1283 testl(rdx, rdx);
1284 if (src1.is(rax)) {
1285 NearLabel smi_result;
1286 j(zero, &smi_result);
1287 movq(src1, kScratchRegister);
1288 jmp(on_not_smi_result);
1289 bind(&smi_result);
1290 } else {
1291 j(not_zero, on_not_smi_result);
1292 }
1293 if (!dst.is(src1) && src1.is(rax)) {
1294 movq(src1, kScratchRegister);
1295 }
1296 Integer32ToSmi(dst, rax);
1297}
1298
1299
// Computes src1 modulo src2 (both smis), storing the smi remainder in
// dst. Jumps to on_not_smi_result (with inputs retagged and src1
// restored) for a zero divisor, for Smi::kMinValue % -1 (idiv would
// fault), or when the result would be a negative zero. Clobbers rax
// and rdx.
template <typename LabelType>
void MacroAssembler::SmiMod(Register dst,
                            Register src1,
                            Register src2,
                            LabelType* on_not_smi_result) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!src2.is(rax));
  ASSERT(!src2.is(rdx));
  ASSERT(!src1.is(rdx));
  ASSERT(!src1.is(src2));

  // Zero divisor: go to the slow case.
  testq(src2, src2);
  j(zero, on_not_smi_result);

  if (src1.is(rax)) {
    movq(kScratchRegister, src1);  // Back up src1; idivl clobbers rax.
  }
  SmiToInteger32(rax, src1);
  SmiToInteger32(src2, src2);

  // Test for the edge case of dividing Smi::kMinValue by -1 (will overflow).
  NearLabel safe_div;
  cmpl(rax, Immediate(Smi::kMinValue));
  j(not_equal, &safe_div);
  cmpl(src2, Immediate(-1));
  j(not_equal, &safe_div);
  // Retag inputs and go slow case.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  jmp(on_not_smi_result);
  bind(&safe_div);

  // Sign extend eax into edx:eax.
  cdq();
  idivl(src2);
  // Restore smi tags on inputs.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  // Check for a negative zero result.  If the result is zero, and the
  // dividend is negative, go slow to return a floating point negative zero.
  NearLabel smi_result;
  testl(rdx, rdx);
  j(not_zero, &smi_result);
  testq(src1, src1);
  j(negative, on_not_smi_result);
  bind(&smi_result);
  Integer32ToSmi(dst, rdx);
}
1354
1355
// Logical-right-shifts the smi in src by the constant shift_value into
// dst (the aliased dst==src form is unimplemented because it is unused).
// Jumps to on_not_smi_result when the result would not be a valid smi.
template <typename LabelType>
void MacroAssembler::SmiShiftLogicalRightConstant(
    Register dst, Register src, int shift_value, LabelType* on_not_smi_result) {
  // Logic right shift interprets its result as an *unsigned* number.
  if (dst.is(src)) {
    UNIMPLEMENTED();  // Not used.
  } else {
    movq(dst, src);
    if (shift_value == 0) {
      // A zero shift cannot clear the sign bit, so a negative input
      // would remain negative - not a valid unsigned-shift result.
      testq(dst, dst);
      j(negative, on_not_smi_result);
    }
    // Shift the untagged value, then retag by shifting back up.
    shr(dst, Immediate(shift_value + kSmiShift));
    shl(dst, Immediate(kSmiShift));
  }
}
1372
1373
1374template <typename LabelType>
1375void MacroAssembler::SmiShiftLogicalRight(Register dst,
1376 Register src1,
1377 Register src2,
1378 LabelType* on_not_smi_result) {
1379 ASSERT(!dst.is(kScratchRegister));
1380 ASSERT(!src1.is(kScratchRegister));
1381 ASSERT(!src2.is(kScratchRegister));
1382 ASSERT(!dst.is(rcx));
1383 NearLabel result_ok;
1384 if (src1.is(rcx) || src2.is(rcx)) {
1385 movq(kScratchRegister, rcx);
1386 }
1387 if (!dst.is(src1)) {
1388 movq(dst, src1);
1389 }
1390 SmiToInteger32(rcx, src2);
1391 orl(rcx, Immediate(kSmiShift));
1392 shr_cl(dst); // Shift is rcx modulo 0x1f + 32.
1393 shl(dst, Immediate(kSmiShift));
1394 testq(dst, dst);
1395 if (src1.is(rcx) || src2.is(rcx)) {
1396 NearLabel positive_result;
1397 j(positive, &positive_result);
1398 if (src1.is(rcx)) {
1399 movq(src1, kScratchRegister);
1400 } else {
1401 movq(src2, kScratchRegister);
1402 }
1403 jmp(on_not_smi_result);
1404 bind(&positive_result);
1405 } else {
1406 j(negative, on_not_smi_result); // src2 was zero and src1 negative.
1407 }
1408}
1409
1410
// Given src1 and src2, of which at most one is a smi (checked in debug
// mode), moves the non-smi operand into dst. Jumps to on_not_smis when
// *neither* operand is a smi (both low tag bits set). Otherwise exactly
// one operand is a smi and branch-free bit arithmetic selects the other.
template <typename LabelType>
void MacroAssembler::SelectNonSmi(Register dst,
                                  Register src1,
                                  Register src2,
                                  LabelType* on_not_smis) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(src1));
  ASSERT(!dst.is(src2));
  // Both operands must not be smis.
#ifdef DEBUG
  if (allow_stub_calls()) {  // Check contains a stub call.
    Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
    Check(not_both_smis, "Both registers were smis in SelectNonSmi.");
  }
#endif
  ASSERT_EQ(0, kSmiTag);
  ASSERT_EQ(0, Smi::FromInt(0));
  movl(kScratchRegister, Immediate(kSmiTagMask));
  and_(kScratchRegister, src1);
  testl(kScratchRegister, src2);
  // If non-zero, both tag bits are set, i.e. neither operand is a smi
  // (smis have a zero tag bit since kSmiTag == 0).
  j(not_zero, on_not_smis);

  // Exactly one operand is a smi.
  ASSERT_EQ(1, static_cast<int>(kSmiTagMask));
  // kScratchRegister still holds src1 & kSmiTag, which is either zero or one.
  subq(kScratchRegister, Immediate(1));
  // If src1 is a smi, then scratch register all 1s, else it is all 0s.
  movq(dst, src1);
  xor_(dst, src2);
  and_(dst, kScratchRegister);
  // If src1 is a smi, dst holds src1 ^ src2, else it is zero.
  xor_(dst, src1);
  // If src1 is a smi, dst is src2, else it is src1, i.e., the non-smi.
}
1448
1449
1450template <typename LabelType>
1451void MacroAssembler::JumpIfSmi(Register src, LabelType* on_smi) {
1452 ASSERT_EQ(0, kSmiTag);
1453 Condition smi = CheckSmi(src);
1454 j(smi, on_smi);
1455}
1456
1457
1458template <typename LabelType>
1459void MacroAssembler::JumpIfNotSmi(Register src, LabelType* on_not_smi) {
1460 Condition smi = CheckSmi(src);
1461 j(NegateCondition(smi), on_not_smi);
1462}
1463
1464
1465template <typename LabelType>
1466void MacroAssembler::JumpIfNotPositiveSmi(Register src,
1467 LabelType* on_not_positive_smi) {
1468 Condition positive_smi = CheckPositiveSmi(src);
1469 j(NegateCondition(positive_smi), on_not_positive_smi);
1470}
1471
1472
// Jumps to on_equals when the smi in src equals the given smi constant.
// Uses SmiCompare, so src is expected to hold a smi.
template <typename LabelType>
void MacroAssembler::JumpIfSmiEqualsConstant(Register src,
                                             Smi* constant,
                                             LabelType* on_equals) {
  SmiCompare(src, constant);
  j(equal, on_equals);
}
1480
1481
1482template <typename LabelType>
1483void MacroAssembler::JumpIfNotValidSmiValue(Register src,
1484 LabelType* on_invalid) {
1485 Condition is_valid = CheckInteger32ValidSmiValue(src);
1486 j(NegateCondition(is_valid), on_invalid);
1487}
1488
1489
1490template <typename LabelType>
1491void MacroAssembler::JumpIfUIntNotValidSmiValue(Register src,
1492 LabelType* on_invalid) {
1493 Condition is_valid = CheckUInteger32ValidSmiValue(src);
1494 j(NegateCondition(is_valid), on_invalid);
1495}
1496
1497
1498template <typename LabelType>
1499void MacroAssembler::JumpIfNotBothSmi(Register src1,
1500 Register src2,
1501 LabelType* on_not_both_smi) {
1502 Condition both_smi = CheckBothSmi(src1, src2);
1503 j(NegateCondition(both_smi), on_not_both_smi);
1504}
1505
1506
1507template <typename LabelType>
1508void MacroAssembler::JumpIfNotBothPositiveSmi(Register src1,
1509 Register src2,
1510 LabelType* on_not_both_smi) {
1511 Condition both_smi = CheckBothPositiveSmi(src1, src2);
1512 j(NegateCondition(both_smi), on_not_both_smi);
1513}
1514
1515
// Jumps to on_fail unless first_object and second_object are both sequential
// (flat) ASCII strings. Clobbers scratch1 and scratch2; the object registers
// are left untouched.
template <typename LabelType>
void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register first_object,
                                                         Register second_object,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         LabelType* on_fail) {
  // Check that both objects are not smis (a smi has no map to inspect).
  Condition either_smi = CheckEitherSmi(first_object, second_object);
  j(either_smi, on_fail);

  // Load instance type for both strings.
  movq(scratch1, FieldOperand(first_object, HeapObject::kMapOffset));
  movq(scratch2, FieldOperand(second_object, HeapObject::kMapOffset));
  movzxbl(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzxbl(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ascii strings.
  ASSERT(kNotStringTag != 0);
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;

  andl(scratch1, Immediate(kFlatAsciiStringMask));
  andl(scratch2, Immediate(kFlatAsciiStringMask));
  // Interleave the bits to check both scratch1 and scratch2 in one test.
  // The mask must leave the top three bits clear so that scratch2 << 3 does
  // not overlap scratch1's bits.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  // scratch1 = scratch1 + scratch2 * 8, combining both masked type fields
  // into a single word for one comparison.
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmpl(scratch1,
       Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
  j(not_equal, on_fail);
}
1547
1548
// Jumps to failure unless the instance type in instance_type is that of a
// sequential ASCII string. scratch may alias instance_type; when it does not,
// instance_type is left untouched.
template <typename LabelType>
void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Register scratch,
    LabelType *failure) {
  if (!scratch.is(instance_type)) {
    movl(scratch, instance_type);
  }

  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;

  andl(scratch, Immediate(kFlatAsciiStringMask));
  // The masked type must show: is-a-string, sequential representation,
  // ASCII encoding.
  cmpl(scratch, Immediate(kStringTag | kSeqStringTag | kAsciiStringTag));
  j(not_equal, failure);
}
1565
1566
// Jumps to on_fail unless both given instance-type values are those of
// sequential ASCII strings. The instance-type registers are copied into
// scratch1/scratch2 first and are left untouched (unless aliased).
template <typename LabelType>
void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialAscii(
    Register first_object_instance_type,
    Register second_object_instance_type,
    Register scratch1,
    Register scratch2,
    LabelType* on_fail) {
  // Load instance type for both strings.
  movq(scratch1, first_object_instance_type);
  movq(scratch2, second_object_instance_type);

  // Check that both are flat ascii strings.
  ASSERT(kNotStringTag != 0);
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;

  andl(scratch1, Immediate(kFlatAsciiStringMask));
  andl(scratch2, Immediate(kFlatAsciiStringMask));
  // Interleave the bits to check both scratch1 and scratch2 in one test.
  // The mask's top three bits are clear, so scratch2 << 3 cannot overlap
  // scratch1's bits.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  // scratch1 = scratch1 + scratch2 * 8, packing both masked type fields into
  // one word for a single comparison.
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmpl(scratch1,
       Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
  j(not_equal, on_fail);
}
1593
1594
// Jumps to branch when the masked address of object, compared against the
// start of new space, satisfies condition cc (e.g. cc == equal branches when
// object IS in new space). Clobbers scratch and kScratchRegister; object is
// preserved unless scratch aliases it.
template <typename LabelType>
void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                LabelType* branch) {
  if (Serializer::enabled()) {
    // Can't do arithmetic on external references if it might get serialized.
    // The mask isn't really an address. We load it as an external reference in
    // case the size of the new space is different between the snapshot maker
    // and the running system.
    if (scratch.is(object)) {
      movq(kScratchRegister, ExternalReference::new_space_mask());
      and_(scratch, kScratchRegister);
    } else {
      movq(scratch, ExternalReference::new_space_mask());
      and_(scratch, object);
    }
    // Explicitly compare the masked address with the new-space start.
    movq(kScratchRegister, ExternalReference::new_space_start());
    cmpq(scratch, kScratchRegister);
    j(cc, branch);
  } else {
    // Snapshot-free build: the new-space start is a known constant, so fold
    // the comparison into the masking by computing (object - start) & mask
    // and testing for zero (the and_ sets the flags).
    ASSERT(is_int32(static_cast<int64_t>(Heap::NewSpaceMask())));
    intptr_t new_space_start =
        reinterpret_cast<intptr_t>(Heap::NewSpaceStart());
    movq(kScratchRegister, -new_space_start, RelocInfo::NONE);
    if (scratch.is(object)) {
      addq(scratch, kScratchRegister);
    } else {
      lea(scratch, Operand(object, kScratchRegister, times_1, 0));
    }
    and_(scratch, Immediate(static_cast<int32_t>(Heap::NewSpaceMask())));
    j(cc, branch);
  }
}
1629
1630
// Emits the argument-count check performed before invoking a JS function.
// When the expected and actual argument counts match (or adaption is
// explicitly disabled via kDontAdaptArgumentsSentinel), control falls through
// with rax = actual count and rbx = expected count as the calling convention
// requires. On a mismatch, the ArgumentsAdaptorTrampoline builtin is called
// (then jumps to done) or tail-jumped to, depending on flag. The code to
// invoke is passed either as code_constant or in code_register and is moved
// into rdx for the adaptor.
template <typename LabelType>
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    Register code_register,
                                    LabelType* done,
                                    InvokeFlag flag) {
  bool definitely_matches = false;
  NearLabel invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      // Counts known equal at compile time: no adaption code needed at all.
      definitely_matches = true;
    } else {
      Set(rax, actual.immediate());
      if (expected.immediate() ==
              SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
        // Don't worry about adapting arguments for built-ins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        Set(rbx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmpq(expected.reg(), Immediate(actual.immediate()));
      j(equal, &invoke);
      ASSERT(expected.reg().is(rbx));
      Set(rax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmpq(expected.reg(), actual.reg());
      j(equal, &invoke);
      ASSERT(actual.reg().is(rax));
      ASSERT(expected.reg().is(rbx));
    }
  }

  if (!definitely_matches) {
    // Counts may differ at runtime: go through the arguments adaptor.
    Handle<Code> adaptor =
        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
    if (!code_constant.is_null()) {
      // The adaptor expects the code entry in rdx; compute it from the
      // code object constant.
      movq(rdx, code_constant, RelocInfo::EMBEDDED_OBJECT);
      addq(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_register.is(rdx)) {
      movq(rdx, code_register);
    }

    if (flag == CALL_FUNCTION) {
      Call(adaptor, RelocInfo::CODE_TARGET);
      // The adaptor performed the invocation; skip the normal invoke path.
      jmp(done);
    } else {
      // Tail call: the adaptor jumps on to the function itself.
      Jump(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
1695
Steve Blocka7e24c12009-10-30 11:49:00 +00001696
1697} } // namespace v8::internal
1698
1699#endif // V8_X64_MACRO_ASSEMBLER_X64_H_