blob: 219ae4f8fffb5944f09faa36859506e3092b4aa2 [file] [log] [blame]
// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#ifndef V8_X64_MACRO_ASSEMBLER_X64_H_
29#define V8_X64_MACRO_ASSEMBLER_X64_H_
30
31#include "assembler.h"
32
33namespace v8 {
34namespace internal {
35
// Flags used for the AllocateInNewSpace functions.
// The flags are bit values and may be combined with bitwise-or.
enum AllocationFlags {
  // No special flags.
  NO_ALLOCATION_FLAGS = 0,
  // Return the pointer to the allocated object already tagged as a heap
  // object.
  TAG_OBJECT = 1 << 0,
  // The content of the result register already contains the allocation top in
  // new space.
  RESULT_CONTAINS_TOP = 1 << 1
};
46
Steve Blocka7e24c12009-10-30 11:49:00 +000047// Default scratch register used by MacroAssembler (and other code that needs
48// a spare register). The register isn't callee save, and not used by the
49// function calling convention.
Steve Block8defd9f2010-07-08 12:39:36 +010050static const Register kScratchRegister = { 10 }; // r10.
51static const Register kSmiConstantRegister = { 15 }; // r15 (callee save).
52static const Register kRootRegister = { 13 }; // r13 (callee save).
53// Value of smi in kSmiConstantRegister.
54static const int kSmiConstantRegisterValue = 1;
Steve Blocka7e24c12009-10-30 11:49:00 +000055
Leon Clarkee46be812010-01-19 14:06:41 +000056// Convenience for platform-independent signatures.
57typedef Operand MemOperand;
58
Steve Blocka7e24c12009-10-30 11:49:00 +000059// Forward declaration.
60class JumpTarget;
61
62struct SmiIndex {
63 SmiIndex(Register index_register, ScaleFactor scale)
64 : reg(index_register),
65 scale(scale) {}
66 Register reg;
67 ScaleFactor scale;
68};
69
70// MacroAssembler implements a collection of frequently used macros.
71class MacroAssembler: public Assembler {
72 public:
73 MacroAssembler(void* buffer, int size);
74
75 void LoadRoot(Register destination, Heap::RootListIndex index);
76 void CompareRoot(Register with, Heap::RootListIndex index);
77 void CompareRoot(Operand with, Heap::RootListIndex index);
78 void PushRoot(Heap::RootListIndex index);
Kristian Monsen25f61362010-05-21 11:50:48 +010079 void StoreRoot(Register source, Heap::RootListIndex index);
Steve Blocka7e24c12009-10-30 11:49:00 +000080
81 // ---------------------------------------------------------------------------
82 // GC Support
83
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010084 // For page containing |object| mark region covering |addr| dirty.
85 // RecordWriteHelper only works if the object is not in new
Steve Block6ded16b2010-05-10 14:33:55 +010086 // space.
87 void RecordWriteHelper(Register object,
88 Register addr,
89 Register scratch);
90
91 // Check if object is in new space. The condition cc can be equal or
92 // not_equal. If it is equal a jump will be done if the object is on new
93 // space. The register scratch can be object itself, but it will be clobbered.
Kristian Monsen0d5e1162010-09-30 15:31:59 +010094 template <typename LabelType>
Steve Block6ded16b2010-05-10 14:33:55 +010095 void InNewSpace(Register object,
96 Register scratch,
97 Condition cc,
Kristian Monsen0d5e1162010-09-30 15:31:59 +010098 LabelType* branch);
Steve Block6ded16b2010-05-10 14:33:55 +010099
Steve Block8defd9f2010-07-08 12:39:36 +0100100 // For page containing |object| mark region covering [object+offset]
101 // dirty. |object| is the object being stored into, |value| is the
102 // object being stored. If |offset| is zero, then the |scratch|
103 // register contains the array index into the elements array
Ben Murdochf87a2032010-10-22 12:50:53 +0100104 // represented as an untagged 32-bit integer. All registers are
105 // clobbered by the operation. RecordWrite filters out smis so it
106 // does not update the write barrier if the value is a smi.
Steve Blocka7e24c12009-10-30 11:49:00 +0000107 void RecordWrite(Register object,
108 int offset,
109 Register value,
110 Register scratch);
111
Steve Block8defd9f2010-07-08 12:39:36 +0100112 // For page containing |object| mark region covering [address]
113 // dirty. |object| is the object being stored into, |value| is the
114 // object being stored. All registers are clobbered by the
115 // operation. RecordWrite filters out smis so it does not update
116 // the write barrier if the value is a smi.
117 void RecordWrite(Register object,
118 Register address,
119 Register value);
120
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100121 // For page containing |object| mark region covering [object+offset] dirty.
Steve Block3ce2e202009-11-05 08:53:23 +0000122 // The value is known to not be a smi.
123 // object is the object being stored into, value is the object being stored.
124 // If offset is zero, then the scratch register contains the array index into
Ben Murdochf87a2032010-10-22 12:50:53 +0100125 // the elements array represented as an untagged 32-bit integer.
Steve Block3ce2e202009-11-05 08:53:23 +0000126 // All registers are clobbered by the operation.
127 void RecordWriteNonSmi(Register object,
128 int offset,
129 Register value,
130 Register scratch);
131
Steve Blocka7e24c12009-10-30 11:49:00 +0000132#ifdef ENABLE_DEBUGGER_SUPPORT
133 // ---------------------------------------------------------------------------
134 // Debugger Support
135
Andrei Popescu402d9372010-02-26 13:31:12 +0000136 void DebugBreak();
Steve Blocka7e24c12009-10-30 11:49:00 +0000137#endif
138
139 // ---------------------------------------------------------------------------
Steve Blockd0582a62009-12-15 09:54:21 +0000140 // Stack limit support
141
142 // Do simple test for stack overflow. This doesn't handle an overflow.
143 void StackLimitCheck(Label* on_stack_limit_hit);
144
145 // ---------------------------------------------------------------------------
Steve Blocka7e24c12009-10-30 11:49:00 +0000146 // Activation frames
147
148 void EnterInternalFrame() { EnterFrame(StackFrame::INTERNAL); }
149 void LeaveInternalFrame() { LeaveFrame(StackFrame::INTERNAL); }
150
151 void EnterConstructFrame() { EnterFrame(StackFrame::CONSTRUCT); }
152 void LeaveConstructFrame() { LeaveFrame(StackFrame::CONSTRUCT); }
153
Steve Blockd0582a62009-12-15 09:54:21 +0000154 // Enter specific kind of exit frame; either in normal or
155 // debug mode. Expects the number of arguments in register rax and
Steve Blocka7e24c12009-10-30 11:49:00 +0000156 // sets up the number of arguments in register rdi and the pointer
157 // to the first argument in register rsi.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -0800158 //
159 // Allocates arg_stack_space * kPointerSize memory (not GCed) on the stack
160 // accessible via StackSpaceOperand.
161 void EnterExitFrame(int arg_stack_space = 0);
Steve Blocka7e24c12009-10-30 11:49:00 +0000162
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -0800163 // Enter specific kind of exit frame. Allocates arg_stack_space * kPointerSize
164 // memory (not GCed) on the stack accessible via StackSpaceOperand.
165 void EnterApiExitFrame(int arg_stack_space);
Ben Murdochbb769b22010-08-11 14:56:33 +0100166
Steve Blocka7e24c12009-10-30 11:49:00 +0000167 // Leave the current exit frame. Expects/provides the return value in
168 // register rax:rdx (untouched) and the pointer to the first
169 // argument in register rsi.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -0800170 void LeaveExitFrame();
Steve Blocka7e24c12009-10-30 11:49:00 +0000171
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -0800172 // Leave the current exit frame. Expects/provides the return value in
173 // register rax (untouched).
174 void LeaveApiExitFrame();
Steve Blocka7e24c12009-10-30 11:49:00 +0000175
176 // ---------------------------------------------------------------------------
177 // JavaScript invokes
178
179 // Invoke the JavaScript function code by either calling or jumping.
180 void InvokeCode(Register code,
181 const ParameterCount& expected,
182 const ParameterCount& actual,
183 InvokeFlag flag);
184
185 void InvokeCode(Handle<Code> code,
186 const ParameterCount& expected,
187 const ParameterCount& actual,
188 RelocInfo::Mode rmode,
189 InvokeFlag flag);
190
191 // Invoke the JavaScript function in the given register. Changes the
192 // current context to the context in the function before invoking.
193 void InvokeFunction(Register function,
194 const ParameterCount& actual,
195 InvokeFlag flag);
196
Andrei Popescu402d9372010-02-26 13:31:12 +0000197 void InvokeFunction(JSFunction* function,
198 const ParameterCount& actual,
199 InvokeFlag flag);
200
Steve Blocka7e24c12009-10-30 11:49:00 +0000201 // Invoke specified builtin JavaScript function. Adds an entry to
202 // the unresolved list if the name does not resolve.
203 void InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag);
204
Steve Block791712a2010-08-27 10:21:07 +0100205 // Store the function for the given builtin in the target register.
206 void GetBuiltinFunction(Register target, Builtins::JavaScript id);
207
Steve Blocka7e24c12009-10-30 11:49:00 +0000208 // Store the code object for the given builtin in the target register.
209 void GetBuiltinEntry(Register target, Builtins::JavaScript id);
210
211
212 // ---------------------------------------------------------------------------
213 // Smi tagging, untagging and operations on tagged smis.
214
Steve Block8defd9f2010-07-08 12:39:36 +0100215 void InitializeSmiConstantRegister() {
216 movq(kSmiConstantRegister,
217 reinterpret_cast<uint64_t>(Smi::FromInt(kSmiConstantRegisterValue)),
218 RelocInfo::NONE);
219 }
220
Steve Blocka7e24c12009-10-30 11:49:00 +0000221 // Conversions between tagged smi values and non-tagged integer values.
222
223 // Tag an integer value. The result must be known to be a valid smi value.
Leon Clarke4515c472010-02-03 11:58:03 +0000224 // Only uses the low 32 bits of the src register. Sets the N and Z flags
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100225 // based on the value of the resulting smi.
Steve Blocka7e24c12009-10-30 11:49:00 +0000226 void Integer32ToSmi(Register dst, Register src);
227
Kristian Monsen9dcf7e22010-06-28 14:14:28 +0100228 // Stores an integer32 value into a memory field that already holds a smi.
229 void Integer32ToSmiField(const Operand& dst, Register src);
230
Steve Blocka7e24c12009-10-30 11:49:00 +0000231 // Adds constant to src and tags the result as a smi.
232 // Result must be a valid smi.
Steve Block3ce2e202009-11-05 08:53:23 +0000233 void Integer64PlusConstantToSmi(Register dst, Register src, int constant);
Steve Blocka7e24c12009-10-30 11:49:00 +0000234
235 // Convert smi to 32-bit integer. I.e., not sign extended into
236 // high 32 bits of destination.
237 void SmiToInteger32(Register dst, Register src);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100238 void SmiToInteger32(Register dst, const Operand& src);
Steve Blocka7e24c12009-10-30 11:49:00 +0000239
240 // Convert smi to 64-bit integer (sign extended if necessary).
241 void SmiToInteger64(Register dst, Register src);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +0100242 void SmiToInteger64(Register dst, const Operand& src);
Steve Blocka7e24c12009-10-30 11:49:00 +0000243
244 // Multiply a positive smi's integer value by a power of two.
245 // Provides result as 64-bit integer value.
246 void PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
247 Register src,
248 int power);
249
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100250 // Divide a positive smi's integer value by a power of two.
251 // Provides result as 32-bit integer value.
252 void PositiveSmiDivPowerOfTwoToInteger32(Register dst,
253 Register src,
254 int power);
255
256
Steve Block3ce2e202009-11-05 08:53:23 +0000257 // Simple comparison of smis.
258 void SmiCompare(Register dst, Register src);
259 void SmiCompare(Register dst, Smi* src);
Steve Block6ded16b2010-05-10 14:33:55 +0100260 void SmiCompare(Register dst, const Operand& src);
Steve Block3ce2e202009-11-05 08:53:23 +0000261 void SmiCompare(const Operand& dst, Register src);
262 void SmiCompare(const Operand& dst, Smi* src);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +0100263 // Compare the int32 in src register to the value of the smi stored at dst.
264 void SmiCompareInteger32(const Operand& dst, Register src);
Steve Block3ce2e202009-11-05 08:53:23 +0000265 // Sets sign and zero flags depending on value of smi in register.
266 void SmiTest(Register src);
267
Steve Blocka7e24c12009-10-30 11:49:00 +0000268 // Functions performing a check on a known or potential smi. Returns
269 // a condition that is satisfied if the check is successful.
270
271 // Is the value a tagged smi.
272 Condition CheckSmi(Register src);
273
Ben Murdochf87a2032010-10-22 12:50:53 +0100274 // Is the value a non-negative tagged smi.
275 Condition CheckNonNegativeSmi(Register src);
Steve Blocka7e24c12009-10-30 11:49:00 +0000276
Leon Clarkee46be812010-01-19 14:06:41 +0000277 // Are both values tagged smis.
Steve Blocka7e24c12009-10-30 11:49:00 +0000278 Condition CheckBothSmi(Register first, Register second);
279
Ben Murdochf87a2032010-10-22 12:50:53 +0100280 // Are both values non-negative tagged smis.
281 Condition CheckBothNonNegativeSmi(Register first, Register second);
Leon Clarked91b9f72010-01-27 17:25:45 +0000282
Leon Clarkee46be812010-01-19 14:06:41 +0000283 // Are either value a tagged smi.
Ben Murdochbb769b22010-08-11 14:56:33 +0100284 Condition CheckEitherSmi(Register first,
285 Register second,
286 Register scratch = kScratchRegister);
Leon Clarkee46be812010-01-19 14:06:41 +0000287
Steve Blocka7e24c12009-10-30 11:49:00 +0000288 // Is the value the minimum smi value (since we are using
289 // two's complement numbers, negating the value is known to yield
290 // a non-smi value).
291 Condition CheckIsMinSmi(Register src);
292
Steve Blocka7e24c12009-10-30 11:49:00 +0000293 // Checks whether an 32-bit integer value is a valid for conversion
294 // to a smi.
295 Condition CheckInteger32ValidSmiValue(Register src);
296
Steve Block3ce2e202009-11-05 08:53:23 +0000297 // Checks whether an 32-bit unsigned integer value is a valid for
298 // conversion to a smi.
299 Condition CheckUInteger32ValidSmiValue(Register src);
300
Steve Blocka7e24c12009-10-30 11:49:00 +0000301 // Test-and-jump functions. Typically combines a check function
302 // above with a conditional jump.
303
304 // Jump if the value cannot be represented by a smi.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100305 template <typename LabelType>
306 void JumpIfNotValidSmiValue(Register src, LabelType* on_invalid);
Steve Blocka7e24c12009-10-30 11:49:00 +0000307
Steve Block3ce2e202009-11-05 08:53:23 +0000308 // Jump if the unsigned integer value cannot be represented by a smi.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100309 template <typename LabelType>
310 void JumpIfUIntNotValidSmiValue(Register src, LabelType* on_invalid);
Steve Block3ce2e202009-11-05 08:53:23 +0000311
Steve Blocka7e24c12009-10-30 11:49:00 +0000312 // Jump to label if the value is a tagged smi.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100313 template <typename LabelType>
314 void JumpIfSmi(Register src, LabelType* on_smi);
Steve Blocka7e24c12009-10-30 11:49:00 +0000315
316 // Jump to label if the value is not a tagged smi.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100317 template <typename LabelType>
318 void JumpIfNotSmi(Register src, LabelType* on_not_smi);
Steve Blocka7e24c12009-10-30 11:49:00 +0000319
Ben Murdochf87a2032010-10-22 12:50:53 +0100320 // Jump to label if the value is not a non-negative tagged smi.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100321 template <typename LabelType>
Ben Murdochf87a2032010-10-22 12:50:53 +0100322 void JumpUnlessNonNegativeSmi(Register src, LabelType* on_not_smi);
Steve Blocka7e24c12009-10-30 11:49:00 +0000323
Steve Block3ce2e202009-11-05 08:53:23 +0000324 // Jump to label if the value, which must be a tagged smi, has value equal
Steve Blocka7e24c12009-10-30 11:49:00 +0000325 // to the constant.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100326 template <typename LabelType>
327 void JumpIfSmiEqualsConstant(Register src,
328 Smi* constant,
329 LabelType* on_equals);
Steve Blocka7e24c12009-10-30 11:49:00 +0000330
331 // Jump if either or both register are not smi values.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100332 template <typename LabelType>
333 void JumpIfNotBothSmi(Register src1,
334 Register src2,
335 LabelType* on_not_both_smi);
Steve Blocka7e24c12009-10-30 11:49:00 +0000336
Ben Murdochf87a2032010-10-22 12:50:53 +0100337 // Jump if either or both register are not non-negative smi values.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100338 template <typename LabelType>
Ben Murdochf87a2032010-10-22 12:50:53 +0100339 void JumpUnlessBothNonNegativeSmi(Register src1, Register src2,
340 LabelType* on_not_both_smi);
Leon Clarked91b9f72010-01-27 17:25:45 +0000341
Steve Blocka7e24c12009-10-30 11:49:00 +0000342 // Operations on tagged smi values.
343
344 // Smis represent a subset of integers. The subset is always equivalent to
345 // a two's complement interpretation of a fixed number of bits.
346
347 // Optimistically adds an integer constant to a supposed smi.
348 // If the src is not a smi, or the result is not a smi, jump to
349 // the label.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100350 template <typename LabelType>
Steve Blocka7e24c12009-10-30 11:49:00 +0000351 void SmiTryAddConstant(Register dst,
352 Register src,
Steve Block3ce2e202009-11-05 08:53:23 +0000353 Smi* constant,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100354 LabelType* on_not_smi_result);
Steve Blocka7e24c12009-10-30 11:49:00 +0000355
Steve Block3ce2e202009-11-05 08:53:23 +0000356 // Add an integer constant to a tagged smi, giving a tagged smi as result.
357 // No overflow testing on the result is done.
358 void SmiAddConstant(Register dst, Register src, Smi* constant);
359
Leon Clarkef7060e22010-06-03 12:02:55 +0100360 // Add an integer constant to a tagged smi, giving a tagged smi as result.
361 // No overflow testing on the result is done.
362 void SmiAddConstant(const Operand& dst, Smi* constant);
363
Steve Blocka7e24c12009-10-30 11:49:00 +0000364 // Add an integer constant to a tagged smi, giving a tagged smi as result,
365 // or jumping to a label if the result cannot be represented by a smi.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100366 template <typename LabelType>
Steve Blocka7e24c12009-10-30 11:49:00 +0000367 void SmiAddConstant(Register dst,
368 Register src,
Steve Block3ce2e202009-11-05 08:53:23 +0000369 Smi* constant,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100370 LabelType* on_not_smi_result);
Steve Blocka7e24c12009-10-30 11:49:00 +0000371
372 // Subtract an integer constant from a tagged smi, giving a tagged smi as
Steve Block6ded16b2010-05-10 14:33:55 +0100373 // result. No testing on the result is done. Sets the N and Z flags
374 // based on the value of the resulting integer.
Steve Block3ce2e202009-11-05 08:53:23 +0000375 void SmiSubConstant(Register dst, Register src, Smi* constant);
376
377 // Subtract an integer constant from a tagged smi, giving a tagged smi as
Steve Blocka7e24c12009-10-30 11:49:00 +0000378 // result, or jumping to a label if the result cannot be represented by a smi.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100379 template <typename LabelType>
Steve Blocka7e24c12009-10-30 11:49:00 +0000380 void SmiSubConstant(Register dst,
381 Register src,
Steve Block3ce2e202009-11-05 08:53:23 +0000382 Smi* constant,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100383 LabelType* on_not_smi_result);
Steve Blocka7e24c12009-10-30 11:49:00 +0000384
385 // Negating a smi can give a negative zero or too large positive value.
Steve Block3ce2e202009-11-05 08:53:23 +0000386 // NOTICE: This operation jumps on success, not failure!
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100387 template <typename LabelType>
Steve Blocka7e24c12009-10-30 11:49:00 +0000388 void SmiNeg(Register dst,
389 Register src,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100390 LabelType* on_smi_result);
Steve Blocka7e24c12009-10-30 11:49:00 +0000391
392 // Adds smi values and return the result as a smi.
393 // If dst is src1, then src1 will be destroyed, even if
394 // the operation is unsuccessful.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100395 template <typename LabelType>
Steve Blocka7e24c12009-10-30 11:49:00 +0000396 void SmiAdd(Register dst,
397 Register src1,
398 Register src2,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100399 LabelType* on_not_smi_result);
400
401 void SmiAdd(Register dst,
402 Register src1,
403 Register src2);
Steve Blocka7e24c12009-10-30 11:49:00 +0000404
405 // Subtracts smi values and return the result as a smi.
406 // If dst is src1, then src1 will be destroyed, even if
407 // the operation is unsuccessful.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100408 template <typename LabelType>
Steve Blocka7e24c12009-10-30 11:49:00 +0000409 void SmiSub(Register dst,
410 Register src1,
411 Register src2,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100412 LabelType* on_not_smi_result);
Steve Blocka7e24c12009-10-30 11:49:00 +0000413
Steve Block6ded16b2010-05-10 14:33:55 +0100414 void SmiSub(Register dst,
415 Register src1,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100416 Register src2);
417
418 template <typename LabelType>
419 void SmiSub(Register dst,
420 Register src1,
Leon Clarkef7060e22010-06-03 12:02:55 +0100421 const Operand& src2,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100422 LabelType* on_not_smi_result);
423
424 void SmiSub(Register dst,
425 Register src1,
426 const Operand& src2);
Steve Block6ded16b2010-05-10 14:33:55 +0100427
Steve Blocka7e24c12009-10-30 11:49:00 +0000428 // Multiplies smi values and return the result as a smi,
429 // if possible.
430 // If dst is src1, then src1 will be destroyed, even if
431 // the operation is unsuccessful.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100432 template <typename LabelType>
Steve Blocka7e24c12009-10-30 11:49:00 +0000433 void SmiMul(Register dst,
434 Register src1,
435 Register src2,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100436 LabelType* on_not_smi_result);
Steve Blocka7e24c12009-10-30 11:49:00 +0000437
438 // Divides one smi by another and returns the quotient.
439 // Clobbers rax and rdx registers.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100440 template <typename LabelType>
Steve Blocka7e24c12009-10-30 11:49:00 +0000441 void SmiDiv(Register dst,
442 Register src1,
443 Register src2,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100444 LabelType* on_not_smi_result);
Steve Blocka7e24c12009-10-30 11:49:00 +0000445
446 // Divides one smi by another and returns the remainder.
447 // Clobbers rax and rdx registers.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100448 template <typename LabelType>
Steve Blocka7e24c12009-10-30 11:49:00 +0000449 void SmiMod(Register dst,
450 Register src1,
451 Register src2,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100452 LabelType* on_not_smi_result);
Steve Blocka7e24c12009-10-30 11:49:00 +0000453
454 // Bitwise operations.
455 void SmiNot(Register dst, Register src);
456 void SmiAnd(Register dst, Register src1, Register src2);
457 void SmiOr(Register dst, Register src1, Register src2);
458 void SmiXor(Register dst, Register src1, Register src2);
Steve Block3ce2e202009-11-05 08:53:23 +0000459 void SmiAndConstant(Register dst, Register src1, Smi* constant);
460 void SmiOrConstant(Register dst, Register src1, Smi* constant);
461 void SmiXorConstant(Register dst, Register src1, Smi* constant);
Steve Blocka7e24c12009-10-30 11:49:00 +0000462
463 void SmiShiftLeftConstant(Register dst,
464 Register src,
Kristian Monsen25f61362010-05-21 11:50:48 +0100465 int shift_value);
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100466 template <typename LabelType>
Steve Blocka7e24c12009-10-30 11:49:00 +0000467 void SmiShiftLogicalRightConstant(Register dst,
468 Register src,
469 int shift_value,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100470 LabelType* on_not_smi_result);
Steve Blocka7e24c12009-10-30 11:49:00 +0000471 void SmiShiftArithmeticRightConstant(Register dst,
472 Register src,
473 int shift_value);
474
475 // Shifts a smi value to the left, and returns the result if that is a smi.
476 // Uses and clobbers rcx, so dst may not be rcx.
477 void SmiShiftLeft(Register dst,
478 Register src1,
Kristian Monsen25f61362010-05-21 11:50:48 +0100479 Register src2);
Steve Blocka7e24c12009-10-30 11:49:00 +0000480 // Shifts a smi value to the right, shifting in zero bits at the top, and
481 // returns the unsigned intepretation of the result if that is a smi.
482 // Uses and clobbers rcx, so dst may not be rcx.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100483 template <typename LabelType>
Steve Blocka7e24c12009-10-30 11:49:00 +0000484 void SmiShiftLogicalRight(Register dst,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100485 Register src1,
486 Register src2,
487 LabelType* on_not_smi_result);
Steve Blocka7e24c12009-10-30 11:49:00 +0000488 // Shifts a smi value to the right, sign extending the top, and
489 // returns the signed intepretation of the result. That will always
490 // be a valid smi value, since it's numerically smaller than the
491 // original.
492 // Uses and clobbers rcx, so dst may not be rcx.
493 void SmiShiftArithmeticRight(Register dst,
494 Register src1,
495 Register src2);
496
497 // Specialized operations
498
499 // Select the non-smi register of two registers where exactly one is a
500 // smi. If neither are smis, jump to the failure label.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100501 template <typename LabelType>
Steve Blocka7e24c12009-10-30 11:49:00 +0000502 void SelectNonSmi(Register dst,
503 Register src1,
504 Register src2,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100505 LabelType* on_not_smis);
Steve Blocka7e24c12009-10-30 11:49:00 +0000506
507 // Converts, if necessary, a smi to a combination of number and
508 // multiplier to be used as a scaled index.
509 // The src register contains a *positive* smi value. The shift is the
510 // power of two to multiply the index value by (e.g.
511 // to index by smi-value * kPointerSize, pass the smi and kPointerSizeLog2).
512 // The returned index register may be either src or dst, depending
513 // on what is most efficient. If src and dst are different registers,
514 // src is always unchanged.
515 SmiIndex SmiToIndex(Register dst, Register src, int shift);
516
517 // Converts a positive smi to a negative index.
518 SmiIndex SmiToNegativeIndex(Register dst, Register src, int shift);
519
Steve Block3ce2e202009-11-05 08:53:23 +0000520 // Basic Smi operations.
521 void Move(Register dst, Smi* source) {
Steve Block8defd9f2010-07-08 12:39:36 +0100522 LoadSmiConstant(dst, source);
Steve Block3ce2e202009-11-05 08:53:23 +0000523 }
524
525 void Move(const Operand& dst, Smi* source) {
Steve Block8defd9f2010-07-08 12:39:36 +0100526 Register constant = GetSmiConstant(source);
527 movq(dst, constant);
Steve Block3ce2e202009-11-05 08:53:23 +0000528 }
529
530 void Push(Smi* smi);
531 void Test(const Operand& dst, Smi* source);
532
Steve Blocka7e24c12009-10-30 11:49:00 +0000533 // ---------------------------------------------------------------------------
Leon Clarkee46be812010-01-19 14:06:41 +0000534 // String macros.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100535 template <typename LabelType>
Leon Clarkee46be812010-01-19 14:06:41 +0000536 void JumpIfNotBothSequentialAsciiStrings(Register first_object,
537 Register second_object,
538 Register scratch1,
539 Register scratch2,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100540 LabelType* on_not_both_flat_ascii);
Leon Clarkee46be812010-01-19 14:06:41 +0000541
Steve Block6ded16b2010-05-10 14:33:55 +0100542 // Check whether the instance type represents a flat ascii string. Jump to the
543 // label if not. If the instance type can be scratched specify same register
544 // for both instance type and scratch.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100545 template <typename LabelType>
546 void JumpIfInstanceTypeIsNotSequentialAscii(
547 Register instance_type,
548 Register scratch,
549 LabelType *on_not_flat_ascii_string);
Steve Block6ded16b2010-05-10 14:33:55 +0100550
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100551 template <typename LabelType>
Steve Block6ded16b2010-05-10 14:33:55 +0100552 void JumpIfBothInstanceTypesAreNotSequentialAscii(
553 Register first_object_instance_type,
554 Register second_object_instance_type,
555 Register scratch1,
556 Register scratch2,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100557 LabelType* on_fail);
Steve Block6ded16b2010-05-10 14:33:55 +0100558
Leon Clarkee46be812010-01-19 14:06:41 +0000559 // ---------------------------------------------------------------------------
560 // Macro instructions.
Steve Blocka7e24c12009-10-30 11:49:00 +0000561
Steve Block3ce2e202009-11-05 08:53:23 +0000562 // Load a register with a long value as efficiently as possible.
Steve Blocka7e24c12009-10-30 11:49:00 +0000563 void Set(Register dst, int64_t x);
564 void Set(const Operand& dst, int64_t x);
565
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100566 // Move if the registers are not identical.
567 void Move(Register target, Register source);
568
Steve Blocka7e24c12009-10-30 11:49:00 +0000569 // Handle support
Steve Blocka7e24c12009-10-30 11:49:00 +0000570 void Move(Register dst, Handle<Object> source);
571 void Move(const Operand& dst, Handle<Object> source);
572 void Cmp(Register dst, Handle<Object> source);
573 void Cmp(const Operand& dst, Handle<Object> source);
574 void Push(Handle<Object> source);
Steve Blocka7e24c12009-10-30 11:49:00 +0000575
Leon Clarkee46be812010-01-19 14:06:41 +0000576 // Emit code to discard a non-negative number of pointer-sized elements
577 // from the stack, clobbering only the rsp register.
578 void Drop(int stack_elements);
579
580 void Call(Label* target) { call(target); }
581
Steve Blocka7e24c12009-10-30 11:49:00 +0000582 // Control Flow
583 void Jump(Address destination, RelocInfo::Mode rmode);
584 void Jump(ExternalReference ext);
585 void Jump(Handle<Code> code_object, RelocInfo::Mode rmode);
586
587 void Call(Address destination, RelocInfo::Mode rmode);
588 void Call(ExternalReference ext);
589 void Call(Handle<Code> code_object, RelocInfo::Mode rmode);
590
591 // Compare object type for heap object.
592 // Always use unsigned comparisons: above and below, not less and greater.
593 // Incoming register is heap_object and outgoing register is map.
594 // They may be the same register, and may be kScratchRegister.
595 void CmpObjectType(Register heap_object, InstanceType type, Register map);
596
597 // Compare instance type for map.
598 // Always use unsigned comparisons: above and below, not less and greater.
599 void CmpInstanceType(Register map, InstanceType type);
600
Andrei Popescu31002712010-02-23 13:46:05 +0000601 // Check if the map of an object is equal to a specified map and
602 // branch to label if not. Skip the smi check if not required
603 // (object is known to be a heap object)
604 void CheckMap(Register obj,
605 Handle<Map> map,
606 Label* fail,
607 bool is_heap_object);
608
Leon Clarked91b9f72010-01-27 17:25:45 +0000609 // Check if the object in register heap_object is a string. Afterwards the
610 // register map contains the object map and the register instance_type
611 // contains the instance_type. The registers map and instance_type can be the
612 // same in which case it contains the instance type afterwards. Either of the
613 // registers map and instance_type can be the same as heap_object.
614 Condition IsObjectStringType(Register heap_object,
615 Register map,
616 Register instance_type);
617
Steve Block8defd9f2010-07-08 12:39:36 +0100618 // FCmp compares and pops the two values on top of the FPU stack.
619 // The flag results are similar to integer cmp, but requires unsigned
Steve Blocka7e24c12009-10-30 11:49:00 +0000620 // jcc instructions (je, ja, jae, jb, jbe, je, and jz).
621 void FCmp();
622
Andrei Popescu402d9372010-02-26 13:31:12 +0000623 // Abort execution if argument is not a number. Used in debug code.
Leon Clarkef7060e22010-06-03 12:02:55 +0100624 void AbortIfNotNumber(Register object);
Andrei Popescu402d9372010-02-26 13:31:12 +0000625
Iain Merrick75681382010-08-19 15:07:18 +0100626 // Abort execution if argument is a smi. Used in debug code.
627 void AbortIfSmi(Register object);
628
Steve Block6ded16b2010-05-10 14:33:55 +0100629 // Abort execution if argument is not a smi. Used in debug code.
Leon Clarkef7060e22010-06-03 12:02:55 +0100630 void AbortIfNotSmi(Register object);
Steve Block6ded16b2010-05-10 14:33:55 +0100631
  // Abort execution if argument is not a string. Used in debug code.
633 void AbortIfNotString(Register object);
634
Kristian Monsen9dcf7e22010-06-28 14:14:28 +0100635 // Abort execution if argument is not the root value with the given index.
636 void AbortIfNotRootValue(Register src,
637 Heap::RootListIndex root_value_index,
638 const char* message);
639
Steve Blocka7e24c12009-10-30 11:49:00 +0000640 // ---------------------------------------------------------------------------
641 // Exception handling
642
643 // Push a new try handler and link into try handler chain. The return
644 // address must be pushed before calling this helper.
645 void PushTryHandler(CodeLocation try_location, HandlerType type);
646
Leon Clarkee46be812010-01-19 14:06:41 +0000647 // Unlink the stack handler on top of the stack from the try handler chain.
648 void PopTryHandler();
Steve Blocka7e24c12009-10-30 11:49:00 +0000649
650 // ---------------------------------------------------------------------------
651 // Inline caching support
652
Steve Blocka7e24c12009-10-30 11:49:00 +0000653 // Generate code for checking access rights - used for security checks
654 // on access to global objects across environments. The holder register
655 // is left untouched, but the scratch register and kScratchRegister,
656 // which must be different, are clobbered.
657 void CheckAccessGlobalProxy(Register holder_reg,
658 Register scratch,
659 Label* miss);
660
661
662 // ---------------------------------------------------------------------------
663 // Allocation support
664
665 // Allocate an object in new space. If the new space is exhausted control
666 // continues at the gc_required label. The allocated object is returned in
667 // result and end of the new object is returned in result_end. The register
668 // scratch can be passed as no_reg in which case an additional object
669 // reference will be added to the reloc info. The returned pointers in result
670 // and result_end have not yet been tagged as heap objects. If
671 // result_contains_top_on_entry is true the content of result is known to be
672 // the allocation top on entry (could be result_end from a previous call to
673 // AllocateInNewSpace). If result_contains_top_on_entry is true scratch
674 // should be no_reg as it is never used.
675 void AllocateInNewSpace(int object_size,
676 Register result,
677 Register result_end,
678 Register scratch,
679 Label* gc_required,
680 AllocationFlags flags);
681
682 void AllocateInNewSpace(int header_size,
683 ScaleFactor element_size,
684 Register element_count,
685 Register result,
686 Register result_end,
687 Register scratch,
688 Label* gc_required,
689 AllocationFlags flags);
690
691 void AllocateInNewSpace(Register object_size,
692 Register result,
693 Register result_end,
694 Register scratch,
695 Label* gc_required,
696 AllocationFlags flags);
697
698 // Undo allocation in new space. The object passed and objects allocated after
699 // it will no longer be allocated. Make sure that no pointers are left to the
700 // object(s) no longer allocated as they would be invalid when allocation is
701 // un-done.
702 void UndoAllocationInNewSpace(Register object);
703
Steve Block3ce2e202009-11-05 08:53:23 +0000704 // Allocate a heap number in new space with undefined value. Returns
705 // tagged pointer in result register, or jumps to gc_required if new
706 // space is full.
707 void AllocateHeapNumber(Register result,
708 Register scratch,
709 Label* gc_required);
710
Leon Clarkee46be812010-01-19 14:06:41 +0000711 // Allocate a sequential string. All the header fields of the string object
712 // are initialized.
713 void AllocateTwoByteString(Register result,
714 Register length,
715 Register scratch1,
716 Register scratch2,
717 Register scratch3,
718 Label* gc_required);
719 void AllocateAsciiString(Register result,
720 Register length,
721 Register scratch1,
722 Register scratch2,
723 Register scratch3,
724 Label* gc_required);
725
726 // Allocate a raw cons string object. Only the map field of the result is
727 // initialized.
728 void AllocateConsString(Register result,
729 Register scratch1,
730 Register scratch2,
731 Label* gc_required);
732 void AllocateAsciiConsString(Register result,
733 Register scratch1,
734 Register scratch2,
735 Label* gc_required);
736
Steve Blocka7e24c12009-10-30 11:49:00 +0000737 // ---------------------------------------------------------------------------
738 // Support functions.
739
740 // Check if result is zero and op is negative.
741 void NegativeZeroTest(Register result, Register op, Label* then_label);
742
743 // Check if result is zero and op is negative in code using jump targets.
744 void NegativeZeroTest(CodeGenerator* cgen,
745 Register result,
746 Register op,
747 JumpTarget* then_target);
748
749 // Check if result is zero and any of op1 and op2 are negative.
750 // Register scratch is destroyed, and it must be different from op2.
751 void NegativeZeroTest(Register result, Register op1, Register op2,
752 Register scratch, Label* then_label);
753
754 // Try to get function prototype of a function and puts the value in
755 // the result register. Checks that the function really is a
756 // function and jumps to the miss label if the fast checks fail. The
757 // function register will be untouched; the other register may be
758 // clobbered.
759 void TryGetFunctionPrototype(Register function,
760 Register result,
761 Label* miss);
762
763 // Generates code for reporting that an illegal operation has
764 // occurred.
765 void IllegalOperation(int num_arguments);
766
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100767 // Picks out an array index from the hash field.
768 // Register use:
769 // hash - holds the index's hash. Clobbered.
770 // index - holds the overwritten index on exit.
771 void IndexFromHash(Register hash, Register index);
772
Steve Blockd0582a62009-12-15 09:54:21 +0000773 // Find the function context up the context chain.
774 void LoadContext(Register dst, int context_chain_length);
775
Steve Blocka7e24c12009-10-30 11:49:00 +0000776 // ---------------------------------------------------------------------------
777 // Runtime calls
778
779 // Call a code stub.
780 void CallStub(CodeStub* stub);
781
Ben Murdochbb769b22010-08-11 14:56:33 +0100782 // Call a code stub and return the code object called. Try to generate
783 // the code if necessary. Do not perform a GC but instead return a retry
784 // after GC failure.
John Reck59135872010-11-02 12:39:01 -0700785 MUST_USE_RESULT MaybeObject* TryCallStub(CodeStub* stub);
Ben Murdochbb769b22010-08-11 14:56:33 +0100786
Leon Clarkee46be812010-01-19 14:06:41 +0000787 // Tail call a code stub (jump).
788 void TailCallStub(CodeStub* stub);
789
Ben Murdochbb769b22010-08-11 14:56:33 +0100790 // Tail call a code stub (jump) and return the code object called. Try to
791 // generate the code if necessary. Do not perform a GC but instead return
792 // a retry after GC failure.
John Reck59135872010-11-02 12:39:01 -0700793 MUST_USE_RESULT MaybeObject* TryTailCallStub(CodeStub* stub);
Ben Murdochbb769b22010-08-11 14:56:33 +0100794
Steve Blocka7e24c12009-10-30 11:49:00 +0000795 // Return from a code stub after popping its arguments.
796 void StubReturn(int argc);
797
798 // Call a runtime routine.
Steve Blocka7e24c12009-10-30 11:49:00 +0000799 void CallRuntime(Runtime::Function* f, int num_arguments);
800
Ben Murdochbb769b22010-08-11 14:56:33 +0100801 // Call a runtime function, returning the CodeStub object called.
802 // Try to generate the stub code if necessary. Do not perform a GC
803 // but instead return a retry after GC failure.
John Reck59135872010-11-02 12:39:01 -0700804 MUST_USE_RESULT MaybeObject* TryCallRuntime(Runtime::Function* f,
805 int num_arguments);
Ben Murdochbb769b22010-08-11 14:56:33 +0100806
Steve Blocka7e24c12009-10-30 11:49:00 +0000807 // Convenience function: Same as above, but takes the fid instead.
808 void CallRuntime(Runtime::FunctionId id, int num_arguments);
809
Ben Murdochbb769b22010-08-11 14:56:33 +0100810 // Convenience function: Same as above, but takes the fid instead.
John Reck59135872010-11-02 12:39:01 -0700811 MUST_USE_RESULT MaybeObject* TryCallRuntime(Runtime::FunctionId id,
812 int num_arguments);
Ben Murdochbb769b22010-08-11 14:56:33 +0100813
Andrei Popescu402d9372010-02-26 13:31:12 +0000814 // Convenience function: call an external reference.
815 void CallExternalReference(const ExternalReference& ext,
816 int num_arguments);
817
Steve Blocka7e24c12009-10-30 11:49:00 +0000818 // Tail call of a runtime routine (jump).
Steve Block6ded16b2010-05-10 14:33:55 +0100819 // Like JumpToExternalReference, but also takes care of passing the number
820 // of parameters.
821 void TailCallExternalReference(const ExternalReference& ext,
822 int num_arguments,
823 int result_size);
824
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -0800825 MUST_USE_RESULT MaybeObject* TryTailCallExternalReference(
826 const ExternalReference& ext, int num_arguments, int result_size);
827
Steve Block6ded16b2010-05-10 14:33:55 +0100828 // Convenience function: tail call a runtime routine (jump).
829 void TailCallRuntime(Runtime::FunctionId fid,
Steve Blocka7e24c12009-10-30 11:49:00 +0000830 int num_arguments,
831 int result_size);
832
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -0800833 MUST_USE_RESULT MaybeObject* TryTailCallRuntime(Runtime::FunctionId fid,
834 int num_arguments,
835 int result_size);
836
Steve Blocka7e24c12009-10-30 11:49:00 +0000837 // Jump to a runtime routine.
Steve Block6ded16b2010-05-10 14:33:55 +0100838 void JumpToExternalReference(const ExternalReference& ext, int result_size);
Steve Blocka7e24c12009-10-30 11:49:00 +0000839
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -0800840 // Jump to a runtime routine.
841 MaybeObject* TryJumpToExternalReference(const ExternalReference& ext,
842 int result_size);
John Reck59135872010-11-02 12:39:01 -0700843
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -0800844 // Prepares stack to put arguments (aligns and so on).
845 // WIN64 calling convention requires to put the pointer to the return value
  // slot into rcx (rcx must be preserved until TryCallApiFunctionAndReturn).
847 // Saves context (rsi). Clobbers rax. Allocates arg_stack_space * kPointerSize
848 // inside the exit frame (not GCed) accessible via StackSpaceOperand.
849 void PrepareCallApiFunction(int arg_stack_space);
850
851 // Calls an API function. Allocates HandleScope, extracts
852 // returned value from handle and propagates exceptions.
853 // Clobbers r12, r14, rbx and caller-save registers. Restores context.
854 // On return removes stack_space * kPointerSize (GCed).
855 MUST_USE_RESULT MaybeObject* TryCallApiFunctionAndReturn(
856 ApiFunction* function, int stack_space);
John Reck59135872010-11-02 12:39:01 -0700857
Leon Clarke4515c472010-02-03 11:58:03 +0000858 // Before calling a C-function from generated code, align arguments on stack.
  // After aligning the frame, arguments must be stored in rsp[0], rsp[8],
860 // etc., not pushed. The argument count assumes all arguments are word sized.
861 // The number of slots reserved for arguments depends on platform. On Windows
862 // stack slots are reserved for the arguments passed in registers. On other
863 // platforms stack slots are only reserved for the arguments actually passed
864 // on the stack.
865 void PrepareCallCFunction(int num_arguments);
866
867 // Calls a C function and cleans up the space for arguments allocated
868 // by PrepareCallCFunction. The called function is not allowed to trigger a
869 // garbage collection, since that might move the code and invalidate the
870 // return address (unless this is somehow accounted for by the called
871 // function).
872 void CallCFunction(ExternalReference function, int num_arguments);
873 void CallCFunction(Register function, int num_arguments);
874
875 // Calculate the number of stack slots to reserve for arguments when calling a
876 // C function.
877 int ArgumentStackSlotsForCFunctionCall(int num_arguments);
Steve Blocka7e24c12009-10-30 11:49:00 +0000878
879 // ---------------------------------------------------------------------------
880 // Utilities
881
882 void Ret();
883
  // Returns the handle that is patched with the code object when this
  // code is installed (see code_object_).
  Handle<Object> CodeObject() { return code_object_; }
885
886
887 // ---------------------------------------------------------------------------
888 // StatsCounter support
889
890 void SetCounter(StatsCounter* counter, int value);
891 void IncrementCounter(StatsCounter* counter, int value);
892 void DecrementCounter(StatsCounter* counter, int value);
893
894
895 // ---------------------------------------------------------------------------
896 // Debugging
897
898 // Calls Abort(msg) if the condition cc is not satisfied.
899 // Use --debug_code to enable.
900 void Assert(Condition cc, const char* msg);
901
Iain Merrick75681382010-08-19 15:07:18 +0100902 void AssertFastElements(Register elements);
903
Steve Blocka7e24c12009-10-30 11:49:00 +0000904 // Like Assert(), but always enabled.
905 void Check(Condition cc, const char* msg);
906
907 // Print a message to stdout and abort execution.
908 void Abort(const char* msg);
909
Steve Block6ded16b2010-05-10 14:33:55 +0100910 // Check that the stack is aligned.
911 void CheckStackAlignment();
912
  // Verify restrictions about code generated in stubs.
  // Set while generating stub code, so generated code can be checked for
  // illegal constructs (e.g. recursive stub calls).
  void set_generating_stub(bool value) { generating_stub_ = value; }
  bool generating_stub() { return generating_stub_; }
  // Controls whether emitting a call to a stub is permitted; checked by
  // debug-mode code paths (see SelectNonSmi).
  void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
  bool allow_stub_calls() { return allow_stub_calls_; }
918
919 private:
Steve Blocka7e24c12009-10-30 11:49:00 +0000920 bool generating_stub_;
921 bool allow_stub_calls_;
Steve Block8defd9f2010-07-08 12:39:36 +0100922
923 // Returns a register holding the smi value. The register MUST NOT be
924 // modified. It may be the "smi 1 constant" register.
925 Register GetSmiConstant(Smi* value);
926
927 // Moves the smi value to the destination register.
928 void LoadSmiConstant(Register dst, Smi* value);
929
Andrei Popescu31002712010-02-23 13:46:05 +0000930 // This handle will be patched with the code object on installation.
931 Handle<Object> code_object_;
Steve Blocka7e24c12009-10-30 11:49:00 +0000932
933 // Helper functions for generating invokes.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100934 template <typename LabelType>
Steve Blocka7e24c12009-10-30 11:49:00 +0000935 void InvokePrologue(const ParameterCount& expected,
936 const ParameterCount& actual,
937 Handle<Code> code_constant,
938 Register code_register,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100939 LabelType* done,
Steve Blocka7e24c12009-10-30 11:49:00 +0000940 InvokeFlag flag);
941
Steve Blocka7e24c12009-10-30 11:49:00 +0000942 // Activation support.
943 void EnterFrame(StackFrame::Type type);
944 void LeaveFrame(StackFrame::Type type);
945
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100946 void EnterExitFramePrologue(bool save_rax);
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -0800947
948 // Allocates arg_stack_space * kPointerSize memory (not GCed) on the stack
949 // accessible via StackSpaceOperand.
950 void EnterExitFrameEpilogue(int arg_stack_space);
951
952 void LeaveExitFrameEpilogue();
Ben Murdochbb769b22010-08-11 14:56:33 +0100953
Steve Blocka7e24c12009-10-30 11:49:00 +0000954 // Allocation support helpers.
Steve Block6ded16b2010-05-10 14:33:55 +0100955 // Loads the top of new-space into the result register.
Steve Block6ded16b2010-05-10 14:33:55 +0100956 // Otherwise the address of the new-space top is loaded into scratch (if
957 // scratch is valid), and the new-space top is loaded into result.
Steve Blocka7e24c12009-10-30 11:49:00 +0000958 void LoadAllocationTopHelper(Register result,
Steve Blocka7e24c12009-10-30 11:49:00 +0000959 Register scratch,
960 AllocationFlags flags);
Steve Block6ded16b2010-05-10 14:33:55 +0100961 // Update allocation top with value in result_end register.
962 // If scratch is valid, it contains the address of the allocation top.
Steve Blocka7e24c12009-10-30 11:49:00 +0000963 void UpdateAllocationTopHelper(Register result_end, Register scratch);
Ben Murdochbb769b22010-08-11 14:56:33 +0100964
965 // Helper for PopHandleScope. Allowed to perform a GC and returns
966 // NULL if gc_allowed. Does not perform a GC if !gc_allowed, and
967 // possibly returns a failure object indicating an allocation failure.
968 Object* PopHandleScopeHelper(Register saved,
969 Register scratch,
970 bool gc_allowed);
Steve Blocka7e24c12009-10-30 11:49:00 +0000971};
972
973
// The code patcher is used to patch (typically) small parts of code e.g. for
// debugging and other types of instrumentation. When using the code patcher
// the exact number of bytes specified must be emitted. It is not legal to emit
// relocation information. If any of these constraints are violated it causes
// an assertion.
class CodePatcher {
 public:
  // Creates a patcher expected to emit exactly 'size' bytes at 'address'.
  CodePatcher(byte* address, int size);
  virtual ~CodePatcher();

  // Macro assembler to emit code.
  MacroAssembler* masm() { return &masm_; }

 private:
  byte* address_;  // The address of the code being patched.
  int size_;  // Number of bytes of the expected patch size.
  MacroAssembler masm_;  // Macro assembler used to generate the code.
};
992
993
994// -----------------------------------------------------------------------------
995// Static helper functions.
996
997// Generate an Operand for loading a field from an object.
998static inline Operand FieldOperand(Register object, int offset) {
999 return Operand(object, offset - kHeapObjectTag);
1000}
1001
1002
1003// Generate an Operand for loading an indexed field from an object.
1004static inline Operand FieldOperand(Register object,
1005 Register index,
1006 ScaleFactor scale,
1007 int offset) {
1008 return Operand(object, index, scale, offset - kHeapObjectTag);
1009}
1010
1011
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08001012static inline Operand ContextOperand(Register context, int index) {
1013 return Operand(context, Context::SlotOffset(index));
1014}
1015
1016
1017static inline Operand GlobalObjectOperand() {
1018 return ContextOperand(rsi, Context::GLOBAL_INDEX);
1019}
1020
1021
1022// Provides access to exit frame stack space (not GCed).
1023static inline Operand StackSpaceOperand(int index) {
1024#ifdef _WIN64
1025 const int kShaddowSpace = 4;
1026 return Operand(rsp, (index + kShaddowSpace) * kPointerSize);
1027#else
1028 return Operand(rsp, index * kPointerSize);
1029#endif
1030}
1031
1032
1033
#ifdef GENERATED_CODE_COVERAGE
extern void LogGeneratedCodeCoverage(const char* file_line);
#define CODE_COVERAGE_STRINGIFY(x) #x
#define CODE_COVERAGE_TOSTRING(x) CODE_COVERAGE_STRINGIFY(x)
#define __FILE_LINE__ __FILE__ ":" CODE_COVERAGE_TOSTRING(__LINE__)
// NOTE(review): this coverage shim appears to be carried over from the ia32
// macro assembler -- pushfd/pushad/popad/popfd are 32-bit-only mnemonics and
// reinterpret_cast<int> truncates a 64-bit pointer. It is only compiled when
// GENERATED_CODE_COVERAGE is defined; confirm it builds before enabling on
// x64.
#define ACCESS_MASM(masm) { \
    byte* x64_coverage_function = \
        reinterpret_cast<byte*>(FUNCTION_ADDR(LogGeneratedCodeCoverage)); \
    masm->pushfd(); \
    masm->pushad(); \
    masm->push(Immediate(reinterpret_cast<int>(&__FILE_LINE__))); \
    masm->call(x64_coverage_function, RelocInfo::RUNTIME_ENTRY); \
    masm->pop(rax); \
    masm->popad(); \
    masm->popfd(); \
  } \
  masm->
#else
#define ACCESS_MASM(masm) masm->
#endif
1054
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001055// -----------------------------------------------------------------------------
1056// Template implementations.
1057
1058static int kSmiShift = kSmiTagSize + kSmiShiftSize;
1059
1060
// Negate the smi in src, placing the result in dst. Jumps to on_smi_result
// if the negation produced a valid smi; falls through otherwise (negating
// zero or Smi::kMinValue cannot produce a smi). On the fall-through path
// src is left with its original value.
template <typename LabelType>
void MacroAssembler::SmiNeg(Register dst,
                            Register src,
                            LabelType* on_smi_result) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    movq(kScratchRegister, src);  // Back up src so it can be restored.
    neg(dst);  // Low 32 bits are retained as zero by negation.
    // Test if result is zero or Smi::kMinValue (negation maps both to
    // themselves, so result == original input exactly in those cases).
    cmpq(dst, kScratchRegister);
    j(not_equal, on_smi_result);
    movq(src, kScratchRegister);  // Fall-through (failure): restore src.
  } else {
    movq(dst, src);
    neg(dst);
    cmpq(dst, src);
    // If the result is zero or Smi::kMinValue, negation failed to create a smi.
    j(not_equal, on_smi_result);
  }
}
1081
1082
// Add the smis in src1 and src2, placing the result in dst. Jumps to
// on_not_smi_result if the addition overflowed. When dst aliases src1 the
// addition is done in kScratchRegister first, so src1 keeps its original
// value on the overflow path.
template <typename LabelType>
void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2,
                            LabelType* on_not_smi_result) {
  ASSERT_NOT_NULL(on_not_smi_result);
  ASSERT(!dst.is(src2));
  if (dst.is(src1)) {
    movq(kScratchRegister, src1);
    addq(kScratchRegister, src2);
    j(overflow, on_not_smi_result);
    movq(dst, kScratchRegister);
  } else {
    movq(dst, src1);
    addq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}
1101
1102
// Subtract the smi in src2 from the smi in src1, placing the result in dst.
// Jumps to on_not_smi_result if the subtraction overflowed. When dst aliases
// src1, overflow is detected with cmpq (which sets the same flags as subq)
// before the destructive subtraction, so src1 is unchanged on the slow path.
template <typename LabelType>
void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            Register src2,
                            LabelType* on_not_smi_result) {
  ASSERT_NOT_NULL(on_not_smi_result);
  ASSERT(!dst.is(src2));
  if (dst.is(src1)) {
    cmpq(dst, src2);
    j(overflow, on_not_smi_result);
    subq(dst, src2);
  } else {
    movq(dst, src1);
    subq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}
1120
1121
// Subtract the smi held in the memory operand src2 from the smi in src1,
// placing the result in dst. Jumps to on_not_smi_result on overflow. When
// dst aliases src1, src2 is loaded into kScratchRegister and overflow is
// checked with cmpq first, so src1 is unchanged on the slow path.
template <typename LabelType>
void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            const Operand& src2,
                            LabelType* on_not_smi_result) {
  ASSERT_NOT_NULL(on_not_smi_result);
  if (dst.is(src1)) {
    movq(kScratchRegister, src2);
    cmpq(src1, kScratchRegister);
    j(overflow, on_not_smi_result);
    subq(src1, kScratchRegister);
  } else {
    movq(dst, src1);
    subq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}
1139
1140
// Multiply the smis in src1 and src2, placing the result in dst. Jumps to
// on_not_smi_result on overflow, or when the product is zero with one
// negative factor (negative zero is not representable as a smi). On the
// slow path src1 is restored to its original value when dst aliases it.
// The multiplication untags src1 first so the product is a correctly
// tagged smi.
template <typename LabelType>
void MacroAssembler::SmiMul(Register dst,
                            Register src1,
                            Register src2,
                            LabelType* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));

  if (dst.is(src1)) {
    NearLabel failure, zero_correct_result;
    movq(kScratchRegister, src1);  // Create backup for later testing.
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, &failure);

    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    NearLabel correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result);

    // Product is zero: sign of (src1 XOR src2) tells whether exactly one
    // factor was negative.
    movq(dst, kScratchRegister);
    xor_(dst, src2);
    j(positive, &zero_correct_result);  // Result was positive zero.

    bind(&failure);  // Reused failure exit, restores src1.
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);

    bind(&zero_correct_result);
    xor_(dst, dst);  // Result is a (positive) zero smi.

    bind(&correct_result);
  } else {
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, on_not_smi_result);
    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    NearLabel correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result);
    // One of src1 and src2 is zero, so check whether the other is
    // negative.
    movq(kScratchRegister, src1);
    xor_(kScratchRegister, src2);
    j(negative, on_not_smi_result);
    bind(&correct_result);
  }
}
1193
1194
// Add the smi constant to the value in src, placing the result in dst.
// Unlike SmiAddConstant, src is not assumed to be a smi: jumps to
// on_not_smi_result if src is not a smi, or if the addition overflows.
// src keeps its original value on the slow path.
template <typename LabelType>
void MacroAssembler::SmiTryAddConstant(Register dst,
                                       Register src,
                                       Smi* constant,
                                       LabelType* on_not_smi_result) {
  // Does not assume that src is a smi.
  ASSERT_EQ(static_cast<int>(1), static_cast<int>(kSmiTagMask));
  ASSERT_EQ(0, kSmiTag);
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));

  JumpIfNotSmi(src, on_not_smi_result);
  // When dst aliases src, do the addition in the scratch register so src
  // survives an overflow.
  Register tmp = (dst.is(src) ? kScratchRegister : dst);
  LoadSmiConstant(tmp, constant);
  addq(tmp, src);
  j(overflow, on_not_smi_result);
  if (dst.is(src)) {
    movq(dst, tmp);
  }
}
1215
1216
// Add the smi constant to the smi in src, placing the result in dst.
// Jumps to on_not_smi_result on overflow; src keeps its original value on
// that path (the add is staged through kScratchRegister when dst aliases
// src). A zero constant degenerates to a plain move.
template <typename LabelType>
void MacroAssembler::SmiAddConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    LabelType* on_not_smi_result) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    LoadSmiConstant(kScratchRegister, constant);
    addq(kScratchRegister, src);
    j(overflow, on_not_smi_result);
    movq(dst, kScratchRegister);
  } else {
    LoadSmiConstant(dst, constant);
    addq(dst, src);
    j(overflow, on_not_smi_result);
  }
}
1239
1240
// Subtract the smi constant from the smi in src, placing the result in dst.
// Jumps to on_not_smi_result on overflow; src keeps its original value on
// that path. Smi::kMinValue needs special handling because its negation is
// not representable, so the subtraction cannot be rewritten as an addition
// of the negated constant in that case.
template <typename LabelType>
void MacroAssembler::SmiSubConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    LabelType* on_not_smi_result) {
  if (constant->value() == 0) {
    // Subtracting zero is a plain move.
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    if (constant->value() == Smi::kMinValue) {
      // Subtracting min-value from any non-negative value will overflow.
      // We test the non-negativeness before doing the subtraction.
      testq(src, src);
      j(not_sign, on_not_smi_result);
      LoadSmiConstant(kScratchRegister, constant);
      subq(dst, kScratchRegister);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(kScratchRegister, Smi::FromInt(-constant->value()));
      addq(kScratchRegister, dst);
      j(overflow, on_not_smi_result);
      movq(dst, kScratchRegister);
    }
  } else {
    if (constant->value() == Smi::kMinValue) {
      // Subtracting min-value from any non-negative value will overflow.
      // We test the non-negativeness before doing the subtraction.
      testq(src, src);
      j(not_sign, on_not_smi_result);
      LoadSmiConstant(dst, constant);
      // Adding and subtracting the min-value gives the same result, it only
      // differs on the overflow bit, which we don't check here.
      addq(dst, src);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(dst, Smi::FromInt(-(constant->value())));
      addq(dst, src);
      j(overflow, on_not_smi_result);
    }
  }
}
1284
1285
1286template <typename LabelType>
1287void MacroAssembler::SmiDiv(Register dst,
1288 Register src1,
1289 Register src2,
1290 LabelType* on_not_smi_result) {
1291 ASSERT(!src1.is(kScratchRegister));
1292 ASSERT(!src2.is(kScratchRegister));
1293 ASSERT(!dst.is(kScratchRegister));
1294 ASSERT(!src2.is(rax));
1295 ASSERT(!src2.is(rdx));
1296 ASSERT(!src1.is(rdx));
1297
1298 // Check for 0 divisor (result is +/-Infinity).
1299 NearLabel positive_divisor;
1300 testq(src2, src2);
1301 j(zero, on_not_smi_result);
1302
1303 if (src1.is(rax)) {
1304 movq(kScratchRegister, src1);
1305 }
1306 SmiToInteger32(rax, src1);
1307 // We need to rule out dividing Smi::kMinValue by -1, since that would
1308 // overflow in idiv and raise an exception.
1309 // We combine this with negative zero test (negative zero only happens
1310 // when dividing zero by a negative number).
1311
1312 // We overshoot a little and go to slow case if we divide min-value
1313 // by any negative value, not just -1.
1314 NearLabel safe_div;
1315 testl(rax, Immediate(0x7fffffff));
1316 j(not_zero, &safe_div);
1317 testq(src2, src2);
1318 if (src1.is(rax)) {
1319 j(positive, &safe_div);
1320 movq(src1, kScratchRegister);
1321 jmp(on_not_smi_result);
1322 } else {
1323 j(negative, on_not_smi_result);
1324 }
1325 bind(&safe_div);
1326
1327 SmiToInteger32(src2, src2);
1328 // Sign extend src1 into edx:eax.
1329 cdq();
1330 idivl(src2);
1331 Integer32ToSmi(src2, src2);
1332 // Check that the remainder is zero.
1333 testl(rdx, rdx);
1334 if (src1.is(rax)) {
1335 NearLabel smi_result;
1336 j(zero, &smi_result);
1337 movq(src1, kScratchRegister);
1338 jmp(on_not_smi_result);
1339 bind(&smi_result);
1340 } else {
1341 j(not_zero, on_not_smi_result);
1342 }
1343 if (!dst.is(src1) && src1.is(rax)) {
1344 movq(src1, kScratchRegister);
1345 }
1346 Integer32ToSmi(dst, rax);
1347}
1348
1349
// Compute the remainder of dividing the smi in src1 by the smi in src2,
// placing the result in dst. Jumps to on_not_smi_result when the result is
// not a smi: division by zero, Smi::kMinValue % -1 (would overflow idiv),
// or a zero remainder with a negative dividend (negative zero). Uses rax
// and rdx for idivl; inputs are retagged/restored on every exit path.
template <typename LabelType>
void MacroAssembler::SmiMod(Register dst,
                            Register src1,
                            Register src2,
                            LabelType* on_not_smi_result) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!src2.is(rax));
  ASSERT(!src2.is(rdx));
  ASSERT(!src1.is(rdx));
  ASSERT(!src1.is(src2));

  // Division by zero has no smi result.
  testq(src2, src2);
  j(zero, on_not_smi_result);

  if (src1.is(rax)) {
    movq(kScratchRegister, src1);  // Back up src1 so it can be restored.
  }
  SmiToInteger32(rax, src1);
  SmiToInteger32(src2, src2);

  // Test for the edge case of dividing Smi::kMinValue by -1 (will overflow).
  NearLabel safe_div;
  cmpl(rax, Immediate(Smi::kMinValue));
  j(not_equal, &safe_div);
  cmpl(src2, Immediate(-1));
  j(not_equal, &safe_div);
  // Retag inputs and go slow case.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  jmp(on_not_smi_result);
  bind(&safe_div);

  // Sign extend eax into edx:eax.
  cdq();
  idivl(src2);
  // Restore smi tags on inputs.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  // Check for a negative zero result. If the result is zero, and the
  // dividend is negative, go slow to return a floating point negative zero.
  NearLabel smi_result;
  testl(rdx, rdx);
  j(not_zero, &smi_result);
  testq(src1, src1);
  j(negative, on_not_smi_result);
  bind(&smi_result);
  Integer32ToSmi(dst, rdx);
}
1404
1405
// Logically shift the smi in src right by shift_value bits, placing the
// tagged result in dst. Jumps to on_not_smi_result when the unsigned
// interpretation of the result does not fit in a smi (only possible for a
// zero shift of a negative smi, which is tested explicitly). The dst==src
// case is unimplemented because no caller uses it.
template <typename LabelType>
void MacroAssembler::SmiShiftLogicalRightConstant(
    Register dst, Register src, int shift_value, LabelType* on_not_smi_result) {
  // Logic right shift interprets its result as an *unsigned* number.
  if (dst.is(src)) {
    UNIMPLEMENTED();  // Not used.
  } else {
    movq(dst, src);
    if (shift_value == 0) {
      // Shifting by zero leaves the sign bit intact; a negative value is
      // not a valid unsigned smi result.
      testq(dst, dst);
      j(negative, on_not_smi_result);
    }
    // Untag, shift, and retag in two instructions.
    shr(dst, Immediate(shift_value + kSmiShift));
    shl(dst, Immediate(kSmiShift));
  }
}
1422
1423
template <typename LabelType>
void MacroAssembler::SmiShiftLogicalRight(Register dst,
                                          Register src1,
                                          Register src2,
                                          LabelType* on_not_smi_result) {
  // Logical right shift of smi src1 by smi shift count src2, tagged result
  // in dst.  Jumps to on_not_smi_result when the result is not a valid
  // (non-negative) smi.  Uses rcx for the variable shift count; when src1
  // or src2 is rcx, its value is saved in kScratchRegister and restored
  // only on the slow path (on the fast path rcx is clobbered).
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(rcx));
  NearLabel result_ok;
  if (src1.is(rcx) || src2.is(rcx)) {
    movq(kScratchRegister, rcx);
  }
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  // OR in kSmiShift so a single shift both untags dst and applies the
  // requested count (the count is below kSmiShift, so no bits collide).
  orl(rcx, Immediate(kSmiShift));
  shr_cl(dst);  // Shift is rcx modulo 0x1f + 32.
  shl(dst, Immediate(kSmiShift));
  // A negative result here means the unsigned value doesn't fit in a smi
  // (only possible for a zero shift of a negative input).
  testq(dst, dst);
  if (src1.is(rcx) || src2.is(rcx)) {
    NearLabel positive_result;
    j(positive, &positive_result);
    // Restore the register that aliased rcx before taking the slow path.
    if (src1.is(rcx)) {
      movq(src1, kScratchRegister);
    } else {
      movq(src2, kScratchRegister);
    }
    jmp(on_not_smi_result);
    bind(&positive_result);
  } else {
    j(negative, on_not_smi_result);  // src2 was zero and src1 negative.
  }
}
1459
1460
template <typename LabelType>
void MacroAssembler::SelectNonSmi(Register dst,
                                  Register src1,
                                  Register src2,
                                  LabelType* on_not_smis) {
  // Sets dst to whichever of src1/src2 is not a smi, assuming at most one
  // of them is a smi.  If neither is a smi, jumps to on_not_smis.
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(src1));
  ASSERT(!dst.is(src2));
  // Both operands must not be smis.
#ifdef DEBUG
  if (allow_stub_calls()) {  // Check contains a stub call.
    Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
    Check(not_both_smis, "Both registers were smis in SelectNonSmi.");
  }
#endif
  ASSERT_EQ(0, kSmiTag);
  ASSERT_EQ(0, Smi::FromInt(0));
  movl(kScratchRegister, Immediate(kSmiTagMask));
  and_(kScratchRegister, src1);
  testl(kScratchRegister, src2);
  // With kSmiTag == 0, a set tag bit marks a heap object; the test is
  // non-zero only when the tag bit is set in both operands, i.e. when
  // *neither* operand is a smi.
  j(not_zero, on_not_smis);

  // Exactly one operand is a smi.
  ASSERT_EQ(1, static_cast<int>(kSmiTagMask));
  // kScratchRegister still holds src1 & kSmiTag, which is either zero or one.
  subq(kScratchRegister, Immediate(1));
  // If src1 is a smi, then scratch register all 1s, else it is all 0s.
  movq(dst, src1);
  xor_(dst, src2);
  and_(dst, kScratchRegister);
  // If src1 is a smi, dst holds src1 ^ src2, else it is zero.
  xor_(dst, src1);
  // If src1 is a smi, dst is src2, else it is src1, i.e., the non-smi.
}
1498
1499
1500template <typename LabelType>
1501void MacroAssembler::JumpIfSmi(Register src, LabelType* on_smi) {
1502 ASSERT_EQ(0, kSmiTag);
1503 Condition smi = CheckSmi(src);
1504 j(smi, on_smi);
1505}
1506
1507
1508template <typename LabelType>
1509void MacroAssembler::JumpIfNotSmi(Register src, LabelType* on_not_smi) {
1510 Condition smi = CheckSmi(src);
1511 j(NegateCondition(smi), on_not_smi);
1512}
1513
1514
1515template <typename LabelType>
Ben Murdochf87a2032010-10-22 12:50:53 +01001516void MacroAssembler::JumpUnlessNonNegativeSmi(
1517 Register src, LabelType* on_not_smi_or_negative) {
1518 Condition non_negative_smi = CheckNonNegativeSmi(src);
1519 j(NegateCondition(non_negative_smi), on_not_smi_or_negative);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001520}
1521
1522
template <typename LabelType>
void MacroAssembler::JumpIfSmiEqualsConstant(Register src,
                                             Smi* constant,
                                             LabelType* on_equals) {
  // Branch to on_equals when the smi in src equals the given smi constant.
  SmiCompare(src, constant);
  j(equal, on_equals);
}
1530
1531
1532template <typename LabelType>
1533void MacroAssembler::JumpIfNotValidSmiValue(Register src,
1534 LabelType* on_invalid) {
1535 Condition is_valid = CheckInteger32ValidSmiValue(src);
1536 j(NegateCondition(is_valid), on_invalid);
1537}
1538
1539
1540template <typename LabelType>
1541void MacroAssembler::JumpIfUIntNotValidSmiValue(Register src,
1542 LabelType* on_invalid) {
1543 Condition is_valid = CheckUInteger32ValidSmiValue(src);
1544 j(NegateCondition(is_valid), on_invalid);
1545}
1546
1547
1548template <typename LabelType>
1549void MacroAssembler::JumpIfNotBothSmi(Register src1,
1550 Register src2,
1551 LabelType* on_not_both_smi) {
1552 Condition both_smi = CheckBothSmi(src1, src2);
1553 j(NegateCondition(both_smi), on_not_both_smi);
1554}
1555
1556
1557template <typename LabelType>
Ben Murdochf87a2032010-10-22 12:50:53 +01001558void MacroAssembler::JumpUnlessBothNonNegativeSmi(Register src1,
1559 Register src2,
1560 LabelType* on_not_both_smi) {
1561 Condition both_smi = CheckBothNonNegativeSmi(src1, src2);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001562 j(NegateCondition(both_smi), on_not_both_smi);
1563}
1564
1565
template <typename LabelType>
void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register first_object,
                                                         Register second_object,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         LabelType* on_fail) {
  // Jumps to on_fail unless both objects are flat (sequential) ascii
  // strings.  Clobbers scratch1 and scratch2 with the instance types.
  // Check that both objects are not smis.
  Condition either_smi = CheckEitherSmi(first_object, second_object);
  j(either_smi, on_fail);

  // Load instance type for both strings.
  movq(scratch1, FieldOperand(first_object, HeapObject::kMapOffset));
  movq(scratch2, FieldOperand(second_object, HeapObject::kMapOffset));
  movzxbl(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzxbl(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ascii strings.
  ASSERT(kNotStringTag != 0);
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;

  andl(scratch1, Immediate(kFlatAsciiStringMask));
  andl(scratch2, Immediate(kFlatAsciiStringMask));
  // Interleave the bits to check both scratch1 and scratch2 in one test.
  // The assert guarantees the mask bits don't overlap after the 3-bit
  // shift, so scratch1 + scratch2 * 8 encodes both masked types.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmpl(scratch1,
       Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
  j(not_equal, on_fail);
}
1597
1598
template <typename LabelType>
void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Register scratch,
    LabelType *failure) {
  // Jumps to failure unless the given instance type denotes a sequential
  // ascii string.  Clobbers scratch (instance_type may serve as its own
  // scratch).
  if (!scratch.is(instance_type)) {
    movl(scratch, instance_type);
  }

  // Mask off everything except the string-ness, representation and
  // encoding bits, then compare against the sequential-ascii combination.
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;

  andl(scratch, Immediate(kFlatAsciiStringMask));
  cmpl(scratch, Immediate(kStringTag | kSeqStringTag | kAsciiStringTag));
  j(not_equal, failure);
}
1615
1616
template <typename LabelType>
void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialAscii(
    Register first_object_instance_type,
    Register second_object_instance_type,
    Register scratch1,
    Register scratch2,
    LabelType* on_fail) {
  // Jumps to on_fail unless both instance types denote flat (sequential)
  // ascii strings.  Clobbers scratch1 and scratch2.
  // Load instance type for both strings.
  movq(scratch1, first_object_instance_type);
  movq(scratch2, second_object_instance_type);

  // Check that both are flat ascii strings.
  ASSERT(kNotStringTag != 0);
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;

  andl(scratch1, Immediate(kFlatAsciiStringMask));
  andl(scratch2, Immediate(kFlatAsciiStringMask));
  // Interleave the bits to check both scratch1 and scratch2 in one test.
  // The assert guarantees the masked values don't overlap after the 3-bit
  // shift, so a single compare checks both types at once.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmpl(scratch1,
       Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
  j(not_equal, on_fail);
}
1643
1644
template <typename LabelType>
void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                LabelType* branch) {
  // Tests whether object lies in the new space and jumps to branch on the
  // given condition (the caller picks equal/not_equal semantics via cc).
  // Clobbers scratch and kScratchRegister.
  if (Serializer::enabled()) {
    // Can't do arithmetic on external references if it might get serialized.
    // The mask isn't really an address.  We load it as an external reference in
    // case the size of the new space is different between the snapshot maker
    // and the running system.
    if (scratch.is(object)) {
      movq(kScratchRegister, ExternalReference::new_space_mask());
      and_(scratch, kScratchRegister);
    } else {
      movq(scratch, ExternalReference::new_space_mask());
      and_(scratch, object);
    }
    // scratch == new_space_start iff the object is in new space.
    movq(kScratchRegister, ExternalReference::new_space_start());
    cmpq(scratch, kScratchRegister);
    j(cc, branch);
  } else {
    ASSERT(is_int32(static_cast<int64_t>(Heap::NewSpaceMask())));
    intptr_t new_space_start =
        reinterpret_cast<intptr_t>(Heap::NewSpaceStart());
    // Compute (object - new_space_start) & mask; the and_ sets the zero
    // flag, which is zero exactly when the object is in new space.
    movq(kScratchRegister, -new_space_start, RelocInfo::NONE);
    if (scratch.is(object)) {
      addq(scratch, kScratchRegister);
    } else {
      lea(scratch, Operand(object, kScratchRegister, times_1, 0));
    }
    and_(scratch, Immediate(static_cast<int32_t>(Heap::NewSpaceMask())));
    j(cc, branch);
  }
}
1679
1680
template <typename LabelType>
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    Register code_register,
                                    LabelType* done,
                                    InvokeFlag flag) {
  // Emits the argument-count check that precedes a function invocation.
  // When expected != actual, control is transferred to the arguments
  // adaptor trampoline (with actual in rax, expected in rbx, and the code
  // entry in rdx); otherwise execution falls through (or jumps to done
  // after the adaptor call when flag == CALL_FUNCTION).
  bool definitely_matches = false;
  NearLabel invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      // The adaptor expects the actual count in rax.
      Set(rax, actual.immediate());
      if (expected.immediate() ==
              SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
        // Don't worry about adapting arguments for built-ins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        // The adaptor expects the expected count in rbx.
        Set(rbx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmpq(expected.reg(), Immediate(actual.immediate()));
      j(equal, &invoke);
      ASSERT(expected.reg().is(rbx));
      Set(rax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmpq(expected.reg(), actual.reg());
      j(equal, &invoke);
      ASSERT(actual.reg().is(rax));
      ASSERT(expected.reg().is(rbx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
    // Put the code entry in rdx, where the adaptor expects it.
    if (!code_constant.is_null()) {
      movq(rdx, code_constant, RelocInfo::EMBEDDED_OBJECT);
      addq(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_register.is(rdx)) {
      movq(rdx, code_register);
    }

    if (flag == CALL_FUNCTION) {
      Call(adaptor, RelocInfo::CODE_TARGET);
      jmp(done);
    } else {
      // JUMP_FUNCTION: tail-transfer to the adaptor; no return here.
      Jump(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
1745
Steve Blocka7e24c12009-10-30 11:49:00 +00001746
1747} } // namespace v8::internal
1748
1749#endif // V8_X64_MACRO_ASSEMBLER_X64_H_