// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_X64_MACRO_ASSEMBLER_X64_H_
#define V8_X64_MACRO_ASSEMBLER_X64_H_

#include "assembler.h"

namespace v8 {
namespace internal {

// Flags used for the AllocateInNewSpace functions.
enum AllocationFlags {
  // No special flags.
  NO_ALLOCATION_FLAGS = 0,
  // Return the pointer to the allocated object already tagged as a heap
  // object.
  TAG_OBJECT = 1 << 0,
  // The content of the result register already contains the allocation top in
  // new space.
  RESULT_CONTAINS_TOP = 1 << 1
};

// Default scratch register used by MacroAssembler (and other code that needs
// a spare register). The register isn't callee saved, and is not used by the
// function calling convention.
static const Register kScratchRegister = { 10 };      // r10.
static const Register kSmiConstantRegister = { 15 };  // r15 (callee save).
static const Register kRootRegister = { 13 };         // r13 (callee save).
// Value of smi in kSmiConstantRegister.
static const int kSmiConstantRegisterValue = 1;

// Convenience for platform-independent signatures.
typedef Operand MemOperand;

// Forward declaration.
class JumpTarget;

struct SmiIndex {
  SmiIndex(Register index_register, ScaleFactor scale)
      : reg(index_register),
        scale(scale) {}
  Register reg;
  ScaleFactor scale;
};

// MacroAssembler implements a collection of frequently used macros.
class MacroAssembler: public Assembler {
 public:
  MacroAssembler(void* buffer, int size);

  void LoadRoot(Register destination, Heap::RootListIndex index);
  void CompareRoot(Register with, Heap::RootListIndex index);
  void CompareRoot(Operand with, Heap::RootListIndex index);
  void PushRoot(Heap::RootListIndex index);
  void StoreRoot(Register source, Heap::RootListIndex index);

  // ---------------------------------------------------------------------------
  // GC Support

  // For page containing |object| mark region covering |addr| dirty.
  // RecordWriteHelper only works if the object is not in new
  // space.
  void RecordWriteHelper(Register object,
                         Register addr,
                         Register scratch);

  // Check if object is in new space. The condition cc can be equal or
  // not_equal. If it is equal a jump will be done if the object is in new
  // space. The register scratch can be the object itself, but it will be
  // clobbered.
  template <typename LabelType>
  void InNewSpace(Register object,
                  Register scratch,
                  Condition cc,
                  LabelType* branch);

  // For page containing |object| mark region covering [object+offset]
  // dirty. |object| is the object being stored into, |value| is the
  // object being stored. If |offset| is zero, then the |scratch|
  // register contains the array index into the elements array
  // represented as an untagged 32-bit integer. All registers are
  // clobbered by the operation. RecordWrite filters out smis so it
  // does not update the write barrier if the value is a smi.
  void RecordWrite(Register object,
                   int offset,
                   Register value,
                   Register scratch);

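  // Illustrative sketch (not part of the original header): a store into an
  // object field followed by the write-barrier update. The field offset and
  // register choices are hypothetical.
  //
  //   masm->movq(FieldOperand(rbx, JSObject::kPropertiesOffset), rax);
  //   masm->RecordWrite(rbx, JSObject::kPropertiesOffset, rax, rcx);
  //
  // Here rbx holds the object stored into, rax the value being stored, and
  // rcx a scratch register; all of them are clobbered by RecordWrite.
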
  // For page containing |object| mark region covering [address]
  // dirty. |object| is the object being stored into, |value| is the
  // object being stored. All registers are clobbered by the
  // operation. RecordWrite filters out smis so it does not update
  // the write barrier if the value is a smi.
  void RecordWrite(Register object,
                   Register address,
                   Register value);

  // For page containing |object| mark region covering [object+offset] dirty.
  // The value is known to not be a smi.
  // object is the object being stored into, value is the object being stored.
  // If offset is zero, then the scratch register contains the array index into
  // the elements array represented as an untagged 32-bit integer.
  // All registers are clobbered by the operation.
  void RecordWriteNonSmi(Register object,
                         int offset,
                         Register value,
                         Register scratch);

#ifdef ENABLE_DEBUGGER_SUPPORT
  // ---------------------------------------------------------------------------
  // Debugger Support

  void DebugBreak();
#endif

  // ---------------------------------------------------------------------------
  // Stack limit support

  // Do a simple test for stack overflow. This doesn't handle an overflow.
  void StackLimitCheck(Label* on_stack_limit_hit);

  // ---------------------------------------------------------------------------
  // Activation frames

  void EnterInternalFrame() { EnterFrame(StackFrame::INTERNAL); }
  void LeaveInternalFrame() { LeaveFrame(StackFrame::INTERNAL); }

  void EnterConstructFrame() { EnterFrame(StackFrame::CONSTRUCT); }
  void LeaveConstructFrame() { LeaveFrame(StackFrame::CONSTRUCT); }

  // Enter a specific kind of exit frame, either in normal or
  // debug mode. Expects the number of arguments in register rax and
  // sets up the number of arguments in register rdi and the pointer
  // to the first argument in register rsi.
  //
  // Allocates arg_stack_space * kPointerSize memory (not GCed) on the stack
  // accessible via StackSpaceOperand.
  void EnterExitFrame(int arg_stack_space = 0);

  // Enter a specific kind of exit frame. Allocates
  // arg_stack_space * kPointerSize memory (not GCed) on the stack accessible
  // via StackSpaceOperand.
  void EnterApiExitFrame(int arg_stack_space);

  // Leave the current exit frame. Expects/provides the return value in
  // register rax:rdx (untouched) and the pointer to the first
  // argument in register rsi.
  void LeaveExitFrame();

  // Leave the current exit frame. Expects/provides the return value in
  // register rax (untouched).
  void LeaveApiExitFrame();

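  // Illustrative sketch (not part of the original header): a call out of
  // generated code typically brackets the callee with an exit frame. The
  // amount of reserved stack space here is hypothetical.
  //
  //   masm->EnterExitFrame(2);  // Reserve two non-GCed slots, see
  //                             // StackSpaceOperand below.
  //   ...                       // Call out via the exit frame.
  //   masm->LeaveExitFrame();
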
  // ---------------------------------------------------------------------------
  // JavaScript invokes

  // Invoke the JavaScript function code by either calling or jumping.
  void InvokeCode(Register code,
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  InvokeFlag flag);

  void InvokeCode(Handle<Code> code,
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  RelocInfo::Mode rmode,
                  InvokeFlag flag);

  // Invoke the JavaScript function in the given register. Changes the
  // current context to the context in the function before invoking.
  void InvokeFunction(Register function,
                      const ParameterCount& actual,
                      InvokeFlag flag);

  void InvokeFunction(JSFunction* function,
                      const ParameterCount& actual,
                      InvokeFlag flag);

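  // Illustrative sketch (not part of the original header): invoking a
  // function object held in rdi with a known argument count. The argument
  // count and register choice are hypothetical.
  //
  //   ParameterCount actual(2);
  //   masm->InvokeFunction(rdi, actual, CALL_FUNCTION);
  //
  // With JUMP_FUNCTION instead, the invocation is emitted as a tail jump.
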
  // Invoke specified builtin JavaScript function. Adds an entry to
  // the unresolved list if the name does not resolve.
  void InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag);

  // Store the function for the given builtin in the target register.
  void GetBuiltinFunction(Register target, Builtins::JavaScript id);

  // Store the code object for the given builtin in the target register.
  void GetBuiltinEntry(Register target, Builtins::JavaScript id);


  // ---------------------------------------------------------------------------
  // Smi tagging, untagging and operations on tagged smis.

  void InitializeSmiConstantRegister() {
    movq(kSmiConstantRegister,
         reinterpret_cast<uint64_t>(Smi::FromInt(kSmiConstantRegisterValue)),
         RelocInfo::NONE);
  }

  // Conversions between tagged smi values and non-tagged integer values.

  // Tag an integer value. The result must be known to be a valid smi value.
  // Only uses the low 32 bits of the src register. Sets the N and Z flags
  // based on the value of the resulting smi.
  void Integer32ToSmi(Register dst, Register src);

  // Stores an integer32 value into a memory field that already holds a smi.
  void Integer32ToSmiField(const Operand& dst, Register src);

  // Adds constant to src and tags the result as a smi.
  // Result must be a valid smi.
  void Integer64PlusConstantToSmi(Register dst, Register src, int constant);

  // Convert smi to 32-bit integer. I.e., not sign extended into
  // high 32 bits of destination.
  void SmiToInteger32(Register dst, Register src);
  void SmiToInteger32(Register dst, const Operand& src);

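  // Illustrative sketch (not part of the original header): tagging and
  // untagging are a round trip, so a value can move between its int32 and
  // smi representations in place. Register choice is hypothetical.
  //
  //   masm->Integer32ToSmi(rax, rax);  // rax now holds the tagged smi.
  //   masm->SmiToInteger32(rax, rax);  // rax holds the original int32 again.
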
  // Convert smi to 64-bit integer (sign extended if necessary).
  void SmiToInteger64(Register dst, Register src);
  void SmiToInteger64(Register dst, const Operand& src);

  // Multiply a positive smi's integer value by a power of two.
  // Provides result as 64-bit integer value.
  void PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
                                             Register src,
                                             int power);

  // Divide a positive smi's integer value by a power of two.
  // Provides result as 32-bit integer value.
  void PositiveSmiDivPowerOfTwoToInteger32(Register dst,
                                           Register src,
                                           int power);


  // Simple comparison of smis.
  void SmiCompare(Register dst, Register src);
  void SmiCompare(Register dst, Smi* src);
  void SmiCompare(Register dst, const Operand& src);
  void SmiCompare(const Operand& dst, Register src);
  void SmiCompare(const Operand& dst, Smi* src);
  // Compare the int32 in src register to the value of the smi stored at dst.
  void SmiCompareInteger32(const Operand& dst, Register src);
  // Sets sign and zero flags depending on value of smi in register.
  void SmiTest(Register src);

  // Functions performing a check on a known or potential smi. Each returns
  // a condition that is satisfied if the check is successful.

  // Is the value a tagged smi.
  Condition CheckSmi(Register src);

  // Is the value a non-negative tagged smi.
  Condition CheckNonNegativeSmi(Register src);

  // Are both values tagged smis.
  Condition CheckBothSmi(Register first, Register second);

  // Are both values non-negative tagged smis.
  Condition CheckBothNonNegativeSmi(Register first, Register second);

  // Is either value a tagged smi.
  Condition CheckEitherSmi(Register first,
                           Register second,
                           Register scratch = kScratchRegister);

  // Is the value the minimum smi value (since we are using
  // two's complement numbers, negating the value is known to yield
  // a non-smi value).
  Condition CheckIsMinSmi(Register src);

  // Checks whether a 32-bit integer value is valid for conversion
  // to a smi.
  Condition CheckInteger32ValidSmiValue(Register src);

  // Checks whether a 32-bit unsigned integer value is valid for
  // conversion to a smi.
  Condition CheckUInteger32ValidSmiValue(Register src);

  // Test-and-jump functions. Typically combines a check function
  // above with a conditional jump.

  // Jump if the value cannot be represented by a smi.
  template <typename LabelType>
  void JumpIfNotValidSmiValue(Register src, LabelType* on_invalid);

  // Jump if the unsigned integer value cannot be represented by a smi.
  template <typename LabelType>
  void JumpIfUIntNotValidSmiValue(Register src, LabelType* on_invalid);

  // Jump to label if the value is a tagged smi.
  template <typename LabelType>
  void JumpIfSmi(Register src, LabelType* on_smi);

  // Jump to label if the value is not a tagged smi.
  template <typename LabelType>
  void JumpIfNotSmi(Register src, LabelType* on_not_smi);

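  // Illustrative sketch (not part of the original header): the test-and-jump
  // helpers pair naturally with NearLabel for short forward branches. The
  // slow-path label name and register are hypothetical.
  //
  //   NearLabel not_smi;
  //   masm->JumpIfNotSmi(rax, &not_smi);
  //   masm->SmiToInteger32(rax, rax);  // Fast path: operate on the int32.
  //   ...
  //   masm->bind(&not_smi);            // Slow path: rax is a heap object.
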
  // Jump to label if the value is not a non-negative tagged smi.
  template <typename LabelType>
  void JumpUnlessNonNegativeSmi(Register src, LabelType* on_not_smi);

  // Jump to label if the value, which must be a tagged smi, has value equal
  // to the constant.
  template <typename LabelType>
  void JumpIfSmiEqualsConstant(Register src,
                               Smi* constant,
                               LabelType* on_equals);

  // Jump if either or both registers are not smi values.
  template <typename LabelType>
  void JumpIfNotBothSmi(Register src1,
                        Register src2,
                        LabelType* on_not_both_smi);

  // Jump if either or both registers are not non-negative smi values.
  template <typename LabelType>
  void JumpUnlessBothNonNegativeSmi(Register src1, Register src2,
                                    LabelType* on_not_both_smi);

  // Operations on tagged smi values.

  // Smis represent a subset of integers. The subset is always equivalent to
  // a two's complement interpretation of a fixed number of bits.

  // Optimistically adds an integer constant to a supposed smi.
  // If the src is not a smi, or the result is not a smi, jump to
  // the label.
  template <typename LabelType>
  void SmiTryAddConstant(Register dst,
                         Register src,
                         Smi* constant,
                         LabelType* on_not_smi_result);

  // Add an integer constant to a tagged smi, giving a tagged smi as result.
  // No overflow testing on the result is done.
  void SmiAddConstant(Register dst, Register src, Smi* constant);

  // Add an integer constant to a tagged smi, giving a tagged smi as result.
  // No overflow testing on the result is done.
  void SmiAddConstant(const Operand& dst, Smi* constant);

  // Add an integer constant to a tagged smi, giving a tagged smi as result,
  // or jumping to a label if the result cannot be represented by a smi.
  template <typename LabelType>
  void SmiAddConstant(Register dst,
                      Register src,
                      Smi* constant,
                      LabelType* on_not_smi_result);

  // Subtract an integer constant from a tagged smi, giving a tagged smi as
  // result. No testing on the result is done. Sets the N and Z flags
  // based on the value of the resulting integer.
  void SmiSubConstant(Register dst, Register src, Smi* constant);

  // Subtract an integer constant from a tagged smi, giving a tagged smi as
  // result, or jumping to a label if the result cannot be represented by a smi.
  template <typename LabelType>
  void SmiSubConstant(Register dst,
                      Register src,
                      Smi* constant,
                      LabelType* on_not_smi_result);

  // Negating a smi can give a negative zero or too large positive value.
  // NOTICE: This operation jumps on success, not failure!
  template <typename LabelType>
  void SmiNeg(Register dst,
              Register src,
              LabelType* on_smi_result);

  // Adds smi values and returns the result as a smi.
  // If dst is src1, then src1 will be destroyed, even if
  // the operation is unsuccessful.
  template <typename LabelType>
  void SmiAdd(Register dst,
              Register src1,
              Register src2,
              LabelType* on_not_smi_result);

  void SmiAdd(Register dst,
              Register src1,
              Register src2);

  // Subtracts smi values and returns the result as a smi.
  // If dst is src1, then src1 will be destroyed, even if
  // the operation is unsuccessful.
  template <typename LabelType>
  void SmiSub(Register dst,
              Register src1,
              Register src2,
              LabelType* on_not_smi_result);

  void SmiSub(Register dst,
              Register src1,
              Register src2);

  template <typename LabelType>
  void SmiSub(Register dst,
              Register src1,
              const Operand& src2,
              LabelType* on_not_smi_result);

  void SmiSub(Register dst,
              Register src1,
              const Operand& src2);

  // Multiplies smi values and returns the result as a smi,
  // if possible.
  // If dst is src1, then src1 will be destroyed, even if
  // the operation is unsuccessful.
  template <typename LabelType>
  void SmiMul(Register dst,
              Register src1,
              Register src2,
              LabelType* on_not_smi_result);

  // Divides one smi by another and returns the quotient.
  // Clobbers rax and rdx registers.
  template <typename LabelType>
  void SmiDiv(Register dst,
              Register src1,
              Register src2,
              LabelType* on_not_smi_result);

  // Divides one smi by another and returns the remainder.
  // Clobbers rax and rdx registers.
  template <typename LabelType>
  void SmiMod(Register dst,
              Register src1,
              Register src2,
              LabelType* on_not_smi_result);

  // Bitwise operations.
  void SmiNot(Register dst, Register src);
  void SmiAnd(Register dst, Register src1, Register src2);
  void SmiOr(Register dst, Register src1, Register src2);
  void SmiXor(Register dst, Register src1, Register src2);
  void SmiAndConstant(Register dst, Register src1, Smi* constant);
  void SmiOrConstant(Register dst, Register src1, Smi* constant);
  void SmiXorConstant(Register dst, Register src1, Smi* constant);

  void SmiShiftLeftConstant(Register dst,
                            Register src,
                            int shift_value);
  template <typename LabelType>
  void SmiShiftLogicalRightConstant(Register dst,
                                    Register src,
                                    int shift_value,
                                    LabelType* on_not_smi_result);
  void SmiShiftArithmeticRightConstant(Register dst,
                                       Register src,
                                       int shift_value);

  // Shifts a smi value to the left, and returns the result if that is a smi.
  // Uses and clobbers rcx, so dst may not be rcx.
  void SmiShiftLeft(Register dst,
                    Register src1,
                    Register src2);
  // Shifts a smi value to the right, shifting in zero bits at the top, and
  // returns the unsigned interpretation of the result if that is a smi.
  // Uses and clobbers rcx, so dst may not be rcx.
  template <typename LabelType>
  void SmiShiftLogicalRight(Register dst,
                            Register src1,
                            Register src2,
                            LabelType* on_not_smi_result);
  // Shifts a smi value to the right, sign extending the top, and
  // returns the signed interpretation of the result. That will always
  // be a valid smi value, since it's numerically smaller than the
  // original.
  // Uses and clobbers rcx, so dst may not be rcx.
  void SmiShiftArithmeticRight(Register dst,
                               Register src1,
                               Register src2);

  // Specialized operations

  // Select the non-smi register of two registers where exactly one is a
  // smi. If neither is a smi, jump to the failure label.
  template <typename LabelType>
  void SelectNonSmi(Register dst,
                    Register src1,
                    Register src2,
                    LabelType* on_not_smis);

  // Converts, if necessary, a smi to a combination of number and
  // multiplier to be used as a scaled index.
  // The src register contains a *positive* smi value. The shift is the
  // power of two to multiply the index value by (e.g.
  // to index by smi-value * kPointerSize, pass the smi and kPointerSizeLog2).
  // The returned index register may be either src or dst, depending
  // on what is most efficient. If src and dst are different registers,
  // src is always unchanged.
  SmiIndex SmiToIndex(Register dst, Register src, int shift);

  // Converts a positive smi to a negative index.
  SmiIndex SmiToNegativeIndex(Register dst, Register src, int shift);

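  // Illustrative sketch (not part of the original header): indexing into a
  // FixedArray with a smi index. Register choices are hypothetical.
  //
  //   SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  //   masm->movq(rax, FieldOperand(rcx, index.reg, index.scale,
  //                                FixedArray::kHeaderSize));
  //
  // SmiToIndex folds the untagging into the scale factor where possible, so
  // the element load needs no separate untag instruction.
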
  // Basic Smi operations.
  void Move(Register dst, Smi* source) {
    LoadSmiConstant(dst, source);
  }

  void Move(const Operand& dst, Smi* source) {
    Register constant = GetSmiConstant(source);
    movq(dst, constant);
  }

  void Push(Smi* smi);
  void Test(const Operand& dst, Smi* source);

  // ---------------------------------------------------------------------------
  // String macros.
  template <typename LabelType>
  void JumpIfNotBothSequentialAsciiStrings(Register first_object,
                                           Register second_object,
                                           Register scratch1,
                                           Register scratch2,
                                           LabelType* on_not_both_flat_ascii);

  // Check whether the instance type represents a flat ascii string. Jump to
  // the label if not. If the instance type can be scratched, specify the same
  // register for both instance type and scratch.
  template <typename LabelType>
  void JumpIfInstanceTypeIsNotSequentialAscii(
      Register instance_type,
      Register scratch,
      LabelType* on_not_flat_ascii_string);

  template <typename LabelType>
  void JumpIfBothInstanceTypesAreNotSequentialAscii(
      Register first_object_instance_type,
      Register second_object_instance_type,
      Register scratch1,
      Register scratch2,
      LabelType* on_fail);

  // ---------------------------------------------------------------------------
  // Macro instructions.

  // Load a register with a long value as efficiently as possible.
  void Set(Register dst, int64_t x);
  void Set(const Operand& dst, int64_t x);

  // Move if the registers are not identical.
  void Move(Register target, Register source);

  // Handle support
  void Move(Register dst, Handle<Object> source);
  void Move(const Operand& dst, Handle<Object> source);
  void Cmp(Register dst, Handle<Object> source);
  void Cmp(const Operand& dst, Handle<Object> source);
  void Push(Handle<Object> source);

  // Emit code to discard a non-negative number of pointer-sized elements
  // from the stack, clobbering only the rsp register.
  void Drop(int stack_elements);

  void Call(Label* target) { call(target); }

  // Control Flow
  void Jump(Address destination, RelocInfo::Mode rmode);
  void Jump(ExternalReference ext);
  void Jump(Handle<Code> code_object, RelocInfo::Mode rmode);

  void Call(Address destination, RelocInfo::Mode rmode);
  void Call(ExternalReference ext);
  void Call(Handle<Code> code_object, RelocInfo::Mode rmode);

  // Compare object type for heap object.
  // Always use unsigned comparisons: above and below, not less and greater.
  // Incoming register is heap_object and outgoing register is map.
  // They may be the same register, and may be kScratchRegister.
  void CmpObjectType(Register heap_object, InstanceType type, Register map);

  // Compare instance type for map.
  // Always use unsigned comparisons: above and below, not less and greater.
  void CmpInstanceType(Register map, InstanceType type);

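  // Illustrative sketch (not part of the original header): a type check and
  // branch on a heap object in rax, with rbx receiving the map. The label
  // name and registers are hypothetical.
  //
  //   masm->CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
  //   masm->j(not_equal, &not_a_function);
  //
  // Because the comparison is unsigned, above/below conditions can be used
  // to test whole ranges of instance types.
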
  // Check if the map of an object is equal to a specified map and
  // branch to label if not. Skip the smi check if not required
  // (object is known to be a heap object).
  void CheckMap(Register obj,
                Handle<Map> map,
                Label* fail,
                bool is_heap_object);

  // Check if the object in register heap_object is a string. Afterwards the
  // register map contains the object map and the register instance_type
  // contains the instance_type. The registers map and instance_type can be the
  // same in which case it contains the instance type afterwards. Either of the
  // registers map and instance_type can be the same as heap_object.
  Condition IsObjectStringType(Register heap_object,
                               Register map,
                               Register instance_type);

  // FCmp compares and pops the two values on top of the FPU stack.
  // The flag results are similar to integer cmp, but require unsigned
  // jcc instructions (je, ja, jae, jb, jbe, and jz).
  void FCmp();

  // Abort execution if argument is not a number. Used in debug code.
  void AbortIfNotNumber(Register object);

  // Abort execution if argument is a smi. Used in debug code.
  void AbortIfSmi(Register object);

  // Abort execution if argument is not a smi. Used in debug code.
  void AbortIfNotSmi(Register object);

  // Abort execution if argument is not the root value with the given index.
  void AbortIfNotRootValue(Register src,
                           Heap::RootListIndex root_value_index,
                           const char* message);

  // ---------------------------------------------------------------------------
  // Exception handling

  // Push a new try handler and link into try handler chain. The return
  // address must be pushed before calling this helper.
  void PushTryHandler(CodeLocation try_location, HandlerType type);

  // Unlink the stack handler on top of the stack from the try handler chain.
  void PopTryHandler();

  // ---------------------------------------------------------------------------
  // Inline caching support

  // Generate code for checking access rights - used for security checks
  // on access to global objects across environments. The holder register
  // is left untouched, but the scratch register and kScratchRegister,
  // which must be different, are clobbered.
  void CheckAccessGlobalProxy(Register holder_reg,
                              Register scratch,
                              Label* miss);


  // ---------------------------------------------------------------------------
  // Allocation support

  // Allocate an object in new space. If the new space is exhausted control
  // continues at the gc_required label. The allocated object is returned in
  // result and end of the new object is returned in result_end. The register
  // scratch can be passed as no_reg in which case an additional object
  // reference will be added to the reloc info. The returned pointers in result
  // and result_end have not yet been tagged as heap objects. If
  // result_contains_top_on_entry is true the content of result is known to be
  // the allocation top on entry (could be result_end from a previous call to
  // AllocateInNewSpace). If result_contains_top_on_entry is true scratch
  // should be no_reg as it is never used.
  void AllocateInNewSpace(int object_size,
                          Register result,
                          Register result_end,
                          Register scratch,
                          Label* gc_required,
                          AllocationFlags flags);

  void AllocateInNewSpace(int header_size,
                          ScaleFactor element_size,
                          Register element_count,
                          Register result,
                          Register result_end,
                          Register scratch,
                          Label* gc_required,
                          AllocationFlags flags);

  void AllocateInNewSpace(Register object_size,
                          Register result,
                          Register result_end,
                          Register scratch,
                          Label* gc_required,
                          AllocationFlags flags);

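  // Illustrative sketch (not part of the original header): allocating a
  // fixed-size object in new space. The size constant and register choices
  // are hypothetical.
  //
  //   masm->AllocateInNewSpace(HeapNumber::kSize, rax, rbx, rcx,
  //                            &gc_required, TAG_OBJECT);
  //
  // With TAG_OBJECT the pointer in rax is returned already tagged as a heap
  // object; on exhaustion control continues at gc_required.
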
  // Undo allocation in new space. The object passed and objects allocated after
  // it will no longer be allocated. Make sure that no pointers are left to the
  // object(s) no longer allocated as they would be invalid when allocation is
  // un-done.
  void UndoAllocationInNewSpace(Register object);

  // Allocate a heap number in new space with undefined value. Returns
  // tagged pointer in result register, or jumps to gc_required if new
  // space is full.
  void AllocateHeapNumber(Register result,
                          Register scratch,
                          Label* gc_required);

  // Allocate a sequential string. All the header fields of the string object
  // are initialized.
  void AllocateTwoByteString(Register result,
                             Register length,
                             Register scratch1,
                             Register scratch2,
                             Register scratch3,
                             Label* gc_required);
  void AllocateAsciiString(Register result,
                           Register length,
                           Register scratch1,
                           Register scratch2,
                           Register scratch3,
                           Label* gc_required);

  // Allocate a raw cons string object. Only the map field of the result is
  // initialized.
  void AllocateConsString(Register result,
                          Register scratch1,
                          Register scratch2,
                          Label* gc_required);
  void AllocateAsciiConsString(Register result,
                               Register scratch1,
                               Register scratch2,
                               Label* gc_required);

  // ---------------------------------------------------------------------------
  // Support functions.

  // Check if result is zero and op is negative.
  void NegativeZeroTest(Register result, Register op, Label* then_label);

  // Check if result is zero and op is negative in code using jump targets.
  void NegativeZeroTest(CodeGenerator* cgen,
                        Register result,
                        Register op,
                        JumpTarget* then_target);

  // Check if result is zero and any of op1 and op2 are negative.
  // Register scratch is destroyed, and it must be different from op2.
  void NegativeZeroTest(Register result, Register op1, Register op2,
                        Register scratch, Label* then_label);

  // Try to get the function prototype of a function and put the value in
  // the result register. Checks that the function really is a
  // function and jumps to the miss label if the fast checks fail. The
  // function register will be untouched; the other register may be
  // clobbered.
  void TryGetFunctionPrototype(Register function,
                               Register result,
                               Label* miss);

  // Generates code for reporting that an illegal operation has
  // occurred.
  void IllegalOperation(int num_arguments);

  // Picks out an array index from the hash field.
  // Register use:
  //   hash - holds the index's hash. Clobbered.
  //   index - holds the overwritten index on exit.
  void IndexFromHash(Register hash, Register index);

  // Find the function context up the context chain.
  void LoadContext(Register dst, int context_chain_length);

  // ---------------------------------------------------------------------------
  // Runtime calls

  // Call a code stub.
  void CallStub(CodeStub* stub);

  // Call a code stub and return the code object called. Try to generate
  // the code if necessary. Do not perform a GC but instead return a retry
  // after GC failure.
  MUST_USE_RESULT MaybeObject* TryCallStub(CodeStub* stub);

  // Tail call a code stub (jump).
  void TailCallStub(CodeStub* stub);

  // Tail call a code stub (jump) and return the code object called. Try to
  // generate the code if necessary. Do not perform a GC but instead return
  // a retry after GC failure.
  MUST_USE_RESULT MaybeObject* TryTailCallStub(CodeStub* stub);

  // Return from a code stub after popping its arguments.
  void StubReturn(int argc);

  // Call a runtime routine.
  void CallRuntime(Runtime::Function* f, int num_arguments);

  // Call a runtime function, returning the code object called.
  // Try to generate the stub code if necessary. Do not perform a GC
  // but instead return a retry after GC failure.
  MUST_USE_RESULT MaybeObject* TryCallRuntime(Runtime::Function* f,
                                              int num_arguments);

  // Convenience function: Same as above, but takes the fid instead.
  void CallRuntime(Runtime::FunctionId id, int num_arguments);

  // Convenience function: Same as above, but takes the fid instead.
  MUST_USE_RESULT MaybeObject* TryCallRuntime(Runtime::FunctionId id,
                                              int num_arguments);

  // Convenience function: call an external reference.
  void CallExternalReference(const ExternalReference& ext,
                             int num_arguments);

  // Tail call of a runtime routine (jump).
  // Like JumpToExternalReference, but also takes care of passing the number
  // of parameters.
  void TailCallExternalReference(const ExternalReference& ext,
                                 int num_arguments,
                                 int result_size);

  MUST_USE_RESULT MaybeObject* TryTailCallExternalReference(
      const ExternalReference& ext, int num_arguments, int result_size);

  // Convenience function: tail call a runtime routine (jump).
  void TailCallRuntime(Runtime::FunctionId fid,
                       int num_arguments,
                       int result_size);

  MUST_USE_RESULT MaybeObject* TryTailCallRuntime(Runtime::FunctionId fid,
                                                  int num_arguments,
                                                  int result_size);

  // Jump to a runtime routine.
  void JumpToExternalReference(const ExternalReference& ext, int result_size);

  // Jump to a runtime routine.
  MaybeObject* TryJumpToExternalReference(const ExternalReference& ext,
                                          int result_size);

  // Prepares the stack to put arguments (aligns and so on).
  // The WIN64 calling convention requires putting the pointer to the return
  // value slot into rcx (rcx must be preserved until
  // TryCallApiFunctionAndReturn). Saves context (rsi). Clobbers rax.
  // Allocates arg_stack_space * kPointerSize inside the exit frame (not GCed)
  // accessible via StackSpaceOperand.
  void PrepareCallApiFunction(int arg_stack_space);

  // Calls an API function. Allocates HandleScope, extracts
  // returned value from handle and propagates exceptions.
  // Clobbers r12, r14, rbx and caller-save registers. Restores context.
  // On return removes stack_space * kPointerSize (GCed).
  MUST_USE_RESULT MaybeObject* TryCallApiFunctionAndReturn(
      ApiFunction* function, int stack_space);

  // Before calling a C-function from generated code, align arguments on stack.
  // After aligning the frame, arguments must be stored in rsp[0], rsp[8],
  // etc., not pushed. The argument count assumes all arguments are word sized.
  // The number of slots reserved for arguments depends on platform. On Windows
  // stack slots are reserved for the arguments passed in registers. On other
  // platforms stack slots are only reserved for the arguments actually passed
  // on the stack.
  void PrepareCallCFunction(int num_arguments);

  // Calls a C function and cleans up the space for arguments allocated
  // by PrepareCallCFunction. The called function is not allowed to trigger a
  // garbage collection, since that might move the code and invalidate the
  // return address (unless this is somehow accounted for by the called
  // function).
  void CallCFunction(ExternalReference function, int num_arguments);
  void CallCFunction(Register function, int num_arguments);

  // Calculate the number of stack slots to reserve for arguments when calling
  // a C function.
  int ArgumentStackSlotsForCFunctionCall(int num_arguments);

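  // Illustrative sketch (not part of the original header): calling a C
  // function with two word-sized arguments. The external reference name is
  // hypothetical; the arguments themselves go in the native ABI argument
  // registers or reserved stack slots.
  //
  //   masm->PrepareCallCFunction(2);
  //   ...  // Move the two arguments into the first two ABI argument slots.
  //   masm->CallCFunction(ExternalReference::some_c_function(), 2);
  //
  // PrepareCallCFunction aligns rsp and reserves the platform-dependent
  // argument slots; CallCFunction releases them again.
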
  // ---------------------------------------------------------------------------
  // Utilities

  void Ret();

  Handle<Object> CodeObject() { return code_object_; }


  // ---------------------------------------------------------------------------
  // StatsCounter support

  void SetCounter(StatsCounter* counter, int value);
  void IncrementCounter(StatsCounter* counter, int value);
  void DecrementCounter(StatsCounter* counter, int value);


  // ---------------------------------------------------------------------------
  // Debugging

  // Calls Abort(msg) if the condition cc is not satisfied.
  // Use --debug_code to enable.
  void Assert(Condition cc, const char* msg);

  void AssertFastElements(Register elements);

  // Like Assert(), but always enabled.
  void Check(Condition cc, const char* msg);

  // Print a message to stdout and abort execution.
  void Abort(const char* msg);

  // Check that the stack is aligned.
  void CheckStackAlignment();

  // Verify restrictions about code generated in stubs.
  void set_generating_stub(bool value) { generating_stub_ = value; }
  bool generating_stub() { return generating_stub_; }
  void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
  bool allow_stub_calls() { return allow_stub_calls_; }

 private:
  bool generating_stub_;
  bool allow_stub_calls_;

  // Returns a register holding the smi value. The register MUST NOT be
  // modified. It may be the "smi 1 constant" register.
  Register GetSmiConstant(Smi* value);

  // Moves the smi value to the destination register.
  void LoadSmiConstant(Register dst, Smi* value);

  // This handle will be patched with the code object on installation.
  Handle<Object> code_object_;

  // Helper functions for generating invokes.
  template <typename LabelType>
  void InvokePrologue(const ParameterCount& expected,
                      const ParameterCount& actual,
                      Handle<Code> code_constant,
                      Register code_register,
                      LabelType* done,
                      InvokeFlag flag);

  // Activation support.
  void EnterFrame(StackFrame::Type type);
  void LeaveFrame(StackFrame::Type type);

  void EnterExitFramePrologue(bool save_rax);

  // Allocates arg_stack_space * kPointerSize memory (not GCed) on the stack
  // accessible via StackSpaceOperand.
  void EnterExitFrameEpilogue(int arg_stack_space);

  void LeaveExitFrameEpilogue();

  // Allocation support helpers.
  // Loads the top of new-space into the result register.
  // If flags contains RESULT_CONTAINS_TOP, result is assumed to already
  // contain the allocation top on entry. Otherwise the address of the
  // new-space top is loaded into scratch (if scratch is valid), and the
  // new-space top is loaded into result.
  void LoadAllocationTopHelper(Register result,
                               Register scratch,
                               AllocationFlags flags);
  // Update allocation top with value in result_end register.
  // If scratch is valid, it contains the address of the allocation top.
  void UpdateAllocationTopHelper(Register result_end, Register scratch);

  // Helper for PopHandleScope. Allowed to perform a GC and returns
  // NULL if gc_allowed. Does not perform a GC if !gc_allowed, and
  // possibly returns a failure object indicating an allocation failure.
  Object* PopHandleScopeHelper(Register saved,
                               Register scratch,
                               bool gc_allowed);
};


// The code patcher is used to patch (typically) small parts of code e.g. for
// debugging and other types of instrumentation. When using the code patcher
// the exact number of bytes specified must be emitted. It is not legal to emit
// relocation information. If any of these constraints are violated it causes
// an assertion.
class CodePatcher {
 public:
  CodePatcher(byte* address, int size);
  virtual ~CodePatcher();

  // Macro assembler to emit code.
  MacroAssembler* masm() { return &masm_; }

 private:
  byte* address_;  // The address of the code being patched.
  int size_;  // Number of bytes of the expected patch size.
  MacroAssembler masm_;  // Macro assembler used to generate the code.
};

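// Illustrative sketch (not part of the original header): patching two bytes
// at a known address with int3 breakpoints. The address variable is
// hypothetical; exactly the declared number of bytes must be emitted before
// the patcher is destroyed.
//
//   CodePatcher patcher(address, 2);
//   patcher.masm()->int3();
//   patcher.masm()->int3();
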

// -----------------------------------------------------------------------------
// Static helper functions.

// Generate an Operand for loading a field from an object.
static inline Operand FieldOperand(Register object, int offset) {
  return Operand(object, offset - kHeapObjectTag);
}


// Generate an Operand for loading an indexed field from an object.
static inline Operand FieldOperand(Register object,
                                   Register index,
                                   ScaleFactor scale,
                                   int offset) {
  return Operand(object, index, scale, offset - kHeapObjectTag);
}

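// Illustrative sketch (not part of the original header): FieldOperand
// subtracts the heap-object tag so that tagged pointers can be dereferenced
// directly. Register choices are hypothetical.
//
//   masm->movq(rax, FieldOperand(rbx, HeapObject::kMapOffset));  // Load map.
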
static inline Operand ContextOperand(Register context, int index) {
  return Operand(context, Context::SlotOffset(index));
}


static inline Operand GlobalObjectOperand() {
  return ContextOperand(rsi, Context::GLOBAL_INDEX);
}


// Provides access to exit frame stack space (not GCed).
static inline Operand StackSpaceOperand(int index) {
#ifdef _WIN64
  const int kShadowSpace = 4;
  return Operand(rsp, (index + kShadowSpace) * kPointerSize);
#else
  return Operand(rsp, index * kPointerSize);
#endif
}


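// Illustrative sketch (not part of the original header): inside an exit
// frame created with EnterExitFrame(2), the two reserved (non-GCed) slots
// are addressed as StackSpaceOperand(0) and StackSpaceOperand(1); on Win64
// the operand transparently skips the four shadow-space slots required by
// the calling convention.
//
//   masm->movq(StackSpaceOperand(0), rax);  // Spill rax into the first slot.
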
#ifdef GENERATED_CODE_COVERAGE
extern void LogGeneratedCodeCoverage(const char* file_line);
#define CODE_COVERAGE_STRINGIFY(x) #x
#define CODE_COVERAGE_TOSTRING(x) CODE_COVERAGE_STRINGIFY(x)
#define __FILE_LINE__ __FILE__ ":" CODE_COVERAGE_TOSTRING(__LINE__)
#define ACCESS_MASM(masm) {                                                 \
    byte* x64_coverage_function =                                           \
        reinterpret_cast<byte*>(FUNCTION_ADDR(LogGeneratedCodeCoverage));   \
    masm->pushfd();                                                         \
    masm->pushad();                                                         \
    masm->push(Immediate(reinterpret_cast<int>(&__FILE_LINE__)));           \
    masm->call(x64_coverage_function, RelocInfo::RUNTIME_ENTRY);            \
    masm->pop(rax);                                                         \
    masm->popad();                                                          \
    masm->popfd();                                                          \
  }                                                                         \
  masm->
#else
#define ACCESS_MASM(masm) masm->
#endif

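// Illustrative sketch (not part of the original header): code generators
// conventionally alias ACCESS_MASM to a two-underscore macro so that emitted
// instructions read compactly.
//
//   #define __ ACCESS_MASM(masm)
//   __ movq(rax, rbx);
//   #undef __
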
// -----------------------------------------------------------------------------
// Template implementations.

static int kSmiShift = kSmiTagSize + kSmiShiftSize;


template <typename LabelType>
void MacroAssembler::SmiNeg(Register dst,
                            Register src,
                            LabelType* on_smi_result) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    movq(kScratchRegister, src);
    neg(dst);  // Low 32 bits are retained as zero by negation.
    // Test if result is zero or Smi::kMinValue.
    cmpq(dst, kScratchRegister);
    j(not_equal, on_smi_result);
    movq(src, kScratchRegister);
  } else {
    movq(dst, src);
    neg(dst);
    cmpq(dst, src);
    // If the result is zero or Smi::kMinValue, negation failed to create a smi.
    j(not_equal, on_smi_result);
  }
}


template <typename LabelType>
void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2,
                            LabelType* on_not_smi_result) {
  ASSERT_NOT_NULL(on_not_smi_result);
  ASSERT(!dst.is(src2));
  if (dst.is(src1)) {
    movq(kScratchRegister, src1);
    addq(kScratchRegister, src2);
    j(overflow, on_not_smi_result);
    movq(dst, kScratchRegister);
  } else {
    movq(dst, src1);
    addq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}

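// Illustrative sketch (not part of the original header): the LabelType
// template parameter lets callers pass either a Label or a NearLabel as the
// overflow target. Register choices and the label name are hypothetical.
//
//   NearLabel overflow_to_runtime;
//   masm->SmiAdd(rax, rax, rbx, &overflow_to_runtime);
//   // rax now holds the smi sum; on overflow control reaches the label with
//   // src1 restored.
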
1100template <typename LabelType>
1101void MacroAssembler::SmiSub(Register dst,
1102 Register src1,
1103 Register src2,
1104 LabelType* on_not_smi_result) {
1105 ASSERT_NOT_NULL(on_not_smi_result);
1106 ASSERT(!dst.is(src2));
1107 if (dst.is(src1)) {
1108 cmpq(dst, src2);
1109 j(overflow, on_not_smi_result);
1110 subq(dst, src2);
1111 } else {
1112 movq(dst, src1);
1113 subq(dst, src2);
1114 j(overflow, on_not_smi_result);
1115 }
1116}
1117
1118
1119template <typename LabelType>
1120void MacroAssembler::SmiSub(Register dst,
1121 Register src1,
1122 const Operand& src2,
1123 LabelType* on_not_smi_result) {
1124 ASSERT_NOT_NULL(on_not_smi_result);
1125 if (dst.is(src1)) {
1126 movq(kScratchRegister, src2);
1127 cmpq(src1, kScratchRegister);
1128 j(overflow, on_not_smi_result);
1129 subq(src1, kScratchRegister);
1130 } else {
1131 movq(dst, src1);
1132 subq(dst, src2);
1133 j(overflow, on_not_smi_result);
1134 }
1135}


template <typename LabelType>
void MacroAssembler::SmiMul(Register dst,
                            Register src1,
                            Register src2,
                            LabelType* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));

  if (dst.is(src1)) {
    NearLabel failure, zero_correct_result;
    movq(kScratchRegister, src1);  // Create backup for later testing.
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, &failure);

    // Check for a negative zero result.  If the product is zero and one
    // argument is negative, go to the slow case.
    NearLabel correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result);

    movq(dst, kScratchRegister);
    xor_(dst, src2);
    j(positive, &zero_correct_result);  // Result was positive zero.

    bind(&failure);  // Reused failure exit, restores src1.
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);

    bind(&zero_correct_result);
    xor_(dst, dst);

    bind(&correct_result);
  } else {
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, on_not_smi_result);
    // Check for a negative zero result.  If the product is zero and one
    // argument is negative, go to the slow case.
    NearLabel correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result);
    // One of src1 and src2 is zero; check whether the other is negative.
    movq(kScratchRegister, src1);
    xor_(kScratchRegister, src2);
    j(negative, on_not_smi_result);
    bind(&correct_result);
  }
}
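
// Note on the negative-zero check in SmiMul: in ECMAScript a product such as
// -5 * 0 must evaluate to -0, which has no smi encoding (smi zero is +0).
// The integer product is 0 in that case regardless of the signs, so the sign
// is recovered by XOR-ing the original operands: the XOR is negative exactly
// when one operand was negative, and only then does a zero product need the
// slow path.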


template <typename LabelType>
void MacroAssembler::SmiTryAddConstant(Register dst,
                                       Register src,
                                       Smi* constant,
                                       LabelType* on_not_smi_result) {
  // Does not assume that src is a smi.
  ASSERT_EQ(static_cast<int>(1), static_cast<int>(kSmiTagMask));
  ASSERT_EQ(0, kSmiTag);
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));

  JumpIfNotSmi(src, on_not_smi_result);
  Register tmp = (dst.is(src) ? kScratchRegister : dst);
  LoadSmiConstant(tmp, constant);
  addq(tmp, src);
  j(overflow, on_not_smi_result);
  if (dst.is(src)) {
    movq(dst, tmp);
  }
}


template <typename LabelType>
void MacroAssembler::SmiAddConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    LabelType* on_not_smi_result) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    LoadSmiConstant(kScratchRegister, constant);
    addq(kScratchRegister, src);
    j(overflow, on_not_smi_result);
    movq(dst, kScratchRegister);
  } else {
    LoadSmiConstant(dst, constant);
    addq(dst, src);
    j(overflow, on_not_smi_result);
  }
}


template <typename LabelType>
void MacroAssembler::SmiSubConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    LabelType* on_not_smi_result) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    if (constant->value() == Smi::kMinValue) {
      // Subtracting min-value from any non-negative value will overflow.
      // We test for non-negativity before doing the subtraction.
      testq(src, src);
      j(not_sign, on_not_smi_result);
      LoadSmiConstant(kScratchRegister, constant);
      subq(dst, kScratchRegister);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(kScratchRegister, Smi::FromInt(-constant->value()));
      addq(kScratchRegister, dst);
      j(overflow, on_not_smi_result);
      movq(dst, kScratchRegister);
    }
  } else {
    if (constant->value() == Smi::kMinValue) {
      // Subtracting min-value from any non-negative value will overflow.
      // We test for non-negativity before doing the subtraction.
      testq(src, src);
      j(not_sign, on_not_smi_result);
      LoadSmiConstant(dst, constant);
      // Adding and subtracting the min-value give the same result; they
      // differ only in the overflow flag, which we don't check here.
      addq(dst, src);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(dst, Smi::FromInt(-(constant->value())));
      addq(dst, src);
      j(overflow, on_not_smi_result);
    }
  }
}
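
// Note on the Smi::kMinValue special case above: the usual trick of
// subtracting by adding the negated constant breaks down for kMinValue,
// because -Smi::kMinValue does not fit in a smi.  As a tagged word,
// kMinValue is 0x8000000000000000, its own two's complement, so adding and
// subtracting it agree modulo 2^64 and differ only in the overflow flag --
// which is why an explicit sign test replaces the overflow check there.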


template <typename LabelType>
void MacroAssembler::SmiDiv(Register dst,
                            Register src1,
                            Register src2,
                            LabelType* on_not_smi_result) {
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src2.is(rax));
  ASSERT(!src2.is(rdx));
  ASSERT(!src1.is(rdx));

  // Check for a 0 divisor (the result would be +/-Infinity).
  testq(src2, src2);
  j(zero, on_not_smi_result);

  if (src1.is(rax)) {
    movq(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  // We need to rule out dividing Smi::kMinValue by -1, since that would
  // overflow in idiv and raise an exception.
  // We combine this with a negative zero test (negative zero only happens
  // when dividing zero by a negative number).

  // We overshoot a little and go to the slow case if we divide min-value
  // by any negative value, not just -1.
  NearLabel safe_div;
  testl(rax, Immediate(0x7fffffff));
  j(not_zero, &safe_div);
  testq(src2, src2);
  if (src1.is(rax)) {
    j(positive, &safe_div);
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);
  } else {
    j(negative, on_not_smi_result);
  }
  bind(&safe_div);

  SmiToInteger32(src2, src2);
  // Sign extend eax (holding the untagged src1) into edx:eax.
  cdq();
  idivl(src2);
  Integer32ToSmi(src2, src2);
  // Check that the remainder is zero.
  testl(rdx, rdx);
  if (src1.is(rax)) {
    NearLabel smi_result;
    j(zero, &smi_result);
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);
    bind(&smi_result);
  } else {
    j(not_zero, on_not_smi_result);
  }
  if (!dst.is(src1) && src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  Integer32ToSmi(dst, rax);
}
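
// Note on the safe_div test in SmiDiv: after SmiToInteger32, rax equals 0 or
// Smi::kMinValue (0x80000000) exactly when its low 31 bits are all zero,
// which is what the single testl against 0x7fffffff detects.  Together with
// the divisor's sign this rules out both kMinValue / -1 (which would raise
// #DE in idivl) and 0 / negative (whose result is -0, not a smi).  The final
// remainder test reflects that smi division is only taken when it is exact.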


template <typename LabelType>
void MacroAssembler::SmiMod(Register dst,
                            Register src1,
                            Register src2,
                            LabelType* on_not_smi_result) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!src2.is(rax));
  ASSERT(!src2.is(rdx));
  ASSERT(!src1.is(rdx));
  ASSERT(!src1.is(src2));

  testq(src2, src2);
  j(zero, on_not_smi_result);

  if (src1.is(rax)) {
    movq(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  SmiToInteger32(src2, src2);

  // Test for the edge case of dividing Smi::kMinValue by -1 (will overflow).
  NearLabel safe_div;
  cmpl(rax, Immediate(Smi::kMinValue));
  j(not_equal, &safe_div);
  cmpl(src2, Immediate(-1));
  j(not_equal, &safe_div);
  // Retag the inputs and go to the slow case.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  jmp(on_not_smi_result);
  bind(&safe_div);

  // Sign extend eax into edx:eax.
  cdq();
  idivl(src2);
  // Restore smi tags on inputs.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  // Check for a negative zero result.  If the result is zero and the
  // dividend is negative, go slow to return a floating point negative zero.
  NearLabel smi_result;
  testl(rdx, rdx);
  j(not_zero, &smi_result);
  testq(src1, src1);
  j(negative, on_not_smi_result);
  bind(&smi_result);
  Integer32ToSmi(dst, rdx);
}
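
// Note on the negative-zero check in SmiMod: the ECMAScript remainder takes
// the sign of the dividend, so e.g. -10 % 5 is -0, which no smi can encode.
// idivl leaves the remainder in rdx, hence a zero remainder combined with a
// negative dividend is sent to the slow path.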


template <typename LabelType>
void MacroAssembler::SmiShiftLogicalRightConstant(
    Register dst, Register src, int shift_value, LabelType* on_not_smi_result) {
  // Logical right shift interprets its result as an *unsigned* number.
  if (dst.is(src)) {
    UNIMPLEMENTED();  // Not used.
  } else {
    movq(dst, src);
    if (shift_value == 0) {
      testq(dst, dst);
      j(negative, on_not_smi_result);
    }
    shr(dst, Immediate(shift_value + kSmiShift));
    shl(dst, Immediate(kSmiShift));
  }
}
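
// Note on the shift arithmetic above: a tagged smi keeps its value in bits
// 32..63, so shr by (shift_value + kSmiShift) leaves the logically shifted
// 32-bit value in the low bits and shl by kSmiShift retags it.  For any
// shift_value >= 1 the result is below 2^31 and always fits; only for
// shift_value == 0, where the "shift" merely reinterprets the value as
// unsigned, can a negative input yield a value too large for a smi -- hence
// the sign test in that case.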


template <typename LabelType>
void MacroAssembler::SmiShiftLogicalRight(Register dst,
                                          Register src1,
                                          Register src2,
                                          LabelType* on_not_smi_result) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(rcx));
  if (src1.is(rcx) || src2.is(rcx)) {
    movq(kScratchRegister, rcx);
  }
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  orl(rcx, Immediate(kSmiShift));
  shr_cl(dst);  // The effective shift is (rcx & 0x1f) + 32.
  shl(dst, Immediate(kSmiShift));
  testq(dst, dst);
  if (src1.is(rcx) || src2.is(rcx)) {
    NearLabel positive_result;
    j(positive, &positive_result);
    if (src1.is(rcx)) {
      movq(src1, kScratchRegister);
    } else {
      movq(src2, kScratchRegister);
    }
    jmp(on_not_smi_result);
    bind(&positive_result);
  } else {
    j(negative, on_not_smi_result);  // src2 was zero and src1 negative.
  }
}
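
// Note on the variable shift above: shr_cl uses only the low six bits of
// rcx, and orl(rcx, Immediate(kSmiShift)) forces bit 5, so the effective
// count is (n & 0x1f) + 32 for any untagged count n.  A single shr thus
// untags src1 and performs the logical shift in one step, and shl(kSmiShift)
// retags the result.  As in the constant case, only a zero shift of a
// negative smi overflows the smi range, which the final sign test catches;
// rcx is saved in kScratchRegister and restored on the slow path when it
// aliases an input.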


template <typename LabelType>
void MacroAssembler::SelectNonSmi(Register dst,
                                  Register src1,
                                  Register src2,
                                  LabelType* on_not_smis) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(src1));
  ASSERT(!dst.is(src2));
  // The operands must not both be smis (at most one may be).
#ifdef DEBUG
  if (allow_stub_calls()) {  // Check contains a stub call.
    Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
    Check(not_both_smis, "Both registers were smis in SelectNonSmi.");
  }
#endif
  ASSERT_EQ(0, kSmiTag);
  ASSERT_EQ(0, Smi::FromInt(0));
  movl(kScratchRegister, Immediate(kSmiTagMask));
  and_(kScratchRegister, src1);
  testl(kScratchRegister, src2);
  // If non-zero, neither operand is a smi.
  j(not_zero, on_not_smis);

  // Exactly one operand is a smi.
  ASSERT_EQ(1, static_cast<int>(kSmiTagMask));
  // kScratchRegister still holds src1 & kSmiTagMask, which is either zero
  // or one.
  subq(kScratchRegister, Immediate(1));
  // If src1 is a smi, the scratch register is all 1s, else it is all 0s.
  movq(dst, src1);
  xor_(dst, src2);
  and_(dst, kScratchRegister);
  // If src1 is a smi, dst holds src1 ^ src2, else it is zero.
  xor_(dst, src1);
  // If src1 is a smi, dst is src2, else it is src1, i.e., the non-smi.
}
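
// Worked example for the branch-free selection above, assuming src1 is the
// smi (tag bit 0): kScratchRegister becomes 0 - 1 == all ones, so
// dst == ((src1 ^ src2) & ~0) ^ src1 == src2.  If instead src1 is the
// non-smi, the mask is 1 - 1 == 0 and dst == 0 ^ src1 == src1.  Either way
// dst ends up holding whichever operand is not a smi.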


template <typename LabelType>
void MacroAssembler::JumpIfSmi(Register src, LabelType* on_smi) {
  ASSERT_EQ(0, kSmiTag);
  Condition smi = CheckSmi(src);
  j(smi, on_smi);
}


template <typename LabelType>
void MacroAssembler::JumpIfNotSmi(Register src, LabelType* on_not_smi) {
  Condition smi = CheckSmi(src);
  j(NegateCondition(smi), on_not_smi);
}


template <typename LabelType>
void MacroAssembler::JumpUnlessNonNegativeSmi(
    Register src, LabelType* on_not_smi_or_negative) {
  Condition non_negative_smi = CheckNonNegativeSmi(src);
  j(NegateCondition(non_negative_smi), on_not_smi_or_negative);
}


template <typename LabelType>
void MacroAssembler::JumpIfSmiEqualsConstant(Register src,
                                             Smi* constant,
                                             LabelType* on_equals) {
  SmiCompare(src, constant);
  j(equal, on_equals);
}


template <typename LabelType>
void MacroAssembler::JumpIfNotValidSmiValue(Register src,
                                            LabelType* on_invalid) {
  Condition is_valid = CheckInteger32ValidSmiValue(src);
  j(NegateCondition(is_valid), on_invalid);
}


template <typename LabelType>
void MacroAssembler::JumpIfUIntNotValidSmiValue(Register src,
                                                LabelType* on_invalid) {
  Condition is_valid = CheckUInteger32ValidSmiValue(src);
  j(NegateCondition(is_valid), on_invalid);
}


template <typename LabelType>
void MacroAssembler::JumpIfNotBothSmi(Register src1,
                                      Register src2,
                                      LabelType* on_not_both_smi) {
  Condition both_smi = CheckBothSmi(src1, src2);
  j(NegateCondition(both_smi), on_not_both_smi);
}


template <typename LabelType>
void MacroAssembler::JumpUnlessBothNonNegativeSmi(Register src1,
                                                  Register src2,
                                                  LabelType* on_not_both_smi) {
  Condition both_smi = CheckBothNonNegativeSmi(src1, src2);
  j(NegateCondition(both_smi), on_not_both_smi);
}


template <typename LabelType>
void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register first_object,
                                                         Register second_object,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         LabelType* on_fail) {
  // Check that neither object is a smi.
  Condition either_smi = CheckEitherSmi(first_object, second_object);
  j(either_smi, on_fail);

  // Load instance type for both strings.
  movq(scratch1, FieldOperand(first_object, HeapObject::kMapOffset));
  movq(scratch2, FieldOperand(second_object, HeapObject::kMapOffset));
  movzxbl(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzxbl(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ascii strings.
  ASSERT(kNotStringTag != 0);
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;

  andl(scratch1, Immediate(kFlatAsciiStringMask));
  andl(scratch2, Immediate(kFlatAsciiStringMask));
  // Interleave the bits to check both scratch1 and scratch2 in one test.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmpl(scratch1,
       Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
  j(not_equal, on_fail);
}
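
// Note on the interleaving trick above: the ASSERT guarantees that
// kFlatAsciiStringMask and kFlatAsciiStringMask << 3 share no bits, so
// lea(scratch1, Operand(scratch1, scratch2, times_8, 0)) packs the two
// masked instance types into disjoint bit ranges of a single register, and
// one cmpl against kFlatAsciiStringTag + (kFlatAsciiStringTag << 3) checks
// both strings at once.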


template <typename LabelType>
void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Register scratch,
    LabelType* failure) {
  if (!scratch.is(instance_type)) {
    movl(scratch, instance_type);
  }

  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;

  andl(scratch, Immediate(kFlatAsciiStringMask));
  cmpl(scratch, Immediate(kStringTag | kSeqStringTag | kAsciiStringTag));
  j(not_equal, failure);
}


template <typename LabelType>
void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialAscii(
    Register first_object_instance_type,
    Register second_object_instance_type,
    Register scratch1,
    Register scratch2,
    LabelType* on_fail) {
  // Load instance type for both strings.
  movq(scratch1, first_object_instance_type);
  movq(scratch2, second_object_instance_type);

  // Check that both are flat ascii strings.
  ASSERT(kNotStringTag != 0);
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;

  andl(scratch1, Immediate(kFlatAsciiStringMask));
  andl(scratch2, Immediate(kFlatAsciiStringMask));
  // Interleave the bits to check both scratch1 and scratch2 in one test.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmpl(scratch1,
       Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
  j(not_equal, on_fail);
}


template <typename LabelType>
void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                LabelType* branch) {
  if (Serializer::enabled()) {
    // Can't do arithmetic on external references if it might get serialized.
    // The mask isn't really an address.  We load it as an external reference
    // in case the size of the new space is different between the snapshot
    // maker and the running system.
    if (scratch.is(object)) {
      movq(kScratchRegister, ExternalReference::new_space_mask());
      and_(scratch, kScratchRegister);
    } else {
      movq(scratch, ExternalReference::new_space_mask());
      and_(scratch, object);
    }
    movq(kScratchRegister, ExternalReference::new_space_start());
    cmpq(scratch, kScratchRegister);
    j(cc, branch);
  } else {
    ASSERT(is_int32(static_cast<int64_t>(Heap::NewSpaceMask())));
    intptr_t new_space_start =
        reinterpret_cast<intptr_t>(Heap::NewSpaceStart());
    movq(kScratchRegister, -new_space_start, RelocInfo::NONE);
    if (scratch.is(object)) {
      addq(scratch, kScratchRegister);
    } else {
      lea(scratch, Operand(object, kScratchRegister, times_1, 0));
    }
    and_(scratch, Immediate(static_cast<int32_t>(Heap::NewSpaceMask())));
    j(cc, branch);
  }
}
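
// Note on InNewSpace: the mask-based test relies on new space being an
// aligned, power-of-two sized region, so membership reduces to comparing the
// high address bits with those of the space's start.  The fast path adds
// -new_space_start and masks, setting the zero flag exactly when the object
// lies inside the region (so cc == equal means "in new space"); the
// serializer path masks and compares against an external reference instead,
// because a raw address must not be baked into a snapshot whose new space
// may be located or sized differently at run time.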


template <typename LabelType>
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    Register code_register,
                                    LabelType* done,
                                    InvokeFlag flag) {
  bool definitely_matches = false;
  NearLabel invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      Set(rax, actual.immediate());
      if (expected.immediate() ==
          SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
        // Don't worry about adapting arguments for built-ins that
        // don't want that done.  Skip the adaptation code by making it
        // look like we have a match between the expected and actual
        // number of arguments.
        definitely_matches = true;
      } else {
        Set(rbx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in a register, actual is an immediate.  This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmpq(expected.reg(), Immediate(actual.immediate()));
      j(equal, &invoke);
      ASSERT(expected.reg().is(rbx));
      Set(rax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers.  This
      // is the case when we invoke functions using call and apply.
      cmpq(expected.reg(), actual.reg());
      j(equal, &invoke);
      ASSERT(actual.reg().is(rax));
      ASSERT(expected.reg().is(rbx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
    if (!code_constant.is_null()) {
      movq(rdx, code_constant, RelocInfo::EMBEDDED_OBJECT);
      addq(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_register.is(rdx)) {
      movq(rdx, code_register);
    }

    if (flag == CALL_FUNCTION) {
      Call(adaptor, RelocInfo::CODE_TARGET);
      jmp(done);
    } else {
      Jump(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
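
// Note on the invocation protocol visible in InvokePrologue: the actual
// argument count travels in rax and the expected count in rbx (both asserted
// above), with the entry point of the code to invoke in rdx.  On a count
// mismatch, control is routed through Builtins::ArgumentsAdaptorTrampoline,
// which adapts the arguments to the expected count before entering the
// target code.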


} }  // namespace v8::internal

#endif  // V8_X64_MACRO_ASSEMBLER_X64_H_