// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_X64_MACRO_ASSEMBLER_X64_H_
#define V8_X64_MACRO_ASSEMBLER_X64_H_

#include "assembler.h"

namespace v8 {
namespace internal {

// Flags used for the AllocateInNewSpace functions.
enum AllocationFlags {
  // No special flags.
  NO_ALLOCATION_FLAGS = 0,
  // Return the pointer to the allocated object already tagged as a heap
  // object.
  TAG_OBJECT = 1 << 0,
  // The content of the result register already contains the allocation top
  // in new space.
  RESULT_CONTAINS_TOP = 1 << 1
};

// Default scratch register used by MacroAssembler (and other code that needs
// a spare register). The register isn't callee-save and isn't used by the
// function calling convention.
static const Register kScratchRegister = { 10 };      // r10.
static const Register kSmiConstantRegister = { 15 };  // r15 (callee save).
static const Register kRootRegister = { 13 };         // r13 (callee save).
// Value of smi in kSmiConstantRegister.
static const int kSmiConstantRegisterValue = 1;
// Actual value of root register is offset from the root array's start
// to take advantage of negative 8-bit displacement values.
static const int kRootRegisterBias = 128;
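
// Illustrative note (a sketch, not part of this interface): with the bias
// folded into kRootRegister, root-array offsets in [0, 256) become biased
// displacements in [-128, 128), so a hypothetical root load such as
//   movq(destination, Operand(kRootRegister, offset - kRootRegisterBias));
// can use the short signed 8-bit displacement encoding.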

// Convenience for platform-independent signatures.
typedef Operand MemOperand;

// Forward declarations.
class JumpTarget;
class PostCallGenerator;

struct SmiIndex {
  SmiIndex(Register index_register, ScaleFactor scale)
      : reg(index_register),
        scale(scale) {}
  Register reg;
  ScaleFactor scale;
};
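
// Illustrative use (a sketch; assumes a MacroAssembler* masm and see
// MacroAssembler::SmiToIndex below):
//   SmiIndex index = masm->SmiToIndex(rcx, rbx, kPointerSizeLog2);
//   masm->movq(rax, FieldOperand(rdx, index.reg, index.scale,
//                                FixedArray::kHeaderSize));
// This loads element |rbx| (as an integer index) of the FixedArray in rdx.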

// MacroAssembler implements a collection of frequently used macros.
class MacroAssembler: public Assembler {
 public:
  MacroAssembler(void* buffer, int size);

  void LoadRoot(Register destination, Heap::RootListIndex index);
  // Load a root value where the index (or part of it) is variable.
  // The variable_offset register is added to the fixed_offset value
  // to get the index into the root array.
  void LoadRootIndexed(Register destination,
                       Register variable_offset,
                       int fixed_offset);
  void CompareRoot(Register with, Heap::RootListIndex index);
  void CompareRoot(const Operand& with, Heap::RootListIndex index);
  void PushRoot(Heap::RootListIndex index);
  void StoreRoot(Register source, Heap::RootListIndex index);

  // ---------------------------------------------------------------------------
  // GC Support

  // For the page containing |object|, mark the region covering |addr| dirty.
  // RecordWriteHelper only works if the object is not in new
  // space.
  void RecordWriteHelper(Register object,
                         Register addr,
                         Register scratch);

  // Check if object is in new space. The condition cc can be equal or
  // not_equal. If it is equal, a jump will be done if the object is in new
  // space. The register scratch can be object itself, but it will be
  // clobbered.
  template <typename LabelType>
  void InNewSpace(Register object,
                  Register scratch,
                  Condition cc,
                  LabelType* branch);

  // For the page containing |object|, mark the region covering
  // [object+offset] dirty. |object| is the object being stored into,
  // |value| is the object being stored. If |offset| is zero, then the
  // |scratch| register contains the array index into the elements array
  // represented as an untagged 32-bit integer. All registers are
  // clobbered by the operation. RecordWrite filters out smis so it
  // does not update the write barrier if the value is a smi.
  void RecordWrite(Register object,
                   int offset,
                   Register value,
                   Register scratch);

  // For the page containing |object|, mark the region covering [address]
  // dirty. |object| is the object being stored into, |value| is the
  // object being stored. All registers are clobbered by the
  // operation. RecordWrite filters out smis so it does not update
  // the write barrier if the value is a smi.
  void RecordWrite(Register object,
                   Register address,
                   Register value);

  // For the page containing |object|, mark the region covering
  // [object+offset] dirty. The value is known to not be a smi.
  // object is the object being stored into, value is the object being stored.
  // If offset is zero, then the scratch register contains the array index
  // into the elements array represented as an untagged 32-bit integer.
  // All registers are clobbered by the operation.
  void RecordWriteNonSmi(Register object,
                         int offset,
                         Register value,
                         Register scratch);
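
  // Illustrative sketch of a barriered store (assumes a MacroAssembler* masm
  // and a free scratch register rcx; not taken from any particular caller):
  //   masm->movq(FieldOperand(rbx, JSObject::kPropertiesOffset), rax);
  //   masm->RecordWrite(rbx, JSObject::kPropertiesOffset, rax, rcx);
  // After the call all registers passed to RecordWrite are clobbered.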

#ifdef ENABLE_DEBUGGER_SUPPORT
  // ---------------------------------------------------------------------------
  // Debugger Support

  void DebugBreak();
#endif

  // ---------------------------------------------------------------------------
  // Activation frames

  void EnterInternalFrame() { EnterFrame(StackFrame::INTERNAL); }
  void LeaveInternalFrame() { LeaveFrame(StackFrame::INTERNAL); }

  void EnterConstructFrame() { EnterFrame(StackFrame::CONSTRUCT); }
  void LeaveConstructFrame() { LeaveFrame(StackFrame::CONSTRUCT); }

  // Enter specific kind of exit frame; either in normal or
  // debug mode. Expects the number of arguments in register rax and
  // sets up the number of arguments in register rdi and the pointer
  // to the first argument in register rsi.
  //
  // Allocates arg_stack_space * kPointerSize memory (not GCed) on the stack
  // accessible via StackSpaceOperand.
  void EnterExitFrame(int arg_stack_space = 0, bool save_doubles = false);

  // Enter specific kind of exit frame. Allocates arg_stack_space * kPointerSize
  // memory (not GCed) on the stack accessible via StackSpaceOperand.
  void EnterApiExitFrame(int arg_stack_space);

  // Leave the current exit frame. Expects/provides the return value in
  // register rax:rdx (untouched) and the pointer to the first
  // argument in register rsi.
  void LeaveExitFrame(bool save_doubles = false);

  // Leave the current exit frame. Expects/provides the return value in
  // register rax (untouched).
  void LeaveApiExitFrame();

  // Push and pop the registers that can hold pointers.
  void PushSafepointRegisters() { Pushad(); }
  void PopSafepointRegisters() { Popad(); }
  // Store the value in register src in the safepoint register stack
  // slot for register dst.
  void StoreToSafepointRegisterSlot(Register dst, Register src);
  void LoadFromSafepointRegisterSlot(Register dst, Register src);

  void InitializeRootRegister() {
    ExternalReference roots_address = ExternalReference::roots_address();
    movq(kRootRegister, roots_address);
    addq(kRootRegister, Immediate(kRootRegisterBias));
  }

  // ---------------------------------------------------------------------------
  // JavaScript invokes

  // Invoke the JavaScript function code by either calling or jumping.
  void InvokeCode(Register code,
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  InvokeFlag flag,
                  PostCallGenerator* post_call_generator = NULL);

  void InvokeCode(Handle<Code> code,
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  RelocInfo::Mode rmode,
                  InvokeFlag flag,
                  PostCallGenerator* post_call_generator = NULL);

  // Invoke the JavaScript function in the given register. Changes the
  // current context to the context in the function before invoking.
  void InvokeFunction(Register function,
                      const ParameterCount& actual,
                      InvokeFlag flag,
                      PostCallGenerator* post_call_generator = NULL);

  void InvokeFunction(JSFunction* function,
                      const ParameterCount& actual,
                      InvokeFlag flag,
                      PostCallGenerator* post_call_generator = NULL);

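  // Illustrative sketch (assumes a MacroAssembler* masm and that the two
  // arguments are already pushed; CALL_FUNCTION requests an ordinary call):
  //   ParameterCount actual(2);
  //   masm->InvokeFunction(rdi, actual, CALL_FUNCTION);
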
  // Invoke specified builtin JavaScript function. Adds an entry to
  // the unresolved list if the name does not resolve.
  void InvokeBuiltin(Builtins::JavaScript id,
                     InvokeFlag flag,
                     PostCallGenerator* post_call_generator = NULL);

  // Store the function for the given builtin in the target register.
  void GetBuiltinFunction(Register target, Builtins::JavaScript id);

  // Store the code object for the given builtin in the target register.
  void GetBuiltinEntry(Register target, Builtins::JavaScript id);


  // ---------------------------------------------------------------------------
  // Smi tagging, untagging and operations on tagged smis.

  void InitializeSmiConstantRegister() {
    movq(kSmiConstantRegister,
         reinterpret_cast<uint64_t>(Smi::FromInt(kSmiConstantRegisterValue)),
         RelocInfo::NONE);
  }

  // Conversions between tagged smi values and non-tagged integer values.

  // Tag an integer value. The result must be known to be a valid smi value.
  // Only uses the low 32 bits of the src register. Sets the N and Z flags
  // based on the value of the resulting smi.
  void Integer32ToSmi(Register dst, Register src);

  // Stores an integer32 value into a memory field that already holds a smi.
  void Integer32ToSmiField(const Operand& dst, Register src);

  // Adds constant to src and tags the result as a smi.
  // Result must be a valid smi.
  void Integer64PlusConstantToSmi(Register dst, Register src, int constant);

  // Convert smi to 32-bit integer. I.e., not sign extended into
  // high 32 bits of destination.
  void SmiToInteger32(Register dst, Register src);
  void SmiToInteger32(Register dst, const Operand& src);

  // Convert smi to 64-bit integer (sign extended if necessary).
  void SmiToInteger64(Register dst, Register src);
  void SmiToInteger64(Register dst, const Operand& src);
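
  // Illustrative round trip (a sketch; assumes a MacroAssembler* masm):
  //   masm->Integer32ToSmi(rax, rbx);  // tag: payload shifted up by kSmiShift
  //   masm->SmiToInteger32(rcx, rax);  // untag: rcx == low 32 bits of rbx
  // On x64 the 32-bit payload of a smi lives in the upper half of the word.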

  // Multiply a positive smi's integer value by a power of two.
  // Provides result as 64-bit integer value.
  void PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
                                             Register src,
                                             int power);

  // Divide a positive smi's integer value by a power of two.
  // Provides result as 32-bit integer value.
  void PositiveSmiDivPowerOfTwoToInteger32(Register dst,
                                           Register src,
                                           int power);


  // Simple comparison of smis.
  void SmiCompare(Register dst, Register src);
  void SmiCompare(Register dst, Smi* src);
  void SmiCompare(Register dst, const Operand& src);
  void SmiCompare(const Operand& dst, Register src);
  void SmiCompare(const Operand& dst, Smi* src);
  // Compare the int32 in src register to the value of the smi stored at dst.
  void SmiCompareInteger32(const Operand& dst, Register src);
  // Sets sign and zero flags depending on value of smi in register.
  void SmiTest(Register src);

  // Functions performing a check on a known or potential smi. Returns
  // a condition that is satisfied if the check is successful.

  // Is the value a tagged smi.
  Condition CheckSmi(Register src);
  Condition CheckSmi(const Operand& src);
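
  // Illustrative pattern: the returned Condition feeds straight into a
  // conditional jump (a sketch; assumes a MacroAssembler* masm and a label
  // smi_case):
  //   Condition is_smi = masm->CheckSmi(rax);
  //   masm->j(is_smi, &smi_case);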

  // Is the value a non-negative tagged smi.
  Condition CheckNonNegativeSmi(Register src);

  // Are both values tagged smis.
  Condition CheckBothSmi(Register first, Register second);

  // Are both values non-negative tagged smis.
  Condition CheckBothNonNegativeSmi(Register first, Register second);

  // Is either value a tagged smi.
  Condition CheckEitherSmi(Register first,
                           Register second,
                           Register scratch = kScratchRegister);

  // Is the value the minimum smi value (since we are using
  // two's complement numbers, negating the value is known to yield
  // a non-smi value).
  Condition CheckIsMinSmi(Register src);

  // Checks whether a 32-bit integer value is valid for conversion
  // to a smi.
  Condition CheckInteger32ValidSmiValue(Register src);

  // Checks whether a 32-bit unsigned integer value is valid for
  // conversion to a smi.
  Condition CheckUInteger32ValidSmiValue(Register src);

  // Check whether src is a smi: set dst to zero if it is a smi,
  // and to one if it isn't.
  void CheckSmiToIndicator(Register dst, Register src);
  void CheckSmiToIndicator(Register dst, const Operand& src);

  // Test-and-jump functions. Typically combines a check function
  // above with a conditional jump.

  // Jump if the value cannot be represented by a smi.
  template <typename LabelType>
  void JumpIfNotValidSmiValue(Register src, LabelType* on_invalid);

  // Jump if the unsigned integer value cannot be represented by a smi.
  template <typename LabelType>
  void JumpIfUIntNotValidSmiValue(Register src, LabelType* on_invalid);

  // Jump to label if the value is a tagged smi.
  template <typename LabelType>
  void JumpIfSmi(Register src, LabelType* on_smi);

  // Jump to label if the value is not a tagged smi.
  template <typename LabelType>
  void JumpIfNotSmi(Register src, LabelType* on_not_smi);

  // Jump to label if the value is not a non-negative tagged smi.
  template <typename LabelType>
  void JumpUnlessNonNegativeSmi(Register src, LabelType* on_not_smi);

  // Jump to label if the value, which must be a tagged smi, has value equal
  // to the constant.
  template <typename LabelType>
  void JumpIfSmiEqualsConstant(Register src,
                               Smi* constant,
                               LabelType* on_equals);

  // Jump if either or both registers are not smi values.
  template <typename LabelType>
  void JumpIfNotBothSmi(Register src1,
                        Register src2,
                        LabelType* on_not_both_smi);

  // Jump if either or both registers are not non-negative smi values.
  template <typename LabelType>
  void JumpUnlessBothNonNegativeSmi(Register src1, Register src2,
                                    LabelType* on_not_both_smi);

  // Operations on tagged smi values.

  // Smis represent a subset of integers. The subset is always equivalent to
  // a two's complement interpretation of a fixed number of bits.

  // Optimistically adds an integer constant to a supposed smi.
  // If the src is not a smi, or the result is not a smi, jump to
  // the label.
  template <typename LabelType>
  void SmiTryAddConstant(Register dst,
                         Register src,
                         Smi* constant,
                         LabelType* on_not_smi_result);

  // Add an integer constant to a tagged smi, giving a tagged smi as result.
  // No overflow testing on the result is done.
  void SmiAddConstant(Register dst, Register src, Smi* constant);

  // Add an integer constant to a tagged smi, giving a tagged smi as result.
  // No overflow testing on the result is done.
  void SmiAddConstant(const Operand& dst, Smi* constant);

  // Add an integer constant to a tagged smi, giving a tagged smi as result,
  // or jumping to a label if the result cannot be represented by a smi.
  template <typename LabelType>
  void SmiAddConstant(Register dst,
                      Register src,
                      Smi* constant,
                      LabelType* on_not_smi_result);

  // Subtract an integer constant from a tagged smi, giving a tagged smi as
  // result. No testing on the result is done. Sets the N and Z flags
  // based on the value of the resulting integer.
  void SmiSubConstant(Register dst, Register src, Smi* constant);

  // Subtract an integer constant from a tagged smi, giving a tagged smi as
  // result, or jumping to a label if the result cannot be represented by a smi.
  template <typename LabelType>
  void SmiSubConstant(Register dst,
                      Register src,
                      Smi* constant,
                      LabelType* on_not_smi_result);

  // Negating a smi can give a negative zero or too large positive value.
  // NOTICE: This operation jumps on success, not failure!
  template <typename LabelType>
  void SmiNeg(Register dst,
              Register src,
              LabelType* on_smi_result);

  // Adds smi values and returns the result as a smi.
  // If dst is src1, then src1 will be destroyed, even if
  // the operation is unsuccessful.
  template <typename LabelType>
  void SmiAdd(Register dst,
              Register src1,
              Register src2,
              LabelType* on_not_smi_result);

  void SmiAdd(Register dst,
              Register src1,
              Register src2);
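
  // Illustrative sketch of an overflow-checked addition (assumes a
  // MacroAssembler* masm and a bailout label not_smi_result):
  //   masm->SmiAdd(rax, rax, rbx, &not_smi_result);
  // Control falls through with the sum in rax, or jumps to not_smi_result
  // if the result cannot be represented as a smi.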

  // Subtracts smi values and returns the result as a smi.
  // If dst is src1, then src1 will be destroyed, even if
  // the operation is unsuccessful.
  template <typename LabelType>
  void SmiSub(Register dst,
              Register src1,
              Register src2,
              LabelType* on_not_smi_result);

  void SmiSub(Register dst,
              Register src1,
              Register src2);

  template <typename LabelType>
  void SmiSub(Register dst,
              Register src1,
              const Operand& src2,
              LabelType* on_not_smi_result);

  void SmiSub(Register dst,
              Register src1,
              const Operand& src2);

  // Multiplies smi values and returns the result as a smi,
  // if possible.
  // If dst is src1, then src1 will be destroyed, even if
  // the operation is unsuccessful.
  template <typename LabelType>
  void SmiMul(Register dst,
              Register src1,
              Register src2,
              LabelType* on_not_smi_result);

  // Divides one smi by another and returns the quotient.
  // Clobbers rax and rdx registers.
  template <typename LabelType>
  void SmiDiv(Register dst,
              Register src1,
              Register src2,
              LabelType* on_not_smi_result);

  // Divides one smi by another and returns the remainder.
  // Clobbers rax and rdx registers.
  template <typename LabelType>
  void SmiMod(Register dst,
              Register src1,
              Register src2,
              LabelType* on_not_smi_result);

  // Bitwise operations.
  void SmiNot(Register dst, Register src);
  void SmiAnd(Register dst, Register src1, Register src2);
  void SmiOr(Register dst, Register src1, Register src2);
  void SmiXor(Register dst, Register src1, Register src2);
  void SmiAndConstant(Register dst, Register src1, Smi* constant);
  void SmiOrConstant(Register dst, Register src1, Smi* constant);
  void SmiXorConstant(Register dst, Register src1, Smi* constant);

  void SmiShiftLeftConstant(Register dst,
                            Register src,
                            int shift_value);
  template <typename LabelType>
  void SmiShiftLogicalRightConstant(Register dst,
                                    Register src,
                                    int shift_value,
                                    LabelType* on_not_smi_result);
  void SmiShiftArithmeticRightConstant(Register dst,
                                       Register src,
                                       int shift_value);

  // Shifts a smi value to the left, and returns the result if that is a smi.
  // Uses and clobbers rcx, so dst may not be rcx.
  void SmiShiftLeft(Register dst,
                    Register src1,
                    Register src2);
  // Shifts a smi value to the right, shifting in zero bits at the top, and
  // returns the unsigned interpretation of the result if that is a smi.
  // Uses and clobbers rcx, so dst may not be rcx.
  template <typename LabelType>
  void SmiShiftLogicalRight(Register dst,
                            Register src1,
                            Register src2,
                            LabelType* on_not_smi_result);
  // Shifts a smi value to the right, sign extending the top, and
  // returns the signed interpretation of the result. That will always
  // be a valid smi value, since it's numerically smaller than the
  // original.
  // Uses and clobbers rcx, so dst may not be rcx.
  void SmiShiftArithmeticRight(Register dst,
                               Register src1,
                               Register src2);

  // Specialized operations

  // Select the non-smi register of two registers where exactly one is a
  // smi. If neither are smis, jump to the failure label.
  template <typename LabelType>
  void SelectNonSmi(Register dst,
                    Register src1,
                    Register src2,
                    LabelType* on_not_smis);

  // Converts, if necessary, a smi to a combination of number and
  // multiplier to be used as a scaled index.
  // The src register contains a *positive* smi value. The shift is the
  // power of two to multiply the index value by (e.g.
  // to index by smi-value * kPointerSize, pass the smi and kPointerSizeLog2).
  // The returned index register may be either src or dst, depending
  // on what is most efficient. If src and dst are different registers,
  // src is always unchanged.
  SmiIndex SmiToIndex(Register dst, Register src, int shift);

  // Converts a positive smi to a negative index.
  SmiIndex SmiToNegativeIndex(Register dst, Register src, int shift);

  // Basic Smi operations.
  void Move(Register dst, Smi* source) {
    LoadSmiConstant(dst, source);
  }

  void Move(const Operand& dst, Smi* source) {
    Register constant = GetSmiConstant(source);
    movq(dst, constant);
  }

  void Push(Smi* smi);
  void Test(const Operand& dst, Smi* source);

  // ---------------------------------------------------------------------------
  // String macros.

  // If object is a string, its map is loaded into object_map.
  template <typename LabelType>
  void JumpIfNotString(Register object,
                       Register object_map,
                       LabelType* not_string);


  template <typename LabelType>
  void JumpIfNotBothSequentialAsciiStrings(Register first_object,
                                           Register second_object,
                                           Register scratch1,
                                           Register scratch2,
                                           LabelType* on_not_both_flat_ascii);

  // Check whether the instance type represents a flat ascii string. Jump
  // to the label if not. If the instance type can be scratched, specify the
  // same register for both instance type and scratch.
  template <typename LabelType>
  void JumpIfInstanceTypeIsNotSequentialAscii(
      Register instance_type,
      Register scratch,
      LabelType* on_not_flat_ascii_string);

  template <typename LabelType>
  void JumpIfBothInstanceTypesAreNotSequentialAscii(
      Register first_object_instance_type,
      Register second_object_instance_type,
      Register scratch1,
      Register scratch2,
      LabelType* on_fail);

  // ---------------------------------------------------------------------------
  // Macro instructions.

  // Load a register with a long value as efficiently as possible.
  void Set(Register dst, int64_t x);
  void Set(const Operand& dst, int64_t x);

  // Move if the registers are not identical.
  void Move(Register target, Register source);

  // Handle support
  void Move(Register dst, Handle<Object> source);
  void Move(const Operand& dst, Handle<Object> source);
  void Cmp(Register dst, Handle<Object> source);
  void Cmp(const Operand& dst, Handle<Object> source);
  void Push(Handle<Object> source);

  // Emit code to discard a non-negative number of pointer-sized elements
  // from the stack, clobbering only the rsp register.
  void Drop(int stack_elements);

  void Call(Label* target) { call(target); }

  // Control Flow
  void Jump(Address destination, RelocInfo::Mode rmode);
  void Jump(ExternalReference ext);
  void Jump(Handle<Code> code_object, RelocInfo::Mode rmode);

  void Call(Address destination, RelocInfo::Mode rmode);
  void Call(ExternalReference ext);
  void Call(Handle<Code> code_object, RelocInfo::Mode rmode);

  // Emit call to the code we are currently generating.
  void CallSelf() {
    Handle<Code> self(reinterpret_cast<Code**>(CodeObject().location()));
    Call(self, RelocInfo::CODE_TARGET);
  }

  // Non-x64 instructions.
  // Push/pop all general purpose registers.
  // Does not push rsp/rbp nor any of the assembler's special purpose registers
  // (kScratchRegister, kSmiConstantRegister, kRootRegister).
  void Pushad();
  void Popad();
  // Sets the stack as after performing Popad, without actually loading the
  // registers.
  void Dropad();

  // Compare object type for heap object.
  // Always use unsigned comparisons: above and below, not less and greater.
  // Incoming register is heap_object and outgoing register is map.
  // They may be the same register, and may be kScratchRegister.
  void CmpObjectType(Register heap_object, InstanceType type, Register map);
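
  // Illustrative use (a sketch; assumes a MacroAssembler* masm and a label
  // not_array):
  //   masm->CmpObjectType(rax, JS_ARRAY_TYPE, rbx);  // rbx receives the map.
  //   masm->j(not_equal, &not_array);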

  // Compare instance type for map.
  // Always use unsigned comparisons: above and below, not less and greater.
  void CmpInstanceType(Register map, InstanceType type);

  // Check if the map of an object is equal to a specified map and
  // branch to label if not. Skip the smi check if not required
  // (object is known to be a heap object).
  void CheckMap(Register obj,
                Handle<Map> map,
                Label* fail,
                bool is_heap_object);

  // Check if the object in register heap_object is a string. Afterwards the
  // register map contains the object map and the register instance_type
  // contains the instance_type. The registers map and instance_type can be the
  // same in which case it contains the instance type afterwards. Either of the
  // registers map and instance_type can be the same as heap_object.
  Condition IsObjectStringType(Register heap_object,
                               Register map,
                               Register instance_type);

  // FCmp compares and pops the two values on top of the FPU stack.
  // The flag results are similar to integer cmp, but require unsigned
  // jcc instructions (je, ja, jae, jb, jbe, and jz).
  void FCmp();

  // Abort execution if argument is not a number. Used in debug code.
  void AbortIfNotNumber(Register object);

  // Abort execution if argument is a smi. Used in debug code.
  void AbortIfSmi(Register object);

  // Abort execution if argument is not a smi. Used in debug code.
  void AbortIfNotSmi(Register object);

  // Abort execution if argument is not a string. Used in debug code.
  void AbortIfNotString(Register object);

  // Abort execution if argument is not the root value with the given index.
  void AbortIfNotRootValue(Register src,
                           Heap::RootListIndex root_value_index,
                           const char* message);

  // ---------------------------------------------------------------------------
  // Exception handling

  // Push a new try handler and link into try handler chain. The return
  // address must be pushed before calling this helper.
  void PushTryHandler(CodeLocation try_location, HandlerType type);

  // Unlink the stack handler on top of the stack from the try handler chain.
  void PopTryHandler();

  // Activate the top handler in the try handler chain and pass the
  // thrown value.
  void Throw(Register value);

  // Propagate an uncatchable exception out of the current JS stack.
  void ThrowUncatchable(UncatchableExceptionType type, Register value);

  // ---------------------------------------------------------------------------
  // Inline caching support

  // Generate code for checking access rights - used for security checks
  // on access to global objects across environments. The holder register
  // is left untouched, but the scratch register and kScratchRegister,
  // which must be different, are clobbered.
  void CheckAccessGlobalProxy(Register holder_reg,
                              Register scratch,
                              Label* miss);


  // ---------------------------------------------------------------------------
  // Allocation support

  // Allocate an object in new space. If the new space is exhausted control
  // continues at the gc_required label. The allocated object is returned in
  // result and the end of the new object is returned in result_end. The
  // register scratch can be passed as no_reg in which case an additional
  // object reference will be added to the reloc info. The returned pointers
  // in result and result_end have not yet been tagged as heap objects. If
  // the RESULT_CONTAINS_TOP flag is set, the content of result is known to
  // be the allocation top on entry (could be result_end from a previous
  // call to AllocateInNewSpace), and scratch should be no_reg as it is
  // never used.
  void AllocateInNewSpace(int object_size,
                          Register result,
                          Register result_end,
                          Register scratch,
                          Label* gc_required,
                          AllocationFlags flags);

  void AllocateInNewSpace(int header_size,
                          ScaleFactor element_size,
                          Register element_count,
                          Register result,
                          Register result_end,
                          Register scratch,
                          Label* gc_required,
                          AllocationFlags flags);

  void AllocateInNewSpace(Register object_size,
                          Register result,
                          Register result_end,
                          Register scratch,
                          Label* gc_required,
                          AllocationFlags flags);
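
  // Illustrative sketch of a fixed-size allocation returning a tagged
  // pointer (assumes a MacroAssembler* masm and a slow-path label
  // gc_required):
  //   masm->AllocateInNewSpace(JSValue::kSize, rax, rbx, rcx,
  //                            &gc_required, TAG_OBJECT);
  // rax receives the tagged object and rbx the new allocation top.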

  // Undo allocation in new space. The object passed and objects allocated after
  // it will no longer be allocated. Make sure that no pointers are left to the
  // object(s) no longer allocated as they would be invalid when allocation is
  // undone.
  void UndoAllocationInNewSpace(Register object);

  // Allocate a heap number in new space with undefined value. Returns
  // tagged pointer in result register, or jumps to gc_required if new
  // space is full.
  void AllocateHeapNumber(Register result,
                          Register scratch,
                          Label* gc_required);

  // Allocate a sequential string. All the header fields of the string object
  // are initialized.
  void AllocateTwoByteString(Register result,
                             Register length,
                             Register scratch1,
                             Register scratch2,
                             Register scratch3,
                             Label* gc_required);
  void AllocateAsciiString(Register result,
                           Register length,
                           Register scratch1,
                           Register scratch2,
                           Register scratch3,
                           Label* gc_required);

  // Allocate a raw cons string object. Only the map field of the result is
  // initialized.
  void AllocateConsString(Register result,
                          Register scratch1,
                          Register scratch2,
                          Label* gc_required);
  void AllocateAsciiConsString(Register result,
                               Register scratch1,
                               Register scratch2,
                               Label* gc_required);

  // ---------------------------------------------------------------------------
  // Support functions.

  // Check if result is zero and op is negative.
  void NegativeZeroTest(Register result, Register op, Label* then_label);

  // Check if result is zero and op is negative in code using jump targets.
  void NegativeZeroTest(CodeGenerator* cgen,
                        Register result,
                        Register op,
                        JumpTarget* then_target);

  // Check if result is zero and any of op1 and op2 are negative.
  // Register scratch is destroyed, and it must be different from op2.
  void NegativeZeroTest(Register result, Register op1, Register op2,
                        Register scratch, Label* then_label);

  // Try to get the function prototype of a function and put the value in
  // the result register. Checks that the function really is a
  // function and jumps to the miss label if the fast checks fail. The
  // function register will be untouched; the other register may be
  // clobbered.
  void TryGetFunctionPrototype(Register function,
                               Register result,
                               Label* miss);

  // Generates code for reporting that an illegal operation has
  // occurred.
  void IllegalOperation(int num_arguments);

  // Picks out an array index from the hash field.
  // Register use:
  //   hash - holds the index's hash. Clobbered.
  //   index - holds the overwritten index on exit.
  void IndexFromHash(Register hash, Register index);

  // Find the function context up the context chain.
  void LoadContext(Register dst, int context_chain_length);

  // Load the global function with the given index.
  void LoadGlobalFunction(int index, Register function);

  // Load the initial map from the global function. The registers
  // function and map can be the same.
  void LoadGlobalFunctionInitialMap(Register function, Register map);

  // ---------------------------------------------------------------------------
  // Runtime calls

  // Call a code stub.
  void CallStub(CodeStub* stub);

  // Call a code stub and return the code object called. Try to generate
  // the code if necessary. Do not perform a GC but instead return a retry
  // after GC failure.
  MUST_USE_RESULT MaybeObject* TryCallStub(CodeStub* stub);

  // Tail call a code stub (jump).
  void TailCallStub(CodeStub* stub);

  // Tail call a code stub (jump) and return the code object called. Try to
  // generate the code if necessary. Do not perform a GC but instead return
  // a retry after GC failure.
  MUST_USE_RESULT MaybeObject* TryTailCallStub(CodeStub* stub);

  // Return from a code stub after popping its arguments.
  void StubReturn(int argc);

  // Call a runtime routine.
  void CallRuntime(Runtime::Function* f, int num_arguments);

  // Call a runtime function and save the value of XMM registers.
  void CallRuntimeSaveDoubles(Runtime::FunctionId id);

  // Call a runtime function and return the code object called. Try to
  // generate the code if necessary. Do not perform a GC but instead
  // return a retry after GC failure.
  MUST_USE_RESULT MaybeObject* TryCallRuntime(Runtime::Function* f,
                                              int num_arguments);

  // Convenience function: Same as above, but takes the fid instead.
  void CallRuntime(Runtime::FunctionId id, int num_arguments);

  // Convenience function: Same as above, but takes the fid instead.
  MUST_USE_RESULT MaybeObject* TryCallRuntime(Runtime::FunctionId id,
                                              int num_arguments);

  // Convenience function: call an external reference.
  void CallExternalReference(const ExternalReference& ext,
                             int num_arguments);

  // Tail call of a runtime routine (jump).
  // Like JumpToExternalReference, but also takes care of passing the number
  // of parameters.
  void TailCallExternalReference(const ExternalReference& ext,
                                 int num_arguments,
                                 int result_size);

  MUST_USE_RESULT MaybeObject* TryTailCallExternalReference(
      const ExternalReference& ext, int num_arguments, int result_size);

  // Convenience function: tail call a runtime routine (jump).
  void TailCallRuntime(Runtime::FunctionId fid,
                       int num_arguments,
                       int result_size);

  MUST_USE_RESULT MaybeObject* TryTailCallRuntime(Runtime::FunctionId fid,
                                                  int num_arguments,
                                                  int result_size);

  // Jump to a runtime routine.
  void JumpToExternalReference(const ExternalReference& ext, int result_size);

  // Jump to a runtime routine.
  MaybeObject* TryJumpToExternalReference(const ExternalReference& ext,
                                          int result_size);

  // Prepares stack to put arguments (aligns and so on).
  // The WIN64 calling convention requires putting the pointer to the return
  // value slot into rcx (rcx must be preserved until
  // TryCallApiFunctionAndReturn). Saves context (rsi). Clobbers rax.
  // Allocates arg_stack_space * kPointerSize inside the exit frame (not
  // GCed) accessible via StackSpaceOperand.
  void PrepareCallApiFunction(int arg_stack_space);

  // Calls an API function. Allocates HandleScope, extracts
  // returned value from handle and propagates exceptions.
  // Clobbers r12, r14, rbx and caller-save registers. Restores context.
  // On return removes stack_space * kPointerSize (GCed).
  MUST_USE_RESULT MaybeObject* TryCallApiFunctionAndReturn(
      ApiFunction* function, int stack_space);

  // Before calling a C-function from generated code, align arguments on stack.
  // After aligning the frame, arguments must be stored in rsp[0], rsp[8],
  // etc., not pushed. The argument count assumes all arguments are word sized.
  // The number of slots reserved for arguments depends on platform. On Windows
  // stack slots are reserved for the arguments passed in registers. On other
  // platforms stack slots are only reserved for the arguments actually passed
  // on the stack.
  void PrepareCallCFunction(int num_arguments);

  // Calls a C function and cleans up the space for arguments allocated
  // by PrepareCallCFunction. The called function is not allowed to trigger a
  // garbage collection, since that might move the code and invalidate the
  // return address (unless this is somehow accounted for by the called
  // function).
  void CallCFunction(ExternalReference function, int num_arguments);
  void CallCFunction(Register function, int num_arguments);

  // Calculates the number of stack slots to reserve for arguments when
  // calling a C function.
  int ArgumentStackSlotsForCFunctionCall(int num_arguments);
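
  // Illustrative call sequence (a sketch; the external reference name below
  // is hypothetical and the C argument registers vary by platform):
  //   masm->PrepareCallCFunction(2);
  //   // ... move the two arguments into the C argument registers,
  //   // e.g. rdi/rsi on Linux or rcx/rdx on Windows ...
  //   masm->CallCFunction(ExternalReference::hypothetical_function(), 2);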

  // ---------------------------------------------------------------------------
  // Utilities

  void Ret();

  // Return and drop arguments from stack, where the number of arguments
  // may be bigger than 2^16 - 1. Requires a scratch register.
  void Ret(int bytes_dropped, Register scratch);

  Handle<Object> CodeObject() { return code_object_; }


  // ---------------------------------------------------------------------------
  // StatsCounter support

  void SetCounter(StatsCounter* counter, int value);
  void IncrementCounter(StatsCounter* counter, int value);
  void DecrementCounter(StatsCounter* counter, int value);


  // ---------------------------------------------------------------------------
  // Debugging

  // Calls Abort(msg) if the condition cc is not satisfied.
  // Use --debug_code to enable.
  void Assert(Condition cc, const char* msg);

  void AssertFastElements(Register elements);

  // Like Assert(), but always enabled.
  void Check(Condition cc, const char* msg);

  // Print a message to stdout and abort execution.
  void Abort(const char* msg);

  // Check that the stack is aligned.
  void CheckStackAlignment();

  // Verify restrictions about code generated in stubs.
  void set_generating_stub(bool value) { generating_stub_ = value; }
  bool generating_stub() { return generating_stub_; }
  void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
  bool allow_stub_calls() { return allow_stub_calls_; }

 private:
  // Order in which general registers are pushed by Pushad:
  // rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r12, r14.
  static int kSafepointPushRegisterIndices[Register::kNumRegisters];
  static const int kNumSafepointSavedRegisters = 11;

  bool generating_stub_;
  bool allow_stub_calls_;

  // Returns a register holding the smi value. The register MUST NOT be
  // modified. It may be the "smi 1 constant" register.
  Register GetSmiConstant(Smi* value);

  // Moves the smi value to the destination register.
  void LoadSmiConstant(Register dst, Smi* value);

  // This handle will be patched with the code object on installation.
  Handle<Object> code_object_;

  // Helper functions for generating invokes.
  template <typename LabelType>
  void InvokePrologue(const ParameterCount& expected,
                      const ParameterCount& actual,
                      Handle<Code> code_constant,
                      Register code_register,
                      LabelType* done,
                      InvokeFlag flag,
                      PostCallGenerator* post_call_generator);

  // Activation support.
  void EnterFrame(StackFrame::Type type);
  void LeaveFrame(StackFrame::Type type);

  void EnterExitFramePrologue(bool save_rax);

  // Allocates arg_stack_space * kPointerSize memory (not GCed) on the stack
  // accessible via StackSpaceOperand.
  void EnterExitFrameEpilogue(int arg_stack_space, bool save_doubles);

  void LeaveExitFrameEpilogue();

  // Allocation support helpers.
  // Loads the top of new-space into the result register.
  // If flags contains RESULT_CONTAINS_TOP, the result register is assumed to
  // already contain the allocation top on entry.
  // Otherwise the address of the new-space top is loaded into scratch (if
  // scratch is valid), and the new-space top is loaded into result.
  void LoadAllocationTopHelper(Register result,
                               Register scratch,
                               AllocationFlags flags);
  // Update allocation top with value in result_end register.
  // If scratch is valid, it contains the address of the allocation top.
  void UpdateAllocationTopHelper(Register result_end, Register scratch);

  // Helper for PopHandleScope. Allowed to perform a GC and returns
  // NULL if gc_allowed. Does not perform a GC if !gc_allowed, and
  // possibly returns a failure object indicating an allocation failure.
  Object* PopHandleScopeHelper(Register saved,
                               Register scratch,
                               bool gc_allowed);


  // Compute memory operands for safepoint stack slots.
  Operand SafepointRegisterSlot(Register reg);
  static int SafepointRegisterStackIndex(int reg_code) {
    return kNumSafepointRegisters - kSafepointPushRegisterIndices[reg_code] - 1;
  }

  // Needs access to SafepointRegisterStackIndex for optimized frame
  // traversal.
  friend class OptimizedFrame;
};


// The code patcher is used to patch (typically) small parts of code, e.g. for
// debugging and other types of instrumentation. When using the code patcher
// the exact number of bytes specified must be emitted. It is not legal to
// emit relocation information. If any of these constraints are violated it
// causes an assertion to fail.
class CodePatcher {
 public:
  CodePatcher(byte* address, int size);
  virtual ~CodePatcher();

  // Macro assembler to emit code.
  MacroAssembler* masm() { return &masm_; }

 private:
  byte* address_;  // The address of the code being patched.
  int size_;  // The expected patch size in bytes.
  MacroAssembler masm_;  // Macro assembler used to generate the code.
};
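
// A minimal usage sketch (illustrative only): emit two single-byte
// instructions over the first two bytes at `address`. The destructor is
// expected to check that exactly `size` bytes were emitted.
//
//   CodePatcher patcher(address, 2);
//   patcher.masm()->int3();  // Emits 0xCC.
//   patcher.masm()->nop();   // Emits 0x90.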


// Helper class for generating code or data associated with the code
// right after a call instruction. As an example, this can be used to
// generate safepoint data after calls for Crankshaft.
class PostCallGenerator {
 public:
  PostCallGenerator() { }
  virtual ~PostCallGenerator() { }
  virtual void Generate() = 0;
};


// -----------------------------------------------------------------------------
// Static helper functions.

// Generate an Operand for loading a field from an object.
static inline Operand FieldOperand(Register object, int offset) {
  return Operand(object, offset - kHeapObjectTag);
}
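
// For example, with a tagged object pointer in rbx, the object's map can be
// loaded with
//
//   movq(rax, FieldOperand(rbx, HeapObject::kMapOffset));
//
// which compensates for the kHeapObjectTag bit set in the pointer.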


// Generate an Operand for loading an indexed field from an object.
static inline Operand FieldOperand(Register object,
                                   Register index,
                                   ScaleFactor scale,
                                   int offset) {
  return Operand(object, index, scale, offset - kHeapObjectTag);
}


static inline Operand ContextOperand(Register context, int index) {
  return Operand(context, Context::SlotOffset(index));
}


static inline Operand GlobalObjectOperand() {
  return ContextOperand(rsi, Context::GLOBAL_INDEX);
}


// Provides access to exit frame stack space (not GCed).
static inline Operand StackSpaceOperand(int index) {
#ifdef _WIN64
  const int kShadowSpace = 4;
  return Operand(rsp, (index + kShadowSpace) * kPointerSize);
#else
  return Operand(rsp, index * kPointerSize);
#endif
}
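
// The Win64 calling convention reserves 32 bytes (four pointer-sized
// "shadow space" slots) above rsp for the callee, so StackSpaceOperand(0)
// resolves to Operand(rsp, 32) on Windows and to Operand(rsp, 0) elsewhere.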


#ifdef GENERATED_CODE_COVERAGE
extern void LogGeneratedCodeCoverage(const char* file_line);
#define CODE_COVERAGE_STRINGIFY(x) #x
#define CODE_COVERAGE_TOSTRING(x) CODE_COVERAGE_STRINGIFY(x)
#define __FILE_LINE__ __FILE__ ":" CODE_COVERAGE_TOSTRING(__LINE__)
#define ACCESS_MASM(masm) { \
    byte* x64_coverage_function = \
        reinterpret_cast<byte*>(FUNCTION_ADDR(LogGeneratedCodeCoverage)); \
    masm->pushfd(); \
    masm->pushad(); \
    masm->push(Immediate(reinterpret_cast<int>(&__FILE_LINE__))); \
    masm->call(x64_coverage_function, RelocInfo::RUNTIME_ENTRY); \
    masm->pop(rax); \
    masm->popad(); \
    masm->popfd(); \
  } \
  masm->
#else
#define ACCESS_MASM(masm) masm->
#endif

// -----------------------------------------------------------------------------
// Template implementations.

static int kSmiShift = kSmiTagSize + kSmiShiftSize;
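
// On x64, a smi stores its 32-bit payload in the upper 32 bits of the word:
// kSmiShift is kSmiTagSize + kSmiShiftSize = 1 + 31 = 32, so e.g. the smi 5
// is represented as 5 << 32 = 0x0000000500000000, and the low 32 bits of a
// valid smi are always zero.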


template <typename LabelType>
void MacroAssembler::SmiNeg(Register dst,
                            Register src,
                            LabelType* on_smi_result) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    movq(kScratchRegister, src);
    neg(dst);  // Low 32 bits are retained as zero by negation.
    // Test if result is zero or Smi::kMinValue.
    cmpq(dst, kScratchRegister);
    j(not_equal, on_smi_result);
    movq(src, kScratchRegister);
  } else {
    movq(dst, src);
    neg(dst);
    cmpq(dst, src);
    // If the result is zero or Smi::kMinValue, negation failed to create a smi.
    j(not_equal, on_smi_result);
  }
}
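
// Note on the failure cases above: negating the smi 0 yields 0 (JavaScript
// -0 is not a smi), and negating Smi::kMinValue (-2^31) overflows back to
// itself in the 32-bit payload. In both cases the negated value compares
// equal to the original, so the code falls through instead of jumping to
// on_smi_result.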


template <typename LabelType>
void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2,
                            LabelType* on_not_smi_result) {
  ASSERT_NOT_NULL(on_not_smi_result);
  ASSERT(!dst.is(src2));
  if (dst.is(src1)) {
    movq(kScratchRegister, src1);
    addq(kScratchRegister, src2);
    j(overflow, on_not_smi_result);
    movq(dst, kScratchRegister);
  } else {
    movq(dst, src1);
    addq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}


template <typename LabelType>
void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            Register src2,
                            LabelType* on_not_smi_result) {
  ASSERT_NOT_NULL(on_not_smi_result);
  ASSERT(!dst.is(src2));
  if (dst.is(src1)) {
    // cmpq sets the overflow flag exactly as subq would, without modifying
    // dst, so a failed subtraction leaves src1 intact for the slow case.
    cmpq(dst, src2);
    j(overflow, on_not_smi_result);
    subq(dst, src2);
  } else {
    movq(dst, src1);
    subq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}


template <typename LabelType>
void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            const Operand& src2,
                            LabelType* on_not_smi_result) {
  ASSERT_NOT_NULL(on_not_smi_result);
  if (dst.is(src1)) {
    movq(kScratchRegister, src2);
    cmpq(src1, kScratchRegister);
    j(overflow, on_not_smi_result);
    subq(src1, kScratchRegister);
  } else {
    movq(dst, src1);
    subq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}


template <typename LabelType>
void MacroAssembler::SmiMul(Register dst,
                            Register src1,
                            Register src2,
                            LabelType* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));

  if (dst.is(src1)) {
    NearLabel failure, zero_correct_result;
    movq(kScratchRegister, src1);  // Create backup for later testing.
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, &failure);

    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    NearLabel correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result);

    movq(dst, kScratchRegister);
    xor_(dst, src2);
    j(positive, &zero_correct_result);  // Result was positive zero.

    bind(&failure);  // Reused failure exit, restores src1.
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);

    bind(&zero_correct_result);
    Set(dst, 0);

    bind(&correct_result);
  } else {
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, on_not_smi_result);
    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    NearLabel correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result);
    // One of src1 and src2 is zero, so check whether the other is
    // negative.
    movq(kScratchRegister, src1);
    xor_(kScratchRegister, src2);
    j(negative, on_not_smi_result);
    bind(&correct_result);
  }
}
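
// The negative zero check exists because JavaScript distinguishes -0 from 0:
// e.g. (-2) * 0 must produce -0, which cannot be represented as a smi. The
// sign is recovered by XOR-ing the two operands: if the XOR is negative,
// exactly one operand was negative and the zero product must be -0, so the
// code bails out to the slow case.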


template <typename LabelType>
void MacroAssembler::SmiTryAddConstant(Register dst,
                                       Register src,
                                       Smi* constant,
                                       LabelType* on_not_smi_result) {
  // Does not assume that src is a smi.
  ASSERT_EQ(static_cast<int>(1), static_cast<int>(kSmiTagMask));
  ASSERT_EQ(0, kSmiTag);
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));

  JumpIfNotSmi(src, on_not_smi_result);
  Register tmp = (dst.is(src) ? kScratchRegister : dst);
  LoadSmiConstant(tmp, constant);
  addq(tmp, src);
  j(overflow, on_not_smi_result);
  if (dst.is(src)) {
    movq(dst, tmp);
  }
}


template <typename LabelType>
void MacroAssembler::SmiAddConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    LabelType* on_not_smi_result) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    LoadSmiConstant(kScratchRegister, constant);
    addq(kScratchRegister, src);
    j(overflow, on_not_smi_result);
    movq(dst, kScratchRegister);
  } else {
    LoadSmiConstant(dst, constant);
    addq(dst, src);
    j(overflow, on_not_smi_result);
  }
}


template <typename LabelType>
void MacroAssembler::SmiSubConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    LabelType* on_not_smi_result) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    if (constant->value() == Smi::kMinValue) {
      // Subtracting min-value from any non-negative value will overflow.
      // We test for non-negativity before doing the subtraction.
      testq(src, src);
      j(not_sign, on_not_smi_result);
      LoadSmiConstant(kScratchRegister, constant);
      subq(dst, kScratchRegister);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(kScratchRegister, Smi::FromInt(-constant->value()));
      addq(kScratchRegister, dst);
      j(overflow, on_not_smi_result);
      movq(dst, kScratchRegister);
    }
  } else {
    if (constant->value() == Smi::kMinValue) {
      // Subtracting min-value from any non-negative value will overflow.
      // We test for non-negativity before doing the subtraction.
      testq(src, src);
      j(not_sign, on_not_smi_result);
      LoadSmiConstant(dst, constant);
      // Adding and subtracting the min-value gives the same result; they
      // differ only in the overflow flag, which we don't check here.
      addq(dst, src);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(dst, Smi::FromInt(-(constant->value())));
      addq(dst, src);
      j(overflow, on_not_smi_result);
    }
  }
}
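
// Smi::kMinValue needs the special handling above because its negation is
// not representable: negating the 32-bit payload -2^31 overflows back to
// -2^31, so "subtract by adding the negation" would silently compute the
// wrong sum for that one constant.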


template <typename LabelType>
void MacroAssembler::SmiDiv(Register dst,
                            Register src1,
                            Register src2,
                            LabelType* on_not_smi_result) {
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src2.is(rax));
  ASSERT(!src2.is(rdx));
  ASSERT(!src1.is(rdx));

  // Check for 0 divisor (result is +/-Infinity).
  NearLabel positive_divisor;
  testq(src2, src2);
  j(zero, on_not_smi_result);

  if (src1.is(rax)) {
    movq(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  // We need to rule out dividing Smi::kMinValue by -1, since that would
  // overflow in idiv and raise an exception.
  // We combine this with a negative zero test (negative zero only happens
  // when dividing zero by a negative number).

  // We overshoot a little and go to slow case if we divide min-value
  // by any negative value, not just -1.
  NearLabel safe_div;
  testl(rax, Immediate(0x7fffffff));
  j(not_zero, &safe_div);
  testq(src2, src2);
  if (src1.is(rax)) {
    j(positive, &safe_div);
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);
  } else {
    j(negative, on_not_smi_result);
  }
  bind(&safe_div);

  SmiToInteger32(src2, src2);
  // Sign extend eax into edx:eax.
  cdq();
  idivl(src2);
  Integer32ToSmi(src2, src2);
  // Check that the remainder is zero.
  testl(rdx, rdx);
  if (src1.is(rax)) {
    NearLabel smi_result;
    j(zero, &smi_result);
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);
    bind(&smi_result);
  } else {
    j(not_zero, on_not_smi_result);
  }
  if (!dst.is(src1) && src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  Integer32ToSmi(dst, rax);
}
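
// The testl against 0x7fffffff is a quick filter: only 0 and Smi::kMinValue
// have all of their low 31 bits clear. Both need the divisor's sign checked,
// since 0 divided by a negative number must yield -0 (not a smi), and
// Smi::kMinValue / -1 would fault in idivl.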


template <typename LabelType>
void MacroAssembler::SmiMod(Register dst,
                            Register src1,
                            Register src2,
                            LabelType* on_not_smi_result) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!src2.is(rax));
  ASSERT(!src2.is(rdx));
  ASSERT(!src1.is(rdx));
  ASSERT(!src1.is(src2));

  testq(src2, src2);
  j(zero, on_not_smi_result);

  if (src1.is(rax)) {
    movq(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  SmiToInteger32(src2, src2);

  // Test for the edge case of dividing Smi::kMinValue by -1 (will overflow).
  NearLabel safe_div;
  cmpl(rax, Immediate(Smi::kMinValue));
  j(not_equal, &safe_div);
  cmpl(src2, Immediate(-1));
  j(not_equal, &safe_div);
  // Retag inputs and go to the slow case.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  jmp(on_not_smi_result);
  bind(&safe_div);

  // Sign extend eax into edx:eax.
  cdq();
  idivl(src2);
  // Restore smi tags on inputs.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  // Check for a negative zero result. If the result is zero, and the
  // dividend is negative, go slow to return a floating point negative zero.
  NearLabel smi_result;
  testl(rdx, rdx);
  j(not_zero, &smi_result);
  testq(src1, src1);
  j(negative, on_not_smi_result);
  bind(&smi_result);
  Integer32ToSmi(dst, rdx);
}


template <typename LabelType>
void MacroAssembler::SmiShiftLogicalRightConstant(
    Register dst, Register src, int shift_value, LabelType* on_not_smi_result) {
  // Logical right shift interprets its result as an *unsigned* number.
  if (dst.is(src)) {
    UNIMPLEMENTED();  // Not used.
  } else {
    movq(dst, src);
    if (shift_value == 0) {
      testq(dst, dst);
      j(negative, on_not_smi_result);
    }
    shr(dst, Immediate(shift_value + kSmiShift));
    shl(dst, Immediate(kSmiShift));
  }
}


template <typename LabelType>
void MacroAssembler::SmiShiftLogicalRight(Register dst,
                                          Register src1,
                                          Register src2,
                                          LabelType* on_not_smi_result) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(rcx));
  // dst and src1 can be the same, because the one case that bails out
  // is a shift by 0, which leaves dst, and therefore src1, unchanged.
  NearLabel result_ok;
  if (src1.is(rcx) || src2.is(rcx)) {
    movq(kScratchRegister, rcx);
  }
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  orl(rcx, Immediate(kSmiShift));
  shr_cl(dst);  // Shift amount is (rcx & 0x1f) + 32.
  shl(dst, Immediate(kSmiShift));
  testq(dst, dst);
  if (src1.is(rcx) || src2.is(rcx)) {
    NearLabel positive_result;
    j(positive, &positive_result);
    if (src1.is(rcx)) {
      movq(src1, kScratchRegister);
    } else {
      movq(src2, kScratchRegister);
    }
    jmp(on_not_smi_result);
    bind(&positive_result);
  } else {
    j(negative, on_not_smi_result);  // src2 was zero and src1 negative.
  }
}
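
// Because the smi payload lives in the upper 32 bits, a logical right shift
// by n is done as one 64-bit shift by n + 32 followed by a retagging shift
// left by 32. OR-ing the count with kSmiShift (32) adds that offset in a
// single instruction, assuming the count is already confined to 0..31 as it
// is for JavaScript shifts.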


template <typename LabelType>
void MacroAssembler::SelectNonSmi(Register dst,
                                  Register src1,
                                  Register src2,
                                  LabelType* on_not_smis) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(src1));
  ASSERT(!dst.is(src2));
  // The operands must not both be smis.
#ifdef DEBUG
  if (allow_stub_calls()) {  // Check contains a stub call.
    Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
    Check(not_both_smis, "Both registers were smis in SelectNonSmi.");
  }
#endif
  ASSERT_EQ(0, kSmiTag);
  ASSERT_EQ(0, Smi::FromInt(0));
  movl(kScratchRegister, Immediate(kSmiTagMask));
  and_(kScratchRegister, src1);
  testl(kScratchRegister, src2);
  // If non-zero, neither operand is a smi.
  j(not_zero, on_not_smis);

  // Exactly one operand is a smi.
  ASSERT_EQ(1, static_cast<int>(kSmiTagMask));
  // kScratchRegister still holds src1 & kSmiTag, which is either zero or one.
  subq(kScratchRegister, Immediate(1));
  // If src1 is a smi, the scratch register is now all 1s, else it is all 0s.
  movq(dst, src1);
  xor_(dst, src2);
  and_(dst, kScratchRegister);
  // If src1 is a smi, dst holds src1 ^ src2, else it is zero.
  xor_(dst, src1);
  // If src1 is a smi, dst is src2, else it is src1, i.e., the non-smi.
}
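
// Branch-free selection walkthrough: after the subq, kScratchRegister is a
// mask that is all 1s when src1 is a smi (tag bit 0) and all 0s otherwise.
// The sequence then computes dst = src1 ^ ((src1 ^ src2) & mask), which is
// src2 when the mask is set and src1 when it is clear -- in either case the
// non-smi operand.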


template <typename LabelType>
void MacroAssembler::JumpIfSmi(Register src, LabelType* on_smi) {
  ASSERT_EQ(0, kSmiTag);
  Condition smi = CheckSmi(src);
  j(smi, on_smi);
}


template <typename LabelType>
void MacroAssembler::JumpIfNotSmi(Register src, LabelType* on_not_smi) {
  Condition smi = CheckSmi(src);
  j(NegateCondition(smi), on_not_smi);
}


template <typename LabelType>
void MacroAssembler::JumpUnlessNonNegativeSmi(
    Register src, LabelType* on_not_smi_or_negative) {
  Condition non_negative_smi = CheckNonNegativeSmi(src);
  j(NegateCondition(non_negative_smi), on_not_smi_or_negative);
}


template <typename LabelType>
void MacroAssembler::JumpIfSmiEqualsConstant(Register src,
                                             Smi* constant,
                                             LabelType* on_equals) {
  SmiCompare(src, constant);
  j(equal, on_equals);
}


template <typename LabelType>
void MacroAssembler::JumpIfNotValidSmiValue(Register src,
                                            LabelType* on_invalid) {
  Condition is_valid = CheckInteger32ValidSmiValue(src);
  j(NegateCondition(is_valid), on_invalid);
}


template <typename LabelType>
void MacroAssembler::JumpIfUIntNotValidSmiValue(Register src,
                                                LabelType* on_invalid) {
  Condition is_valid = CheckUInteger32ValidSmiValue(src);
  j(NegateCondition(is_valid), on_invalid);
}


template <typename LabelType>
void MacroAssembler::JumpIfNotBothSmi(Register src1,
                                      Register src2,
                                      LabelType* on_not_both_smi) {
  Condition both_smi = CheckBothSmi(src1, src2);
  j(NegateCondition(both_smi), on_not_both_smi);
}


template <typename LabelType>
void MacroAssembler::JumpUnlessBothNonNegativeSmi(Register src1,
                                                  Register src2,
                                                  LabelType* on_not_both_smi) {
  Condition both_smi = CheckBothNonNegativeSmi(src1, src2);
  j(NegateCondition(both_smi), on_not_both_smi);
}


template <typename LabelType>
void MacroAssembler::JumpIfNotString(Register object,
                                     Register object_map,
                                     LabelType* not_string) {
  Condition is_smi = CheckSmi(object);
  j(is_smi, not_string);
  CmpObjectType(object, FIRST_NONSTRING_TYPE, object_map);
  j(above_equal, not_string);
}


template <typename LabelType>
void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register first_object,
                                                         Register second_object,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         LabelType* on_fail) {
  // Check that both objects are not smis.
  Condition either_smi = CheckEitherSmi(first_object, second_object);
  j(either_smi, on_fail);

  // Load instance type for both strings.
  movq(scratch1, FieldOperand(first_object, HeapObject::kMapOffset));
  movq(scratch2, FieldOperand(second_object, HeapObject::kMapOffset));
  movzxbl(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzxbl(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ASCII strings.
  ASSERT(kNotStringTag != 0);
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;

  andl(scratch1, Immediate(kFlatAsciiStringMask));
  andl(scratch2, Immediate(kFlatAsciiStringMask));
  // Interleave the bits to check both scratch1 and scratch2 in one test.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmpl(scratch1,
       Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
  j(not_equal, on_fail);
}
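
// The lea computes scratch1 + scratch2 * 8, i.e. it packs the two masked
// instance types into disjoint bit ranges of one register (the assert above
// guarantees the mask and its 3-bit-shifted copy do not overlap), so a
// single cmpl verifies that both strings are sequential ASCII at once.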


template <typename LabelType>
void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Register scratch,
    LabelType* failure) {
  if (!scratch.is(instance_type)) {
    movl(scratch, instance_type);
  }

  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;

  andl(scratch, Immediate(kFlatAsciiStringMask));
  cmpl(scratch, Immediate(kStringTag | kSeqStringTag | kAsciiStringTag));
  j(not_equal, failure);
}


template <typename LabelType>
void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialAscii(
    Register first_object_instance_type,
    Register second_object_instance_type,
    Register scratch1,
    Register scratch2,
    LabelType* on_fail) {
  // Load instance type for both strings.
  movq(scratch1, first_object_instance_type);
  movq(scratch2, second_object_instance_type);

  // Check that both are flat ASCII strings.
  ASSERT(kNotStringTag != 0);
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;

  andl(scratch1, Immediate(kFlatAsciiStringMask));
  andl(scratch2, Immediate(kFlatAsciiStringMask));
  // Interleave the bits to check both scratch1 and scratch2 in one test.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmpl(scratch1,
       Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
  j(not_equal, on_fail);
}


template <typename LabelType>
void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                LabelType* branch) {
  if (Serializer::enabled()) {
    // Can't do arithmetic on external references if it might get serialized.
    // The mask isn't really an address. We load it as an external reference in
    // case the size of the new space is different between the snapshot maker
    // and the running system.
    if (scratch.is(object)) {
      movq(kScratchRegister, ExternalReference::new_space_mask());
      and_(scratch, kScratchRegister);
    } else {
      movq(scratch, ExternalReference::new_space_mask());
      and_(scratch, object);
    }
    movq(kScratchRegister, ExternalReference::new_space_start());
    cmpq(scratch, kScratchRegister);
    j(cc, branch);
  } else {
    ASSERT(is_int32(static_cast<int64_t>(Heap::NewSpaceMask())));
    intptr_t new_space_start =
        reinterpret_cast<intptr_t>(Heap::NewSpaceStart());
    movq(kScratchRegister, -new_space_start, RelocInfo::NONE);
    if (scratch.is(object)) {
      addq(scratch, kScratchRegister);
    } else {
      lea(scratch, Operand(object, kScratchRegister, times_1, 0));
    }
    and_(scratch, Immediate(static_cast<int32_t>(Heap::NewSpaceMask())));
    j(cc, branch);
  }
}
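
// The non-serializer path relies on new-space occupying a size-aligned,
// power-of-two-sized region: after adding -new_space_start, masking with
// Heap::NewSpaceMask() leaves zero exactly when the object lies inside
// new-space, so the containment test reduces to the condition code the
// caller passed in.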


template <typename LabelType>
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    Register code_register,
                                    LabelType* done,
                                    InvokeFlag flag,
                                    PostCallGenerator* post_call_generator) {
  bool definitely_matches = false;
  NearLabel invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      Set(rax, actual.immediate());
      if (expected.immediate() ==
          SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
        // Don't worry about adapting arguments for built-ins that
        // don't want that done. Skip adaptation code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        Set(rbx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmpq(expected.reg(), Immediate(actual.immediate()));
      j(equal, &invoke);
      ASSERT(expected.reg().is(rbx));
      Set(rax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmpq(expected.reg(), actual.reg());
      j(equal, &invoke);
      ASSERT(actual.reg().is(rax));
      ASSERT(expected.reg().is(rbx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
    if (!code_constant.is_null()) {
      movq(rdx, code_constant, RelocInfo::EMBEDDED_OBJECT);
      addq(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_register.is(rdx)) {
      movq(rdx, code_register);
    }

    if (flag == CALL_FUNCTION) {
      Call(adaptor, RelocInfo::CODE_TARGET);
      if (post_call_generator != NULL) post_call_generator->Generate();
      jmp(done);
    } else {
      Jump(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}


} }  // namespace v8::internal

#endif  // V8_X64_MACRO_ASSEMBLER_X64_H_