// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_X64_MACRO_ASSEMBLER_X64_H_
#define V8_X64_MACRO_ASSEMBLER_X64_H_

#include "assembler.h"

namespace v8 {
namespace internal {

// Flags used for the AllocateInNewSpace functions.
enum AllocationFlags {
  // No special flags.
  NO_ALLOCATION_FLAGS = 0,
  // Return the pointer to the allocated memory already tagged as a heap
  // object.
  TAG_OBJECT = 1 << 0,
  // The content of the result register already contains the allocation top in
  // new space.
  RESULT_CONTAINS_TOP = 1 << 1
};
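// The flags combine bitwise, but a cast is needed to pass a combined value
// where an AllocationFlags is expected. A minimal sketch (illustrative only):
//   AllocationFlags flags =
//       static_cast<AllocationFlags>(TAG_OBJECT | RESULT_CONTAINS_TOP);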
46
Steve Blocka7e24c12009-10-30 11:49:00 +000047// Default scratch register used by MacroAssembler (and other code that needs
48// a spare register). The register isn't callee save, and not used by the
49// function calling convention.
Steve Block8defd9f2010-07-08 12:39:36 +010050static const Register kScratchRegister = { 10 }; // r10.
Steve Block44f0eee2011-05-26 01:26:41 +010051static const Register kSmiConstantRegister = { 12 }; // r12 (callee save).
Steve Block8defd9f2010-07-08 12:39:36 +010052static const Register kRootRegister = { 13 }; // r13 (callee save).
53// Value of smi in kSmiConstantRegister.
54static const int kSmiConstantRegisterValue = 1;
Ben Murdoche0cee9b2011-05-25 10:26:03 +010055// Actual value of root register is offset from the root array's start
56// to take advantage of negitive 8-bit displacement values.
57static const int kRootRegisterBias = 128;
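// With the bias added, roots near the start of the array are reachable with
// a compact negative 8-bit displacement. A sketch of the addressing this
// enables (illustrative only; |index| is a hypothetical root index):
//   Operand root(kRootRegister, index * kPointerSize - kRootRegisterBias);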

// Convenience for platform-independent signatures.
typedef Operand MemOperand;

// Forward declarations.
class JumpTarget;
class CallWrapper;

struct SmiIndex {
  SmiIndex(Register index_register, ScaleFactor scale)
      : reg(index_register),
        scale(scale) {}
  Register reg;
  ScaleFactor scale;
};

// MacroAssembler implements a collection of frequently used macros.
class MacroAssembler: public Assembler {
 public:
  MacroAssembler(void* buffer, int size);

  // Prevent the use of the RootArray during the lifetime of this
  // scope object.
  class NoRootArrayScope BASE_EMBEDDED {
   public:
    explicit NoRootArrayScope(MacroAssembler* assembler)
        : variable_(&assembler->root_array_available_),
          old_value_(assembler->root_array_available_) {
      assembler->root_array_available_ = false;
    }
    ~NoRootArrayScope() {
      *variable_ = old_value_;
    }
   private:
    bool* variable_;
    bool old_value_;
  };
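  // A minimal usage sketch (illustrative only; |masm| is a hypothetical
  // MacroAssembler*): suppress root-array accesses while generating code
  // that runs before kRootRegister has been initialized.
  //   {
  //     NoRootArrayScope uninitialized_root_register(masm);
  //     // ... emit code that must not rely on kRootRegister ...
  //   }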

  // Operand pointing to an external reference.
  // May emit code to set up the scratch register. The operand is
  // only guaranteed to be correct as long as the scratch register
  // isn't changed.
  // If the operand is used more than once, use a scratch register
  // that is guaranteed not to be clobbered.
  Operand ExternalOperand(ExternalReference reference,
                          Register scratch = kScratchRegister);
  // Loads and stores the value of an external reference.
  // Special case code for load and store to take advantage of
  // load_rax/store_rax if possible/necessary.
  // For other operations, just use:
  //   Operand operand = ExternalOperand(extref);
  //   operation(operand, ..);
  void Load(Register destination, ExternalReference source);
  void Store(ExternalReference destination, Register source);
  // Loads the address of the external reference into the destination
  // register.
  void LoadAddress(Register destination, ExternalReference source);
  // Returns the size of the code generated by LoadAddress.
  // Used by CallSize(ExternalReference) to find the size of a call.
  int LoadAddressSize(ExternalReference source);

  // Operations on roots in the root-array.
  void LoadRoot(Register destination, Heap::RootListIndex index);
  void StoreRoot(Register source, Heap::RootListIndex index);
  // Load a root value where the index (or part of it) is variable.
  // The variable_offset register is added to the fixed_offset value
  // to get the index into the root-array.
  void LoadRootIndexed(Register destination,
                       Register variable_offset,
                       int fixed_offset);
  void CompareRoot(Register with, Heap::RootListIndex index);
  void CompareRoot(const Operand& with, Heap::RootListIndex index);
  void PushRoot(Heap::RootListIndex index);

  // ---------------------------------------------------------------------------
  // GC Support

  // For page containing |object| mark region covering |addr| dirty.
  // RecordWriteHelper only works if the object is not in new
  // space.
  void RecordWriteHelper(Register object,
                         Register addr,
                         Register scratch);

  // Check if object is in new space. The condition cc can be equal or
  // not_equal. If it is equal, a jump will be done if the object is in new
  // space. The register scratch can be object itself, but it will be clobbered.
  template <typename LabelType>
  void InNewSpace(Register object,
                  Register scratch,
                  Condition cc,
                  LabelType* branch);

  // For page containing |object| mark region covering [object+offset]
  // dirty. |object| is the object being stored into, |value| is the
  // object being stored. If |offset| is zero, then the |scratch|
  // register contains the array index into the elements array
  // represented as an untagged 32-bit integer. All registers are
  // clobbered by the operation. RecordWrite filters out smis so it
  // does not update the write barrier if the value is a smi.
  void RecordWrite(Register object,
                   int offset,
                   Register value,
                   Register scratch);

  // For page containing |object| mark region covering [address]
  // dirty. |object| is the object being stored into, |value| is the
  // object being stored. All registers are clobbered by the
  // operation. RecordWrite filters out smis so it does not update
  // the write barrier if the value is a smi.
  void RecordWrite(Register object,
                   Register address,
                   Register value);

  // For page containing |object| mark region covering [object+offset] dirty.
  // The value is known to not be a smi.
  // object is the object being stored into, value is the object being stored.
  // If offset is zero, then the scratch register contains the array index into
  // the elements array represented as an untagged 32-bit integer.
  // All registers are clobbered by the operation.
  void RecordWriteNonSmi(Register object,
                         int offset,
                         Register value,
                         Register scratch);

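  // A minimal write-barrier sketch (illustrative only; the field offset and
  // the register choices are hypothetical): store a pointer into a field,
  // then record the write so the GC sees it.
  //   masm->movq(FieldOperand(rbx, JSObject::kPropertiesOffset), rax);
  //   masm->RecordWrite(rbx, JSObject::kPropertiesOffset, rax, rcx);
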
#ifdef ENABLE_DEBUGGER_SUPPORT
  // ---------------------------------------------------------------------------
  // Debugger Support

  void DebugBreak();
#endif

  // ---------------------------------------------------------------------------
  // Activation frames

  void EnterInternalFrame() { EnterFrame(StackFrame::INTERNAL); }
  void LeaveInternalFrame() { LeaveFrame(StackFrame::INTERNAL); }

  void EnterConstructFrame() { EnterFrame(StackFrame::CONSTRUCT); }
  void LeaveConstructFrame() { LeaveFrame(StackFrame::CONSTRUCT); }

  // Enter a specific kind of exit frame; either in normal or
  // debug mode. Expects the number of arguments in register rax and
  // sets up the number of arguments in register rdi and the pointer
  // to the first argument in register rsi.
  //
  // Allocates arg_stack_space * kPointerSize memory (not GCed) on the stack
  // accessible via StackSpaceOperand.
  void EnterExitFrame(int arg_stack_space = 0, bool save_doubles = false);

  // Enter a specific kind of exit frame. Allocates arg_stack_space *
  // kPointerSize memory (not GCed) on the stack accessible via
  // StackSpaceOperand.
  void EnterApiExitFrame(int arg_stack_space);

  // Leave the current exit frame. Expects/provides the return value in
  // register rax:rdx (untouched) and the pointer to the first
  // argument in register rsi.
  void LeaveExitFrame(bool save_doubles = false);

  // Leave the current exit frame. Expects/provides the return value in
  // register rax (untouched).
  void LeaveApiExitFrame();

  // Push and pop the registers that can hold pointers.
  void PushSafepointRegisters() { Pushad(); }
  void PopSafepointRegisters() { Popad(); }
  // Store the value in register src in the safepoint register stack
  // slot for register dst.
  void StoreToSafepointRegisterSlot(Register dst, Register src);
  void LoadFromSafepointRegisterSlot(Register dst, Register src);

  void InitializeRootRegister() {
    ExternalReference roots_address =
        ExternalReference::roots_address(isolate());
    movq(kRootRegister, roots_address);
    addq(kRootRegister, Immediate(kRootRegisterBias));
  }

  // ---------------------------------------------------------------------------
  // JavaScript invokes

  // Invoke the JavaScript function code by either calling or jumping.
  void InvokeCode(Register code,
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  InvokeFlag flag,
                  CallWrapper* call_wrapper = NULL);

  void InvokeCode(Handle<Code> code,
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  RelocInfo::Mode rmode,
                  InvokeFlag flag,
                  CallWrapper* call_wrapper = NULL);

  // Invoke the JavaScript function in the given register. Changes the
  // current context to the context in the function before invoking.
  void InvokeFunction(Register function,
                      const ParameterCount& actual,
                      InvokeFlag flag,
                      CallWrapper* call_wrapper = NULL);

  void InvokeFunction(JSFunction* function,
                      const ParameterCount& actual,
                      InvokeFlag flag,
                      CallWrapper* call_wrapper = NULL);

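  // A minimal invocation sketch (illustrative only; assumes the callee is in
  // rdi and the actual argument count is in rax, and |masm| is a hypothetical
  // MacroAssembler*):
  //   ParameterCount actual(rax);
  //   masm->InvokeFunction(rdi, actual, CALL_FUNCTION);
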
  // Invoke the specified builtin JavaScript function. Adds an entry to
  // the unresolved list if the name does not resolve.
  void InvokeBuiltin(Builtins::JavaScript id,
                     InvokeFlag flag,
                     CallWrapper* call_wrapper = NULL);

  // Store the function for the given builtin in the target register.
  void GetBuiltinFunction(Register target, Builtins::JavaScript id);

  // Store the code object for the given builtin in the target register.
  void GetBuiltinEntry(Register target, Builtins::JavaScript id);


  // ---------------------------------------------------------------------------
  // Smi tagging, untagging and operations on tagged smis.

  void InitializeSmiConstantRegister() {
    movq(kSmiConstantRegister,
         reinterpret_cast<uint64_t>(Smi::FromInt(kSmiConstantRegisterValue)),
         RelocInfo::NONE);
  }

  // Conversions between tagged smi values and non-tagged integer values.

  // Tag an integer value. The result must be known to be a valid smi value.
  // Only uses the low 32 bits of the src register. Sets the N and Z flags
  // based on the value of the resulting smi.
  void Integer32ToSmi(Register dst, Register src);

  // Stores an integer32 value into a memory field that already holds a smi.
  void Integer32ToSmiField(const Operand& dst, Register src);

  // Adds constant to src and tags the result as a smi.
  // Result must be a valid smi.
  void Integer64PlusConstantToSmi(Register dst, Register src, int constant);

  // Convert smi to 32-bit integer, i.e., not sign extended into
  // the high 32 bits of the destination.
  void SmiToInteger32(Register dst, Register src);
  void SmiToInteger32(Register dst, const Operand& src);

  // Convert smi to 64-bit integer (sign extended if necessary).
  void SmiToInteger64(Register dst, Register src);
  void SmiToInteger64(Register dst, const Operand& src);

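  // A minimal round-trip sketch (illustrative only): tag the int32 in rax as
  // a smi and untag it again.
  //   masm->Integer32ToSmi(rax, rax);  // rax now holds the tagged smi.
  //   masm->SmiToInteger32(rax, rax);  // Back to the untagged int32 value.
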
  // Multiply a positive smi's integer value by a power of two.
  // Provides result as 64-bit integer value.
  void PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
                                             Register src,
                                             int power);

  // Divide a positive smi's integer value by a power of two.
  // Provides result as 32-bit integer value.
  void PositiveSmiDivPowerOfTwoToInteger32(Register dst,
                                           Register src,
                                           int power);


  // Simple comparison of smis. Both sides must be known smis to use these,
  // otherwise use Cmp.
  void SmiCompare(Register smi1, Register smi2);
  void SmiCompare(Register dst, Smi* src);
  void SmiCompare(Register dst, const Operand& src);
  void SmiCompare(const Operand& dst, Register src);
  void SmiCompare(const Operand& dst, Smi* src);
  // Compare the int32 in src register to the value of the smi stored at dst.
  void SmiCompareInteger32(const Operand& dst, Register src);
  // Sets sign and zero flags depending on value of smi in register.
  void SmiTest(Register src);

  // Functions performing a check on a known or potential smi. Returns
  // a condition that is satisfied if the check is successful.

  // Is the value a tagged smi.
  Condition CheckSmi(Register src);
  Condition CheckSmi(const Operand& src);

  // Is the value a non-negative tagged smi.
  Condition CheckNonNegativeSmi(Register src);

  // Are both values tagged smis.
  Condition CheckBothSmi(Register first, Register second);

  // Are both values non-negative tagged smis.
  Condition CheckBothNonNegativeSmi(Register first, Register second);

  // Is either value a tagged smi.
  Condition CheckEitherSmi(Register first,
                           Register second,
                           Register scratch = kScratchRegister);

  // Is the value the minimum smi value (since we are using
  // two's complement numbers, negating the value is known to yield
  // a non-smi value).
  Condition CheckIsMinSmi(Register src);

  // Checks whether a 32-bit integer value is valid for conversion
  // to a smi.
  Condition CheckInteger32ValidSmiValue(Register src);

  // Checks whether a 32-bit unsigned integer value is valid for
  // conversion to a smi.
  Condition CheckUInteger32ValidSmiValue(Register src);

  // Check whether src is a smi, and set dst to zero if it is a smi,
  // and to one if it isn't.
  void CheckSmiToIndicator(Register dst, Register src);
  void CheckSmiToIndicator(Register dst, const Operand& src);

  // Test-and-jump functions. Typically combines a check function
  // above with a conditional jump.

  // Jump if the value cannot be represented by a smi.
  template <typename LabelType>
  void JumpIfNotValidSmiValue(Register src, LabelType* on_invalid);

  // Jump if the unsigned integer value cannot be represented by a smi.
  template <typename LabelType>
  void JumpIfUIntNotValidSmiValue(Register src, LabelType* on_invalid);

  // Jump to label if the value is a tagged smi.
  template <typename LabelType>
  void JumpIfSmi(Register src, LabelType* on_smi);

  // Jump to label if the value is not a tagged smi.
  template <typename LabelType>
  void JumpIfNotSmi(Register src, LabelType* on_not_smi);

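  // A minimal test-and-jump sketch (illustrative only; NearLabel and Label
  // are the usual LabelType instantiations):
  //   NearLabel not_a_smi;
  //   masm->JumpIfNotSmi(rax, &not_a_smi);
  //   // ... fast path for smi values in rax ...
  //   masm->bind(&not_a_smi);
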
  // Jump to label if the value is not a non-negative tagged smi.
  template <typename LabelType>
  void JumpUnlessNonNegativeSmi(Register src, LabelType* on_not_smi);

  // Jump to label if the value, which must be a tagged smi, has value equal
  // to the constant.
  template <typename LabelType>
  void JumpIfSmiEqualsConstant(Register src,
                               Smi* constant,
                               LabelType* on_equals);

  // Jump if either or both registers are not smi values.
  template <typename LabelType>
  void JumpIfNotBothSmi(Register src1,
                        Register src2,
                        LabelType* on_not_both_smi);

  // Jump if either or both registers are not non-negative smi values.
  template <typename LabelType>
  void JumpUnlessBothNonNegativeSmi(Register src1, Register src2,
                                    LabelType* on_not_both_smi);

  // Operations on tagged smi values.

  // Smis represent a subset of integers. The subset is always equivalent to
  // a two's complement interpretation of a fixed number of bits.

  // Optimistically adds an integer constant to a supposed smi.
  // If the src is not a smi, or the result is not a smi, jump to
  // the label.
  template <typename LabelType>
  void SmiTryAddConstant(Register dst,
                         Register src,
                         Smi* constant,
                         LabelType* on_not_smi_result);

  // Add an integer constant to a tagged smi, giving a tagged smi as result.
  // No overflow testing on the result is done.
  void SmiAddConstant(Register dst, Register src, Smi* constant);

  // Add an integer constant to a tagged smi, giving a tagged smi as result.
  // No overflow testing on the result is done.
  void SmiAddConstant(const Operand& dst, Smi* constant);

  // Add an integer constant to a tagged smi, giving a tagged smi as result,
  // or jumping to a label if the result cannot be represented by a smi.
  template <typename LabelType>
  void SmiAddConstant(Register dst,
                      Register src,
                      Smi* constant,
                      LabelType* on_not_smi_result);

  // Subtract an integer constant from a tagged smi, giving a tagged smi as
  // result. No testing on the result is done. Sets the N and Z flags
  // based on the value of the resulting integer.
  void SmiSubConstant(Register dst, Register src, Smi* constant);

  // Subtract an integer constant from a tagged smi, giving a tagged smi as
  // result, or jumping to a label if the result cannot be represented by a smi.
  template <typename LabelType>
  void SmiSubConstant(Register dst,
                      Register src,
                      Smi* constant,
                      LabelType* on_not_smi_result);

  // Negating a smi can give a negative zero or a too-large positive value.
  // NOTICE: This operation jumps on success, not failure!
  template <typename LabelType>
  void SmiNeg(Register dst,
              Register src,
              LabelType* on_smi_result);

  // Adds smi values and returns the result as a smi.
  // If dst is src1, then src1 will be destroyed, even if
  // the operation is unsuccessful.
  template <typename LabelType>
  void SmiAdd(Register dst,
              Register src1,
              Register src2,
              LabelType* on_not_smi_result);
  template <typename LabelType>
  void SmiAdd(Register dst,
              Register src1,
              const Operand& src2,
              LabelType* on_not_smi_result);

  void SmiAdd(Register dst,
              Register src1,
              Register src2);

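  // A minimal overflow-checked addition sketch (illustrative only;
  // |not_smi_result| would be bound at a suitable slow path):
  //   NearLabel not_smi_result;
  //   masm->SmiAdd(rax, rax, rbx, &not_smi_result);  // rax = rax + rbx.
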
  // Subtracts smi values and returns the result as a smi.
  // If dst is src1, then src1 will be destroyed, even if
  // the operation is unsuccessful.
  template <typename LabelType>
  void SmiSub(Register dst,
              Register src1,
              Register src2,
              LabelType* on_not_smi_result);

  void SmiSub(Register dst,
              Register src1,
              Register src2);

  template <typename LabelType>
  void SmiSub(Register dst,
              Register src1,
              const Operand& src2,
              LabelType* on_not_smi_result);

  void SmiSub(Register dst,
              Register src1,
              const Operand& src2);

  // Multiplies smi values and returns the result as a smi,
  // if possible.
  // If dst is src1, then src1 will be destroyed, even if
  // the operation is unsuccessful.
  template <typename LabelType>
  void SmiMul(Register dst,
              Register src1,
              Register src2,
              LabelType* on_not_smi_result);

  // Divides one smi by another and returns the quotient.
  // Clobbers rax and rdx registers.
  template <typename LabelType>
  void SmiDiv(Register dst,
              Register src1,
              Register src2,
              LabelType* on_not_smi_result);

  // Divides one smi by another and returns the remainder.
  // Clobbers rax and rdx registers.
  template <typename LabelType>
  void SmiMod(Register dst,
              Register src1,
              Register src2,
              LabelType* on_not_smi_result);

  // Bitwise operations.
  void SmiNot(Register dst, Register src);
  void SmiAnd(Register dst, Register src1, Register src2);
  void SmiOr(Register dst, Register src1, Register src2);
  void SmiXor(Register dst, Register src1, Register src2);
  void SmiAndConstant(Register dst, Register src1, Smi* constant);
  void SmiOrConstant(Register dst, Register src1, Smi* constant);
  void SmiXorConstant(Register dst, Register src1, Smi* constant);

  void SmiShiftLeftConstant(Register dst,
                            Register src,
                            int shift_value);
  template <typename LabelType>
  void SmiShiftLogicalRightConstant(Register dst,
                                    Register src,
                                    int shift_value,
                                    LabelType* on_not_smi_result);
  void SmiShiftArithmeticRightConstant(Register dst,
                                       Register src,
                                       int shift_value);

  // Shifts a smi value to the left, and returns the result if that is a smi.
  // Uses and clobbers rcx, so dst may not be rcx.
  void SmiShiftLeft(Register dst,
                    Register src1,
                    Register src2);
  // Shifts a smi value to the right, shifting in zero bits at the top, and
  // returns the unsigned interpretation of the result if that is a smi.
  // Uses and clobbers rcx, so dst may not be rcx.
  template <typename LabelType>
  void SmiShiftLogicalRight(Register dst,
                            Register src1,
                            Register src2,
                            LabelType* on_not_smi_result);
  // Shifts a smi value to the right, sign extending the top, and
  // returns the signed interpretation of the result. That will always
  // be a valid smi value, since it's numerically smaller than the
  // original.
  // Uses and clobbers rcx, so dst may not be rcx.
  void SmiShiftArithmeticRight(Register dst,
                               Register src1,
                               Register src2);

  // Specialized operations

  // Select the non-smi register of two registers where exactly one is a
  // smi. If neither are smis, jump to the failure label.
  template <typename LabelType>
  void SelectNonSmi(Register dst,
                    Register src1,
                    Register src2,
                    LabelType* on_not_smis);

  // Converts, if necessary, a smi to a combination of number and
  // multiplier to be used as a scaled index.
  // The src register contains a *positive* smi value. The shift is the
  // power of two to multiply the index value by (e.g.
  // to index by smi-value * kPointerSize, pass the smi and kPointerSizeLog2).
  // The returned index register may be either src or dst, depending
  // on what is most efficient. If src and dst are different registers,
  // src is always unchanged.
  SmiIndex SmiToIndex(Register dst, Register src, int shift);

  // Converts a positive smi to a negative index.
  SmiIndex SmiToNegativeIndex(Register dst, Register src, int shift);

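  // A minimal scaled-index sketch (illustrative only; the FixedArray element
  // load is a hypothetical use): index rbx's elements by the smi in rcx.
  //   SmiIndex index = masm->SmiToIndex(rcx, rcx, kPointerSizeLog2);
  //   masm->movq(rax, FieldOperand(rbx, index.reg, index.scale,
  //                                FixedArray::kHeaderSize));
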
  // Add the value of a smi in memory to an int32 register.
  // Sets flags as a normal add.
  void AddSmiField(Register dst, const Operand& src);

  // Basic Smi operations.
  void Move(Register dst, Smi* source) {
    LoadSmiConstant(dst, source);
  }

  void Move(const Operand& dst, Smi* source) {
    Register constant = GetSmiConstant(source);
    movq(dst, constant);
  }

  void Push(Smi* smi);
  void Test(const Operand& dst, Smi* source);

  // ---------------------------------------------------------------------------
  // String macros.

  // If object is a string, its map is loaded into object_map.
  template <typename LabelType>
  void JumpIfNotString(Register object,
                       Register object_map,
                       LabelType* not_string);


  template <typename LabelType>
  void JumpIfNotBothSequentialAsciiStrings(Register first_object,
                                           Register second_object,
                                           Register scratch1,
                                           Register scratch2,
                                           LabelType* on_not_both_flat_ascii);

  // Check whether the instance type represents a flat ascii string. Jump to
  // the label if not. If the instance type can be scratched, specify the same
  // register for both instance type and scratch.
  template <typename LabelType>
  void JumpIfInstanceTypeIsNotSequentialAscii(
      Register instance_type,
      Register scratch,
      LabelType* on_not_flat_ascii_string);

  template <typename LabelType>
  void JumpIfBothInstanceTypesAreNotSequentialAscii(
      Register first_object_instance_type,
      Register second_object_instance_type,
      Register scratch1,
      Register scratch2,
      LabelType* on_fail);

  // ---------------------------------------------------------------------------
  // Macro instructions.

  // Load a register with a long value as efficiently as possible.
  void Set(Register dst, int64_t x);
  void Set(const Operand& dst, int64_t x);

  // Move if the registers are not identical.
  void Move(Register target, Register source);

  // Handle support
  void Move(Register dst, Handle<Object> source);
  void Move(const Operand& dst, Handle<Object> source);
  void Cmp(Register dst, Handle<Object> source);
  void Cmp(const Operand& dst, Handle<Object> source);
  void Cmp(Register dst, Smi* src);
  void Cmp(const Operand& dst, Smi* src);
  void Push(Handle<Object> source);

  // Emit code to discard a non-negative number of pointer-sized elements
  // from the stack, clobbering only the rsp register.
  void Drop(int stack_elements);

  void Call(Label* target) { call(target); }

  // Control Flow
  void Jump(Address destination, RelocInfo::Mode rmode);
  void Jump(ExternalReference ext);
  void Jump(Handle<Code> code_object, RelocInfo::Mode rmode);

  void Call(Address destination, RelocInfo::Mode rmode);
  void Call(ExternalReference ext);
  void Call(Handle<Code> code_object, RelocInfo::Mode rmode);

  // The size of the code generated for different call instructions.
  int CallSize(Address destination, RelocInfo::Mode rmode) {
    return kCallInstructionLength;
  }
  int CallSize(ExternalReference ext);
  int CallSize(Handle<Code> code_object) {
    // Code calls use 32-bit relative addressing.
    return kShortCallInstructionLength;
  }
  int CallSize(Register target) {
    // Opcode: REX_opt FF /2 m64
    return (target.high_bit() != 0) ? 3 : 2;
  }
  int CallSize(const Operand& target) {
    // Opcode: REX_opt FF /2 m64
    return (target.requires_rex() ? 2 : 1) + target.operand_size();
  }

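  // A minimal sketch of checking a predicted call size (illustrative only;
  // SizeOfCodeGeneratedSince is inherited from Assembler, and the call
  // through rdx is hypothetical):
  //   int predicted = masm->CallSize(rdx);
  //   Label start;
  //   masm->bind(&start);
  //   masm->call(rdx);
  //   ASSERT_EQ(predicted, masm->SizeOfCodeGeneratedSince(&start));
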
  // Emit call to the code we are currently generating.
  void CallSelf() {
    Handle<Code> self(reinterpret_cast<Code**>(CodeObject().location()));
    Call(self, RelocInfo::CODE_TARGET);
  }

  // Non-x64 instructions.
  // Push/pop all general purpose registers.
  // Does not push rsp/rbp nor any of the assembler's special purpose registers
  // (kScratchRegister, kSmiConstantRegister, kRootRegister).
  void Pushad();
  void Popad();
  // Sets the stack as after performing Popad, without actually loading the
  // registers.
  void Dropad();

  // Compare object type for heap object.
  // Always use unsigned comparisons: above and below, not less and greater.
  // Incoming register is heap_object and outgoing register is map.
  // They may be the same register, and may be kScratchRegister.
  void CmpObjectType(Register heap_object, InstanceType type, Register map);

  // Compare instance type for map.
  // Always use unsigned comparisons: above and below, not less and greater.
  void CmpInstanceType(Register map, InstanceType type);

  // Check if the map of an object is equal to a specified map and
  // branch to label if not. Skip the smi check if not required
  // (object is known to be a heap object).
  void CheckMap(Register obj,
                Handle<Map> map,
                Label* fail,
                bool is_heap_object);

  // Check if the object in register heap_object is a string. Afterwards the
  // register map contains the object map and the register instance_type
  // contains the instance_type. The registers map and instance_type can be the
  // same in which case it contains the instance type afterwards. Either of the
  // registers map and instance_type can be the same as heap_object.
  Condition IsObjectStringType(Register heap_object,
                               Register map,
                               Register instance_type);

  // FCmp compares and pops the two values on top of the FPU stack.
  // The flag results are similar to integer cmp, but require unsigned
  // jcc instructions (je, ja, jae, jb, jbe, and jz).
  void FCmp();

  // Abort execution if argument is not a number. Used in debug code.
  void AbortIfNotNumber(Register object);

  // Abort execution if argument is a smi. Used in debug code.
  void AbortIfSmi(Register object);

  // Abort execution if argument is not a smi. Used in debug code.
  void AbortIfNotSmi(Register object);
  void AbortIfNotSmi(const Operand& object);

  // Abort execution if argument is not a string. Used in debug code.
  void AbortIfNotString(Register object);

  // Abort execution if argument is not the root value with the given index.
  void AbortIfNotRootValue(Register src,
                           Heap::RootListIndex root_value_index,
                           const char* message);

  // ---------------------------------------------------------------------------
  // Exception handling

  // Push a new try handler and link into try handler chain. The return
  // address must be pushed before calling this helper.
  void PushTryHandler(CodeLocation try_location, HandlerType type);

  // Unlink the stack handler on top of the stack from the try handler chain.
  void PopTryHandler();

  // Activate the top handler in the try handler chain and pass the
  // thrown value.
  void Throw(Register value);

  // Propagate an uncatchable exception out of the current JS stack.
  void ThrowUncatchable(UncatchableExceptionType type, Register value);

  // ---------------------------------------------------------------------------
  // Inline caching support

  // Generate code for checking access rights - used for security checks
  // on access to global objects across environments. The holder register
  // is left untouched, but the scratch register and kScratchRegister,
  // which must be different, are clobbered.
  void CheckAccessGlobalProxy(Register holder_reg,
                              Register scratch,
                              Label* miss);


  // ---------------------------------------------------------------------------
  // Allocation support

  // Allocate an object in new space. If the new space is exhausted, control
  // continues at the gc_required label. The allocated object is returned in
  // result and the end of the new object is returned in result_end. The
  // register scratch can be passed as no_reg in which case an additional
  // object reference will be added to the reloc info. The returned pointers
  // in result and result_end have not yet been tagged as heap objects. If
  // result_contains_top_on_entry is true the content of result is known to be
  // the allocation top on entry (could be result_end from a previous call to
  // AllocateInNewSpace). If result_contains_top_on_entry is true scratch
  // should be no_reg as it is never used.
  void AllocateInNewSpace(int object_size,
                          Register result,
                          Register result_end,
                          Register scratch,
                          Label* gc_required,
                          AllocationFlags flags);

  void AllocateInNewSpace(int header_size,
                          ScaleFactor element_size,
                          Register element_count,
                          Register result,
                          Register result_end,
                          Register scratch,
                          Label* gc_required,
                          AllocationFlags flags);

  void AllocateInNewSpace(Register object_size,
                          Register result,
                          Register result_end,
                          Register scratch,
                          Label* gc_required,
                          AllocationFlags flags);

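  // A minimal allocation sketch (illustrative only; the object size and the
  // register choices are hypothetical):
  //   Label gc_required;
  //   masm->AllocateInNewSpace(16, rax, rbx, rcx, &gc_required, TAG_OBJECT);
  //   // rax now holds the tagged object; rbx holds the allocation end.
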
  // Undo allocation in new space. The object passed and objects allocated
  // after it will no longer be allocated. Make sure that no pointers are left
  // to the object(s) no longer allocated as they would be invalid when
  // allocation is undone.
  void UndoAllocationInNewSpace(Register object);

  // Allocate a heap number in new space with undefined value. Returns
  // tagged pointer in result register, or jumps to gc_required if new
  // space is full.
  void AllocateHeapNumber(Register result,
                          Register scratch,
                          Label* gc_required);

  // Allocate a sequential string. All the header fields of the string object
  // are initialized.
  void AllocateTwoByteString(Register result,
                             Register length,
                             Register scratch1,
                             Register scratch2,
                             Register scratch3,
                             Label* gc_required);
  void AllocateAsciiString(Register result,
                           Register length,
                           Register scratch1,
                           Register scratch2,
                           Register scratch3,
                           Label* gc_required);

  // Allocate a raw cons string object. Only the map field of the result is
  // initialized.
  void AllocateConsString(Register result,
                          Register scratch1,
                          Register scratch2,
                          Label* gc_required);
  void AllocateAsciiConsString(Register result,
                               Register scratch1,
                               Register scratch2,
                               Label* gc_required);

  // ---------------------------------------------------------------------------
  // Support functions.

  // Check if result is zero and op is negative.
  void NegativeZeroTest(Register result, Register op, Label* then_label);

  // Check if result is zero and op is negative in code using jump targets.
  void NegativeZeroTest(CodeGenerator* cgen,
                        Register result,
                        Register op,
                        JumpTarget* then_target);

  // Check if result is zero and any of op1 and op2 are negative.
  // Register scratch is destroyed, and it must be different from op2.
  void NegativeZeroTest(Register result, Register op1, Register op2,
                        Register scratch, Label* then_label);

  // Try to get the function prototype of a function and put the value in
  // the result register. Checks that the function really is a
  // function and jumps to the miss label if the fast checks fail. The
  // function register will be untouched; the other register may be
  // clobbered.
  void TryGetFunctionPrototype(Register function,
                               Register result,
                               Label* miss);

  // Generates code for reporting that an illegal operation has
  // occurred.
  void IllegalOperation(int num_arguments);

  // Picks out an array index from the hash field.
  // Register use:
  //   hash - holds the index's hash. Clobbered.
  //   index - holds the overwritten index on exit.
  void IndexFromHash(Register hash, Register index);

  // Find the function context up the context chain.
  void LoadContext(Register dst, int context_chain_length);

  // Load the global function with the given index.
  void LoadGlobalFunction(int index, Register function);

  // Load the initial map from the global function. The registers
  // function and map can be the same.
  void LoadGlobalFunctionInitialMap(Register function, Register map);

  // ---------------------------------------------------------------------------
  // Runtime calls

  // Call a code stub.
  void CallStub(CodeStub* stub);

  // Call a code stub and return the code object called. Try to generate
  // the code if necessary. Do not perform a GC but instead return a retry
  // after GC failure.
  MUST_USE_RESULT MaybeObject* TryCallStub(CodeStub* stub);

  // Tail call a code stub (jump).
  void TailCallStub(CodeStub* stub);

  // Tail call a code stub (jump) and return the code object called. Try to
  // generate the code if necessary. Do not perform a GC but instead return
  // a retry after GC failure.
  MUST_USE_RESULT MaybeObject* TryTailCallStub(CodeStub* stub);

  // Return from a code stub after popping its arguments.
  void StubReturn(int argc);

  // Call a runtime routine.
  void CallRuntime(const Runtime::Function* f, int num_arguments);

  // Call a runtime function and save the value of XMM registers.
  void CallRuntimeSaveDoubles(Runtime::FunctionId id);

  // Call a runtime function, returning the CodeStub object called.
  // Try to generate the stub code if necessary. Do not perform a GC
  // but instead return a retry after GC failure.
  MUST_USE_RESULT MaybeObject* TryCallRuntime(const Runtime::Function* f,
                                              int num_arguments);

  // Convenience function: Same as above, but takes the fid instead.
  void CallRuntime(Runtime::FunctionId id, int num_arguments);

  // Convenience function: Same as above, but takes the fid instead.
  MUST_USE_RESULT MaybeObject* TryCallRuntime(Runtime::FunctionId id,
                                              int num_arguments);

  // Convenience function: call an external reference.
  void CallExternalReference(const ExternalReference& ext,
                             int num_arguments);

  // Tail call of a runtime routine (jump).
  // Like JumpToExternalReference, but also takes care of passing the number
  // of parameters.
  void TailCallExternalReference(const ExternalReference& ext,
                                 int num_arguments,
                                 int result_size);

  MUST_USE_RESULT MaybeObject* TryTailCallExternalReference(
      const ExternalReference& ext, int num_arguments, int result_size);

  // Convenience function: tail call a runtime routine (jump).
  void TailCallRuntime(Runtime::FunctionId fid,
                       int num_arguments,
                       int result_size);

  MUST_USE_RESULT MaybeObject* TryTailCallRuntime(Runtime::FunctionId fid,
                                                  int num_arguments,
                                                  int result_size);

  // Jump to a runtime routine.
  void JumpToExternalReference(const ExternalReference& ext, int result_size);

  // Jump to a runtime routine.
  MaybeObject* TryJumpToExternalReference(const ExternalReference& ext,
                                          int result_size);

  // Prepares stack to put arguments (aligns and so on).
  // The WIN64 calling convention requires the pointer to the return value
  // slot to be put into rcx (rcx must be preserved until
  // TryCallApiFunctionAndReturn). Saves context (rsi). Clobbers rax.
  // Allocates arg_stack_space * kPointerSize inside the exit frame (not GCed)
  // accessible via StackSpaceOperand.
  void PrepareCallApiFunction(int arg_stack_space);

  // Calls an API function. Allocates HandleScope, extracts
  // returned value from handle and propagates exceptions.
  // Clobbers r14, r15, rbx and caller-save registers. Restores context.
  // On return removes stack_space * kPointerSize (GCed).
  MUST_USE_RESULT MaybeObject* TryCallApiFunctionAndReturn(
      ApiFunction* function, int stack_space);

  // Before calling a C-function from generated code, align arguments on stack.
  // After aligning the frame, arguments must be stored in rsp[0], rsp[8],
  // etc., not pushed. The argument count assumes all arguments are word sized.
  // The number of slots reserved for arguments depends on platform. On Windows
  // stack slots are reserved for the arguments passed in registers. On other
  // platforms stack slots are only reserved for the arguments actually passed
  // on the stack.
  void PrepareCallCFunction(int num_arguments);

  // Calls a C function and cleans up the space for arguments allocated
  // by PrepareCallCFunction. The called function is not allowed to trigger a
  // garbage collection, since that might move the code and invalidate the
  // return address (unless this is somehow accounted for by the called
  // function).
  void CallCFunction(ExternalReference function, int num_arguments);
  void CallCFunction(Register function, int num_arguments);

  // Calculate the number of stack slots to reserve for arguments when calling
  // a C function.
  int ArgumentStackSlotsForCFunctionCall(int num_arguments);

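  // A minimal C-call sketch (illustrative only; |ext| is a hypothetical
  // ExternalReference to a C function taking one word-sized argument, and
  // rdi is the first argument register of the POSIX AMD64 convention):
  //   masm->PrepareCallCFunction(1);
  //   masm->movq(rdi, rax);  // First C argument (rcx on Windows).
  //   masm->CallCFunction(ext, 1);
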
  // ---------------------------------------------------------------------------
  // Utilities

  void Ret();

  // Return and drop arguments from stack, where the number of arguments
  // may be bigger than 2^16 - 1. Requires a scratch register.
  void Ret(int bytes_dropped, Register scratch);

  Handle<Object> CodeObject() { return code_object_; }

  // Copy length bytes from source to destination.
  // Uses scratch register internally (if you have a low-eight register
  // free, do use it, otherwise kScratchRegister will be used).
  // The min_length is a minimum limit on the value that length will have.
  // The algorithm has some special cases that might be omitted if the string
  // is known to always be long.
  void CopyBytes(Register destination,
                 Register source,
                 Register length,
                 int min_length = 0,
                 Register scratch = kScratchRegister);


  // ---------------------------------------------------------------------------
  // StatsCounter support

  void SetCounter(StatsCounter* counter, int value);
  void IncrementCounter(StatsCounter* counter, int value);
  void DecrementCounter(StatsCounter* counter, int value);


  // ---------------------------------------------------------------------------
  // Debugging

  // Calls Abort(msg) if the condition cc is not satisfied.
  // Use --debug_code to enable.
  void Assert(Condition cc, const char* msg);

  void AssertFastElements(Register elements);

  // Like Assert(), but always enabled.
  void Check(Condition cc, const char* msg);

  // Print a message to stdout and abort execution.
  void Abort(const char* msg);

  // Check that the stack is aligned.
  void CheckStackAlignment();

  // Verify restrictions about code generated in stubs.
  void set_generating_stub(bool value) { generating_stub_ = value; }
  bool generating_stub() { return generating_stub_; }
  void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
  bool allow_stub_calls() { return allow_stub_calls_; }

1079 private:
  // Order in which general-purpose registers are pushed by Pushad:
  // rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r14, r15.
Steve Block1e0659c2011-05-24 12:43:12 +01001082 static int kSafepointPushRegisterIndices[Register::kNumRegisters];
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001083 static const int kNumSafepointSavedRegisters = 11;
1084
Steve Blocka7e24c12009-10-30 11:49:00 +00001085 bool generating_stub_;
1086 bool allow_stub_calls_;
Steve Block44f0eee2011-05-26 01:26:41 +01001087 bool root_array_available_;
Steve Block8defd9f2010-07-08 12:39:36 +01001088
  // Returns a register holding the smi value. The register MUST NOT be
  // modified. It may be kSmiConstantRegister, which permanently holds smi 1.
1091 Register GetSmiConstant(Smi* value);
1092
1093 // Moves the smi value to the destination register.
1094 void LoadSmiConstant(Register dst, Smi* value);
1095
Andrei Popescu31002712010-02-23 13:46:05 +00001096 // This handle will be patched with the code object on installation.
1097 Handle<Object> code_object_;
Steve Blocka7e24c12009-10-30 11:49:00 +00001098
1099 // Helper functions for generating invokes.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001100 template <typename LabelType>
Steve Blocka7e24c12009-10-30 11:49:00 +00001101 void InvokePrologue(const ParameterCount& expected,
1102 const ParameterCount& actual,
1103 Handle<Code> code_constant,
1104 Register code_register,
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001105 LabelType* done,
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001106 InvokeFlag flag,
Steve Block44f0eee2011-05-26 01:26:41 +01001107 CallWrapper* call_wrapper);
Steve Blocka7e24c12009-10-30 11:49:00 +00001108
Steve Blocka7e24c12009-10-30 11:49:00 +00001109 // Activation support.
1110 void EnterFrame(StackFrame::Type type);
1111 void LeaveFrame(StackFrame::Type type);
1112
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001113 void EnterExitFramePrologue(bool save_rax);
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08001114
1115 // Allocates arg_stack_space * kPointerSize memory (not GCed) on the stack
1116 // accessible via StackSpaceOperand.
Steve Block1e0659c2011-05-24 12:43:12 +01001117 void EnterExitFrameEpilogue(int arg_stack_space, bool save_doubles);
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08001118
1119 void LeaveExitFrameEpilogue();
Ben Murdochbb769b22010-08-11 14:56:33 +01001120
Steve Blocka7e24c12009-10-30 11:49:00 +00001121 // Allocation support helpers.
  // Loads the top of new-space into the result register.
  // If flags contains RESULT_CONTAINS_TOP, the result register is assumed
  // to already contain the new-space allocation top.
  // Otherwise the address of the new-space top is loaded into scratch (if
  // scratch is valid), and the new-space top is loaded into result.
Steve Blocka7e24c12009-10-30 11:49:00 +00001125 void LoadAllocationTopHelper(Register result,
Steve Blocka7e24c12009-10-30 11:49:00 +00001126 Register scratch,
1127 AllocationFlags flags);
Steve Block6ded16b2010-05-10 14:33:55 +01001128 // Update allocation top with value in result_end register.
1129 // If scratch is valid, it contains the address of the allocation top.
Steve Blocka7e24c12009-10-30 11:49:00 +00001130 void UpdateAllocationTopHelper(Register result_end, Register scratch);
Ben Murdochbb769b22010-08-11 14:56:33 +01001131
  // Helper for PopHandleScope. If gc_allowed, it may perform a GC and
  // always returns NULL. If !gc_allowed, it does not perform a GC and may
  // instead return a failure object indicating an allocation failure.
1135 Object* PopHandleScopeHelper(Register saved,
1136 Register scratch,
1137 bool gc_allowed);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001138
1139
1140 // Compute memory operands for safepoint stack slots.
1141 Operand SafepointRegisterSlot(Register reg);
1142 static int SafepointRegisterStackIndex(int reg_code) {
1143 return kNumSafepointRegisters - kSafepointPushRegisterIndices[reg_code] - 1;
1144 }
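
  // Worked reading of the formula: the register Pushad pushes first (push
  // index 0) ends up deepest in the frame, farthest from rsp, so its slot
  // index is kNumSafepointRegisters - 0 - 1, the highest slot.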
1145
1146 // Needs access to SafepointRegisterStackIndex for optimized frame
1147 // traversal.
1148 friend class OptimizedFrame;
Steve Blocka7e24c12009-10-30 11:49:00 +00001149};
1150
1151
// The code patcher is used to patch (typically) small parts of code, e.g. for
// debugging and other types of instrumentation. When using the code patcher
// the exact number of bytes specified must be emitted. It is not legal to
// emit relocation information. If any of these constraints are violated an
// assertion fails.
1157class CodePatcher {
1158 public:
1159 CodePatcher(byte* address, int size);
1160 virtual ~CodePatcher();
1161
1162 // Macro assembler to emit code.
1163 MacroAssembler* masm() { return &masm_; }
1164
1165 private:
1166 byte* address_; // The address of the code being patched.
1167 int size_; // Number of bytes of the expected patch size.
1168 MacroAssembler masm_; // Macro assembler used to generate the code.
1169};
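
// Usage sketch (illustrative only; the address and the two-byte patch size
// are hypothetical):
//
//   {
//     CodePatcher patcher(address, 2);
//     patcher.masm()->int3();  // Overwrite two bytes with breakpoints.
//     patcher.masm()->int3();
//   }  // The destructor checks that exactly 2 bytes were emitted.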
1170
1171
// Helper class for generating code or data associated with the code
// right before or after a call instruction. For example, it can be used to
// generate safepoint data after calls for Crankshaft.
Steve Block44f0eee2011-05-26 01:26:41 +01001175class CallWrapper {
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001176 public:
Steve Block44f0eee2011-05-26 01:26:41 +01001177 CallWrapper() { }
1178 virtual ~CallWrapper() { }
1179 // Called just before emitting a call. Argument is the size of the generated
1180 // call code.
1181 virtual void BeforeCall(int call_size) = 0;
1182 // Called just after emitting a call, i.e., at the return site for the call.
1183 virtual void AfterCall() = 0;
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001184};
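
// A trivial concrete wrapper, as a sketch (a real client, such as the
// Crankshaft safepoint recorder, would do actual bookkeeping here):
//
//   class NopCallWrapper : public CallWrapper {
//    public:
//     virtual void BeforeCall(int call_size) { /* note the call size */ }
//     virtual void AfterCall() { /* e.g. record a safepoint */ }
//   };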
1185
1186
Steve Blocka7e24c12009-10-30 11:49:00 +00001187// -----------------------------------------------------------------------------
1188// Static helper functions.
1189
1190// Generate an Operand for loading a field from an object.
1191static inline Operand FieldOperand(Register object, int offset) {
1192 return Operand(object, offset - kHeapObjectTag);
1193}
1194
1195
1196// Generate an Operand for loading an indexed field from an object.
1197static inline Operand FieldOperand(Register object,
1198 Register index,
1199 ScaleFactor scale,
1200 int offset) {
1201 return Operand(object, index, scale, offset - kHeapObjectTag);
1202}
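
// For example, FieldOperand(rax, HeapObject::kMapOffset) addresses the map
// word of the object whose tagged pointer is in rax (subtracting
// kHeapObjectTag untags the pointer), and
// FieldOperand(rbx, rcx, times_pointer_size, FixedArray::kHeaderSize)
// addresses element rcx of a FixedArray held in rbx.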
1203
1204
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08001205static inline Operand ContextOperand(Register context, int index) {
1206 return Operand(context, Context::SlotOffset(index));
1207}
1208
1209
1210static inline Operand GlobalObjectOperand() {
1211 return ContextOperand(rsi, Context::GLOBAL_INDEX);
1212}
1213
1214
1215// Provides access to exit frame stack space (not GCed).
1216static inline Operand StackSpaceOperand(int index) {
1217#ifdef _WIN64
  // Win64 reserves four "shadow" stack slots for the register arguments.
  const int kShadowSpace = 4;
  return Operand(rsp, (index + kShadowSpace) * kPointerSize);
1220#else
1221 return Operand(rsp, index * kPointerSize);
1222#endif
1223}
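
// For example, after EnterExitFrameEpilogue(2, ...), StackSpaceOperand(0)
// and StackSpaceOperand(1) address the two reserved (non-GCed) slots; on
// Win64 they sit just above the four-slot shadow space.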
1224
1225
1226
Steve Blocka7e24c12009-10-30 11:49:00 +00001227#ifdef GENERATED_CODE_COVERAGE
1228extern void LogGeneratedCodeCoverage(const char* file_line);
1229#define CODE_COVERAGE_STRINGIFY(x) #x
1230#define CODE_COVERAGE_TOSTRING(x) CODE_COVERAGE_STRINGIFY(x)
1231#define __FILE_LINE__ __FILE__ ":" CODE_COVERAGE_TOSTRING(__LINE__)
1232#define ACCESS_MASM(masm) { \
1233 byte* x64_coverage_function = \
1234 reinterpret_cast<byte*>(FUNCTION_ADDR(LogGeneratedCodeCoverage)); \
1235 masm->pushfd(); \
1236 masm->pushad(); \
1237 masm->push(Immediate(reinterpret_cast<int>(&__FILE_LINE__))); \
1238 masm->call(x64_coverage_function, RelocInfo::RUNTIME_ENTRY); \
1239 masm->pop(rax); \
1240 masm->popad(); \
1241 masm->popfd(); \
1242 } \
1243 masm->
1244#else
1245#define ACCESS_MASM(masm) masm->
1246#endif
1247
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001248// -----------------------------------------------------------------------------
1249// Template implementations.
1250
static const int kSmiShift = kSmiTagSize + kSmiShiftSize;
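
// On x64, kSmiShift is 32: a smi keeps its 32-bit payload in the upper half
// of the word, so Smi::FromInt(v) is represented as
// static_cast<int64_t>(v) << 32, with the low 32 bits (tag included) zero.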
1252
1253
1254template <typename LabelType>
1255void MacroAssembler::SmiNeg(Register dst,
1256 Register src,
1257 LabelType* on_smi_result) {
1258 if (dst.is(src)) {
1259 ASSERT(!dst.is(kScratchRegister));
1260 movq(kScratchRegister, src);
1261 neg(dst); // Low 32 bits are retained as zero by negation.
1262 // Test if result is zero or Smi::kMinValue.
1263 cmpq(dst, kScratchRegister);
1264 j(not_equal, on_smi_result);
1265 movq(src, kScratchRegister);
1266 } else {
1267 movq(dst, src);
1268 neg(dst);
1269 cmpq(dst, src);
1270 // If the result is zero or Smi::kMinValue, negation failed to create a smi.
1271 j(not_equal, on_smi_result);
1272 }
1273}
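
// Only two smi inputs negate to themselves: 0 (whose negation is the
// non-smi value -0.0) and Smi::kMinValue (whose negation overflows the smi
// range), so comparing the result with the original value catches exactly
// the failure cases.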
1274
1275
1276template <typename LabelType>
1277void MacroAssembler::SmiAdd(Register dst,
1278 Register src1,
1279 Register src2,
1280 LabelType* on_not_smi_result) {
1281 ASSERT_NOT_NULL(on_not_smi_result);
1282 ASSERT(!dst.is(src2));
1283 if (dst.is(src1)) {
1284 movq(kScratchRegister, src1);
1285 addq(kScratchRegister, src2);
1286 j(overflow, on_not_smi_result);
1287 movq(dst, kScratchRegister);
1288 } else {
1289 movq(dst, src1);
1290 addq(dst, src2);
1291 j(overflow, on_not_smi_result);
1292 }
1293}
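
// Usage sketch (illustrative): with smi operands in rax and rbx, add them
// into rcx and branch to a slow path if the sum leaves the smi range:
//
//   NearLabel not_smi_result, done;
//   masm->SmiAdd(rcx, rax, rbx, &not_smi_result);
//   masm->jmp(&done);              // Fast path produced a smi in rcx.
//   masm->bind(&not_smi_result);   // Slow path: rax and rbx unclobbered.
//   // ... call a stub or runtime function ...
//   masm->bind(&done);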
1294
1295
1296template <typename LabelType>
Steve Block44f0eee2011-05-26 01:26:41 +01001297void MacroAssembler::SmiAdd(Register dst,
1298 Register src1,
1299 const Operand& src2,
1300 LabelType* on_not_smi_result) {
1301 ASSERT_NOT_NULL(on_not_smi_result);
1302 if (dst.is(src1)) {
1303 movq(kScratchRegister, src1);
1304 addq(kScratchRegister, src2);
1305 j(overflow, on_not_smi_result);
1306 movq(dst, kScratchRegister);
1307 } else {
1308 ASSERT(!src2.AddressUsesRegister(dst));
1309 movq(dst, src1);
1310 addq(dst, src2);
1311 j(overflow, on_not_smi_result);
1312 }
1313}
1314
1315
1316template <typename LabelType>
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001317void MacroAssembler::SmiSub(Register dst,
1318 Register src1,
1319 Register src2,
1320 LabelType* on_not_smi_result) {
1321 ASSERT_NOT_NULL(on_not_smi_result);
1322 ASSERT(!dst.is(src2));
1323 if (dst.is(src1)) {
1324 cmpq(dst, src2);
1325 j(overflow, on_not_smi_result);
1326 subq(dst, src2);
1327 } else {
1328 movq(dst, src1);
1329 subq(dst, src2);
1330 j(overflow, on_not_smi_result);
1331 }
1332}
1333
1334
1335template <typename LabelType>
1336void MacroAssembler::SmiSub(Register dst,
1337 Register src1,
1338 const Operand& src2,
1339 LabelType* on_not_smi_result) {
1340 ASSERT_NOT_NULL(on_not_smi_result);
1341 if (dst.is(src1)) {
1342 movq(kScratchRegister, src2);
1343 cmpq(src1, kScratchRegister);
1344 j(overflow, on_not_smi_result);
1345 subq(src1, kScratchRegister);
1346 } else {
1347 movq(dst, src1);
1348 subq(dst, src2);
1349 j(overflow, on_not_smi_result);
1350 }
1351}
1352
1353
1354template <typename LabelType>
1355void MacroAssembler::SmiMul(Register dst,
1356 Register src1,
1357 Register src2,
1358 LabelType* on_not_smi_result) {
1359 ASSERT(!dst.is(src2));
1360 ASSERT(!dst.is(kScratchRegister));
1361 ASSERT(!src1.is(kScratchRegister));
1362 ASSERT(!src2.is(kScratchRegister));
1363
1364 if (dst.is(src1)) {
1365 NearLabel failure, zero_correct_result;
1366 movq(kScratchRegister, src1); // Create backup for later testing.
1367 SmiToInteger64(dst, src1);
1368 imul(dst, src2);
1369 j(overflow, &failure);
1370
1371 // Check for negative zero result. If product is zero, and one
1372 // argument is negative, go to slow case.
1373 NearLabel correct_result;
1374 testq(dst, dst);
1375 j(not_zero, &correct_result);
1376
1377 movq(dst, kScratchRegister);
1378 xor_(dst, src2);
1379 j(positive, &zero_correct_result); // Result was positive zero.
1380
1381 bind(&failure); // Reused failure exit, restores src1.
1382 movq(src1, kScratchRegister);
1383 jmp(on_not_smi_result);
1384
1385 bind(&zero_correct_result);
Steve Block9fac8402011-05-12 15:51:54 +01001386 Set(dst, 0);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001387
1388 bind(&correct_result);
1389 } else {
1390 SmiToInteger64(dst, src1);
1391 imul(dst, src2);
1392 j(overflow, on_not_smi_result);
1393 // Check for negative zero result. If product is zero, and one
1394 // argument is negative, go to slow case.
1395 NearLabel correct_result;
1396 testq(dst, dst);
1397 j(not_zero, &correct_result);
    // One of src1 and src2 is zero; check whether the other is negative.
1400 movq(kScratchRegister, src1);
1401 xor_(kScratchRegister, src2);
1402 j(negative, on_not_smi_result);
1403 bind(&correct_result);
1404 }
1405}
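
// The negative-zero checks above exist because, e.g., -1 * 0 must produce
// the JavaScript value -0, which no smi can represent; when the product is
// zero, xoring the original operands and testing the sign is a cheap way to
// find out whether exactly one of them was negative.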
1406
1407
1408template <typename LabelType>
1409void MacroAssembler::SmiTryAddConstant(Register dst,
1410 Register src,
1411 Smi* constant,
1412 LabelType* on_not_smi_result) {
1413 // Does not assume that src is a smi.
1414 ASSERT_EQ(static_cast<int>(1), static_cast<int>(kSmiTagMask));
1415 ASSERT_EQ(0, kSmiTag);
1416 ASSERT(!dst.is(kScratchRegister));
1417 ASSERT(!src.is(kScratchRegister));
1418
1419 JumpIfNotSmi(src, on_not_smi_result);
1420 Register tmp = (dst.is(src) ? kScratchRegister : dst);
1421 LoadSmiConstant(tmp, constant);
1422 addq(tmp, src);
1423 j(overflow, on_not_smi_result);
1424 if (dst.is(src)) {
1425 movq(dst, tmp);
1426 }
1427}
1428
1429
1430template <typename LabelType>
1431void MacroAssembler::SmiAddConstant(Register dst,
1432 Register src,
1433 Smi* constant,
1434 LabelType* on_not_smi_result) {
1435 if (constant->value() == 0) {
1436 if (!dst.is(src)) {
1437 movq(dst, src);
1438 }
1439 } else if (dst.is(src)) {
1440 ASSERT(!dst.is(kScratchRegister));
1441
1442 LoadSmiConstant(kScratchRegister, constant);
1443 addq(kScratchRegister, src);
1444 j(overflow, on_not_smi_result);
1445 movq(dst, kScratchRegister);
1446 } else {
1447 LoadSmiConstant(dst, constant);
1448 addq(dst, src);
1449 j(overflow, on_not_smi_result);
1450 }
1451}
1452
1453
1454template <typename LabelType>
1455void MacroAssembler::SmiSubConstant(Register dst,
1456 Register src,
1457 Smi* constant,
1458 LabelType* on_not_smi_result) {
1459 if (constant->value() == 0) {
1460 if (!dst.is(src)) {
1461 movq(dst, src);
1462 }
1463 } else if (dst.is(src)) {
1464 ASSERT(!dst.is(kScratchRegister));
1465 if (constant->value() == Smi::kMinValue) {
1466 // Subtracting min-value from any non-negative value will overflow.
1467 // We test the non-negativeness before doing the subtraction.
1468 testq(src, src);
1469 j(not_sign, on_not_smi_result);
1470 LoadSmiConstant(kScratchRegister, constant);
1471 subq(dst, kScratchRegister);
1472 } else {
1473 // Subtract by adding the negation.
1474 LoadSmiConstant(kScratchRegister, Smi::FromInt(-constant->value()));
1475 addq(kScratchRegister, dst);
1476 j(overflow, on_not_smi_result);
1477 movq(dst, kScratchRegister);
1478 }
1479 } else {
1480 if (constant->value() == Smi::kMinValue) {
1481 // Subtracting min-value from any non-negative value will overflow.
1482 // We test the non-negativeness before doing the subtraction.
1483 testq(src, src);
1484 j(not_sign, on_not_smi_result);
1485 LoadSmiConstant(dst, constant);
1486 // Adding and subtracting the min-value gives the same result, it only
1487 // differs on the overflow bit, which we don't check here.
1488 addq(dst, src);
1489 } else {
1490 // Subtract by adding the negation.
1491 LoadSmiConstant(dst, Smi::FromInt(-(constant->value())));
1492 addq(dst, src);
1493 j(overflow, on_not_smi_result);
1494 }
1495 }
1496}
1497
1498
1499template <typename LabelType>
1500void MacroAssembler::SmiDiv(Register dst,
1501 Register src1,
1502 Register src2,
1503 LabelType* on_not_smi_result) {
1504 ASSERT(!src1.is(kScratchRegister));
1505 ASSERT(!src2.is(kScratchRegister));
1506 ASSERT(!dst.is(kScratchRegister));
1507 ASSERT(!src2.is(rax));
1508 ASSERT(!src2.is(rdx));
1509 ASSERT(!src1.is(rdx));
1510
1511 // Check for 0 divisor (result is +/-Infinity).
1512 NearLabel positive_divisor;
1513 testq(src2, src2);
1514 j(zero, on_not_smi_result);
1515
1516 if (src1.is(rax)) {
1517 movq(kScratchRegister, src1);
1518 }
1519 SmiToInteger32(rax, src1);
1520 // We need to rule out dividing Smi::kMinValue by -1, since that would
1521 // overflow in idiv and raise an exception.
1522 // We combine this with negative zero test (negative zero only happens
1523 // when dividing zero by a negative number).
1524
1525 // We overshoot a little and go to slow case if we divide min-value
1526 // by any negative value, not just -1.
1527 NearLabel safe_div;
1528 testl(rax, Immediate(0x7fffffff));
1529 j(not_zero, &safe_div);
1530 testq(src2, src2);
1531 if (src1.is(rax)) {
1532 j(positive, &safe_div);
1533 movq(src1, kScratchRegister);
1534 jmp(on_not_smi_result);
1535 } else {
1536 j(negative, on_not_smi_result);
1537 }
1538 bind(&safe_div);
1539
1540 SmiToInteger32(src2, src2);
1541 // Sign extend src1 into edx:eax.
1542 cdq();
1543 idivl(src2);
1544 Integer32ToSmi(src2, src2);
1545 // Check that the remainder is zero.
1546 testl(rdx, rdx);
1547 if (src1.is(rax)) {
1548 NearLabel smi_result;
1549 j(zero, &smi_result);
1550 movq(src1, kScratchRegister);
1551 jmp(on_not_smi_result);
1552 bind(&smi_result);
1553 } else {
1554 j(not_zero, on_not_smi_result);
1555 }
1556 if (!dst.is(src1) && src1.is(rax)) {
1557 movq(src1, kScratchRegister);
1558 }
1559 Integer32ToSmi(dst, rax);
1560}
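
// Why the min-value check above: Smi::kMinValue is -2^31, and -2^31 / -1 is
// 2^31, which fits neither in the 32-bit quotient register (idivl would
// raise a #DE exception) nor in the smi range, so dividing min-value by any
// negative divisor conservatively takes the slow path.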
1561
1562
1563template <typename LabelType>
1564void MacroAssembler::SmiMod(Register dst,
1565 Register src1,
1566 Register src2,
1567 LabelType* on_not_smi_result) {
1568 ASSERT(!dst.is(kScratchRegister));
1569 ASSERT(!src1.is(kScratchRegister));
1570 ASSERT(!src2.is(kScratchRegister));
1571 ASSERT(!src2.is(rax));
1572 ASSERT(!src2.is(rdx));
1573 ASSERT(!src1.is(rdx));
1574 ASSERT(!src1.is(src2));
1575
1576 testq(src2, src2);
1577 j(zero, on_not_smi_result);
1578
1579 if (src1.is(rax)) {
1580 movq(kScratchRegister, src1);
1581 }
1582 SmiToInteger32(rax, src1);
1583 SmiToInteger32(src2, src2);
1584
1585 // Test for the edge case of dividing Smi::kMinValue by -1 (will overflow).
1586 NearLabel safe_div;
1587 cmpl(rax, Immediate(Smi::kMinValue));
1588 j(not_equal, &safe_div);
1589 cmpl(src2, Immediate(-1));
1590 j(not_equal, &safe_div);
1591 // Retag inputs and go slow case.
1592 Integer32ToSmi(src2, src2);
1593 if (src1.is(rax)) {
1594 movq(src1, kScratchRegister);
1595 }
1596 jmp(on_not_smi_result);
1597 bind(&safe_div);
1598
1599 // Sign extend eax into edx:eax.
1600 cdq();
1601 idivl(src2);
1602 // Restore smi tags on inputs.
1603 Integer32ToSmi(src2, src2);
1604 if (src1.is(rax)) {
1605 movq(src1, kScratchRegister);
1606 }
1607 // Check for a negative zero result. If the result is zero, and the
1608 // dividend is negative, go slow to return a floating point negative zero.
1609 NearLabel smi_result;
1610 testl(rdx, rdx);
1611 j(not_zero, &smi_result);
1612 testq(src1, src1);
1613 j(negative, on_not_smi_result);
1614 bind(&smi_result);
1615 Integer32ToSmi(dst, rdx);
1616}
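
// Example of the negative-zero case: in JavaScript, -10 % 5 evaluates to -0,
// not 0. A smi cannot represent -0, so a zero remainder combined with a
// negative dividend must fall through to the slow (heap-number) path.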
1617
1618
1619template <typename LabelType>
1620void MacroAssembler::SmiShiftLogicalRightConstant(
1621 Register dst, Register src, int shift_value, LabelType* on_not_smi_result) {
  // Logical right shift interprets its result as an *unsigned* number.
1623 if (dst.is(src)) {
1624 UNIMPLEMENTED(); // Not used.
1625 } else {
1626 movq(dst, src);
1627 if (shift_value == 0) {
1628 testq(dst, dst);
1629 j(negative, on_not_smi_result);
1630 }
1631 shr(dst, Immediate(shift_value + kSmiShift));
1632 shl(dst, Immediate(kSmiShift));
1633 }
1634}
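
// The shift_value == 0 test above: a logical right shift by zero should
// yield the *unsigned* reinterpretation of the operand, which for a negative
// smi exceeds Smi::kMaxValue, so that case must bail out; any shift of one
// or more always produces a value small enough to retag as a smi.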
1635
1636
1637template <typename LabelType>
1638void MacroAssembler::SmiShiftLogicalRight(Register dst,
1639 Register src1,
1640 Register src2,
1641 LabelType* on_not_smi_result) {
1642 ASSERT(!dst.is(kScratchRegister));
1643 ASSERT(!src1.is(kScratchRegister));
1644 ASSERT(!src2.is(kScratchRegister));
1645 ASSERT(!dst.is(rcx));
Steve Block1e0659c2011-05-24 12:43:12 +01001646 // dst and src1 can be the same, because the one case that bails out
1647 // is a shift by 0, which leaves dst, and therefore src1, unchanged.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001648 NearLabel result_ok;
1649 if (src1.is(rcx) || src2.is(rcx)) {
1650 movq(kScratchRegister, rcx);
1651 }
1652 if (!dst.is(src1)) {
1653 movq(dst, src1);
1654 }
1655 SmiToInteger32(rcx, src2);
1656 orl(rcx, Immediate(kSmiShift));
  shr_cl(dst);  // Shift amount is (rcx & 0x1f) + 32.
1658 shl(dst, Immediate(kSmiShift));
1659 testq(dst, dst);
1660 if (src1.is(rcx) || src2.is(rcx)) {
1661 NearLabel positive_result;
1662 j(positive, &positive_result);
1663 if (src1.is(rcx)) {
1664 movq(src1, kScratchRegister);
1665 } else {
1666 movq(src2, kScratchRegister);
1667 }
1668 jmp(on_not_smi_result);
1669 bind(&positive_result);
1670 } else {
1671 j(negative, on_not_smi_result); // src2 was zero and src1 negative.
1672 }
1673}
1674
1675
1676template <typename LabelType>
1677void MacroAssembler::SelectNonSmi(Register dst,
1678 Register src1,
1679 Register src2,
1680 LabelType* on_not_smis) {
1681 ASSERT(!dst.is(kScratchRegister));
1682 ASSERT(!src1.is(kScratchRegister));
1683 ASSERT(!src2.is(kScratchRegister));
1684 ASSERT(!dst.is(src1));
1685 ASSERT(!dst.is(src2));
1686 // Both operands must not be smis.
1687#ifdef DEBUG
1688 if (allow_stub_calls()) { // Check contains a stub call.
  if (allow_stub_calls()) {  // Check() contains a stub call.
1690 Check(not_both_smis, "Both registers were smis in SelectNonSmi.");
1691 }
1692#endif
1693 ASSERT_EQ(0, kSmiTag);
1694 ASSERT_EQ(0, Smi::FromInt(0));
1695 movl(kScratchRegister, Immediate(kSmiTagMask));
1696 and_(kScratchRegister, src1);
1697 testl(kScratchRegister, src2);
  // If non-zero, then both operands are heap objects; neither is a smi.
1699 j(not_zero, on_not_smis);
1700
1701 // Exactly one operand is a smi.
1702 ASSERT_EQ(1, static_cast<int>(kSmiTagMask));
  // kScratchRegister still holds src1 & kSmiTagMask, which is either zero
  // or one.
1704 subq(kScratchRegister, Immediate(1));
  // If src1 is a smi, the scratch register is now all 1s, else all 0s.
1706 movq(dst, src1);
1707 xor_(dst, src2);
1708 and_(dst, kScratchRegister);
1709 // If src1 is a smi, dst holds src1 ^ src2, else it is zero.
1710 xor_(dst, src1);
1711 // If src1 is a smi, dst is src2, else it is src1, i.e., the non-smi.
1712}
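
// Worked example of the bit trick: if src1 is a smi, its tag bit is 0, so
// kScratchRegister becomes 0 - 1 (all 1s) and
// dst = ((src1 ^ src2) & ~0) ^ src1 == src2, the non-smi. If src2 is the
// smi instead, kScratchRegister is 1 - 1 = 0, so
// dst = ((src1 ^ src2) & 0) ^ src1 == src1.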
1713
1714
1715template <typename LabelType>
1716void MacroAssembler::JumpIfSmi(Register src, LabelType* on_smi) {
1717 ASSERT_EQ(0, kSmiTag);
1718 Condition smi = CheckSmi(src);
1719 j(smi, on_smi);
1720}
1721
1722
1723template <typename LabelType>
1724void MacroAssembler::JumpIfNotSmi(Register src, LabelType* on_not_smi) {
1725 Condition smi = CheckSmi(src);
1726 j(NegateCondition(smi), on_not_smi);
1727}
1728
1729
1730template <typename LabelType>
Ben Murdochf87a2032010-10-22 12:50:53 +01001731void MacroAssembler::JumpUnlessNonNegativeSmi(
1732 Register src, LabelType* on_not_smi_or_negative) {
1733 Condition non_negative_smi = CheckNonNegativeSmi(src);
1734 j(NegateCondition(non_negative_smi), on_not_smi_or_negative);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001735}
1736
1737
1738template <typename LabelType>
1739void MacroAssembler::JumpIfSmiEqualsConstant(Register src,
1740 Smi* constant,
1741 LabelType* on_equals) {
1742 SmiCompare(src, constant);
1743 j(equal, on_equals);
1744}
1745
1746
1747template <typename LabelType>
1748void MacroAssembler::JumpIfNotValidSmiValue(Register src,
1749 LabelType* on_invalid) {
1750 Condition is_valid = CheckInteger32ValidSmiValue(src);
1751 j(NegateCondition(is_valid), on_invalid);
1752}
1753
1754
1755template <typename LabelType>
1756void MacroAssembler::JumpIfUIntNotValidSmiValue(Register src,
1757 LabelType* on_invalid) {
1758 Condition is_valid = CheckUInteger32ValidSmiValue(src);
1759 j(NegateCondition(is_valid), on_invalid);
1760}
1761
1762
1763template <typename LabelType>
1764void MacroAssembler::JumpIfNotBothSmi(Register src1,
1765 Register src2,
1766 LabelType* on_not_both_smi) {
1767 Condition both_smi = CheckBothSmi(src1, src2);
1768 j(NegateCondition(both_smi), on_not_both_smi);
1769}
1770
1771
1772template <typename LabelType>
Ben Murdochf87a2032010-10-22 12:50:53 +01001773void MacroAssembler::JumpUnlessBothNonNegativeSmi(Register src1,
1774 Register src2,
1775 LabelType* on_not_both_smi) {
1776 Condition both_smi = CheckBothNonNegativeSmi(src1, src2);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001777 j(NegateCondition(both_smi), on_not_both_smi);
1778}
1779
1780
1781template <typename LabelType>
Steve Block1e0659c2011-05-24 12:43:12 +01001782void MacroAssembler::JumpIfNotString(Register object,
1783 Register object_map,
1784 LabelType* not_string) {
1785 Condition is_smi = CheckSmi(object);
1786 j(is_smi, not_string);
1787 CmpObjectType(object, FIRST_NONSTRING_TYPE, object_map);
1788 j(above_equal, not_string);
1789}
1790
1791
1792template <typename LabelType>
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001793void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register first_object,
1794 Register second_object,
1795 Register scratch1,
1796 Register scratch2,
1797 LabelType* on_fail) {
1798 // Check that both objects are not smis.
1799 Condition either_smi = CheckEitherSmi(first_object, second_object);
1800 j(either_smi, on_fail);
1801
1802 // Load instance type for both strings.
1803 movq(scratch1, FieldOperand(first_object, HeapObject::kMapOffset));
1804 movq(scratch2, FieldOperand(second_object, HeapObject::kMapOffset));
1805 movzxbl(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
1806 movzxbl(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));
1807
1808 // Check that both are flat ascii strings.
1809 ASSERT(kNotStringTag != 0);
1810 const int kFlatAsciiStringMask =
1811 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
1812 const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
1813
1814 andl(scratch1, Immediate(kFlatAsciiStringMask));
1815 andl(scratch2, Immediate(kFlatAsciiStringMask));
1816 // Interleave the bits to check both scratch1 and scratch2 in one test.
1817 ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
1818 lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
1819 cmpl(scratch1,
1820 Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
1821 j(not_equal, on_fail);
1822}
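
// The interleaving trick above: lea computes scratch1 + scratch2 * 8, i.e.
// scratch1 | (scratch2 << 3), because the ASSERT_EQ guarantees the mask does
// not overlap itself shifted left by three. The two masked instance types
// therefore occupy disjoint bits, and one cmpl against
// kFlatAsciiStringTag + (kFlatAsciiStringTag << 3) tests both strings.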
1823
1824
1825template <typename LabelType>
1826void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
1827 Register instance_type,
1828 Register scratch,
1829 LabelType *failure) {
1830 if (!scratch.is(instance_type)) {
1831 movl(scratch, instance_type);
1832 }
1833
1834 const int kFlatAsciiStringMask =
1835 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
1836
1837 andl(scratch, Immediate(kFlatAsciiStringMask));
1838 cmpl(scratch, Immediate(kStringTag | kSeqStringTag | kAsciiStringTag));
1839 j(not_equal, failure);
1840}
1841
1842
1843template <typename LabelType>
1844void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialAscii(
1845 Register first_object_instance_type,
1846 Register second_object_instance_type,
1847 Register scratch1,
1848 Register scratch2,
1849 LabelType* on_fail) {
1850 // Load instance type for both strings.
1851 movq(scratch1, first_object_instance_type);
1852 movq(scratch2, second_object_instance_type);
1853
1854 // Check that both are flat ascii strings.
1855 ASSERT(kNotStringTag != 0);
1856 const int kFlatAsciiStringMask =
1857 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
1858 const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
1859
1860 andl(scratch1, Immediate(kFlatAsciiStringMask));
1861 andl(scratch2, Immediate(kFlatAsciiStringMask));
1862 // Interleave the bits to check both scratch1 and scratch2 in one test.
1863 ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
1864 lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
1865 cmpl(scratch1,
1866 Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
1867 j(not_equal, on_fail);
1868}
1869
1870
1871template <typename LabelType>
1872void MacroAssembler::InNewSpace(Register object,
1873 Register scratch,
1874 Condition cc,
1875 LabelType* branch) {
1876 if (Serializer::enabled()) {
    // Can't do arithmetic on external references if the code might get
    // serialized. The mask isn't really an address. We load it as an
    // external reference in case the size of the new space is different
    // between the snapshot maker and the running system.
1881 if (scratch.is(object)) {
Steve Block44f0eee2011-05-26 01:26:41 +01001882 movq(kScratchRegister, ExternalReference::new_space_mask(isolate()));
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001883 and_(scratch, kScratchRegister);
1884 } else {
Steve Block44f0eee2011-05-26 01:26:41 +01001885 movq(scratch, ExternalReference::new_space_mask(isolate()));
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001886 and_(scratch, object);
1887 }
Steve Block44f0eee2011-05-26 01:26:41 +01001888 movq(kScratchRegister, ExternalReference::new_space_start(isolate()));
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001889 cmpq(scratch, kScratchRegister);
1890 j(cc, branch);
1891 } else {
Steve Block44f0eee2011-05-26 01:26:41 +01001892 ASSERT(is_int32(static_cast<int64_t>(HEAP->NewSpaceMask())));
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001893 intptr_t new_space_start =
Steve Block44f0eee2011-05-26 01:26:41 +01001894 reinterpret_cast<intptr_t>(HEAP->NewSpaceStart());
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001895 movq(kScratchRegister, -new_space_start, RelocInfo::NONE);
1896 if (scratch.is(object)) {
1897 addq(scratch, kScratchRegister);
1898 } else {
1899 lea(scratch, Operand(object, kScratchRegister, times_1, 0));
1900 }
Steve Block44f0eee2011-05-26 01:26:41 +01001901 and_(scratch, Immediate(static_cast<int32_t>(HEAP->NewSpaceMask())));
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001902 j(cc, branch);
1903 }
1904}
1905
1906
1907template <typename LabelType>
1908void MacroAssembler::InvokePrologue(const ParameterCount& expected,
1909 const ParameterCount& actual,
1910 Handle<Code> code_constant,
1911 Register code_register,
1912 LabelType* done,
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001913 InvokeFlag flag,
Steve Block44f0eee2011-05-26 01:26:41 +01001914 CallWrapper* call_wrapper) {
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001915 bool definitely_matches = false;
1916 NearLabel invoke;
1917 if (expected.is_immediate()) {
1918 ASSERT(actual.is_immediate());
1919 if (expected.immediate() == actual.immediate()) {
1920 definitely_matches = true;
1921 } else {
1922 Set(rax, actual.immediate());
1923 if (expected.immediate() ==
1924 SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
1925 // Don't worry about adapting arguments for built-ins that
        // don't want that done. Skip adaptation code by making it look
1927 // like we have a match between expected and actual number of
1928 // arguments.
1929 definitely_matches = true;
1930 } else {
1931 Set(rbx, expected.immediate());
1932 }
1933 }
1934 } else {
1935 if (actual.is_immediate()) {
1936 // Expected is in register, actual is immediate. This is the
1937 // case when we invoke function values without going through the
1938 // IC mechanism.
1939 cmpq(expected.reg(), Immediate(actual.immediate()));
1940 j(equal, &invoke);
1941 ASSERT(expected.reg().is(rbx));
1942 Set(rax, actual.immediate());
1943 } else if (!expected.reg().is(actual.reg())) {
1944 // Both expected and actual are in (different) registers. This
1945 // is the case when we invoke functions using call and apply.
1946 cmpq(expected.reg(), actual.reg());
1947 j(equal, &invoke);
1948 ASSERT(actual.reg().is(rax));
1949 ASSERT(expected.reg().is(rbx));
1950 }
1951 }
1952
1953 if (!definitely_matches) {
Steve Block44f0eee2011-05-26 01:26:41 +01001954 Handle<Code> adaptor = isolate()->builtins()->ArgumentsAdaptorTrampoline();
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001955 if (!code_constant.is_null()) {
1956 movq(rdx, code_constant, RelocInfo::EMBEDDED_OBJECT);
1957 addq(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
1958 } else if (!code_register.is(rdx)) {
1959 movq(rdx, code_register);
1960 }
1961
1962 if (flag == CALL_FUNCTION) {
Steve Block44f0eee2011-05-26 01:26:41 +01001963 if (call_wrapper != NULL) call_wrapper->BeforeCall(CallSize(adaptor));
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001964 Call(adaptor, RelocInfo::CODE_TARGET);
Steve Block44f0eee2011-05-26 01:26:41 +01001965 if (call_wrapper != NULL) call_wrapper->AfterCall();
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001966 jmp(done);
1967 } else {
1968 Jump(adaptor, RelocInfo::CODE_TARGET);
1969 }
1970 bind(&invoke);
1971 }
1972}
1973
Steve Blocka7e24c12009-10-30 11:49:00 +00001974
1975} } // namespace v8::internal
1976
1977#endif // V8_X64_MACRO_ASSEMBLER_X64_H_