// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_X64_MACRO_ASSEMBLER_X64_H_
#define V8_X64_MACRO_ASSEMBLER_X64_H_

#include "assembler.h"

namespace v8 {
namespace internal {

// Flags used for the AllocateInNewSpace functions.
enum AllocationFlags {
  // No special flags.
  NO_ALLOCATION_FLAGS = 0,
  // Return the pointer to the allocated memory already tagged as a heap
  // object.
  TAG_OBJECT = 1 << 0,
  // The content of the result register already contains the allocation top in
  // new space.
  RESULT_CONTAINS_TOP = 1 << 1
};

// Default scratch register used by MacroAssembler (and other code that needs
// a spare register). The register isn't callee-saved and isn't used by the
// function calling convention.
static const Register kScratchRegister = { 10 };      // r10.
static const Register kSmiConstantRegister = { 12 };  // r12 (callee save).
static const Register kRootRegister = { 13 };         // r13 (callee save).
// Value of smi in kSmiConstantRegister.
static const int kSmiConstantRegisterValue = 1;
// The actual value of the root register is offset from the root array's start
// to take advantage of negative 8-bit displacement values.
static const int kRootRegisterBias = 128;

// Convenience for platform-independent signatures.
typedef Operand MemOperand;

// Forward declarations.
class JumpTarget;
class CallWrapper;

struct SmiIndex {
  SmiIndex(Register index_register, ScaleFactor scale)
      : reg(index_register),
        scale(scale) {}
  Register reg;
  ScaleFactor scale;
};

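// For example (an illustrative note, not from the original header): a
// SmiIndex of {rcx, times_8} describes the scaled index rcx * 8, ready to
// be folded into an x64 addressing mode such as
// Operand(base, rcx, times_8, disp).
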
// MacroAssembler implements a collection of frequently used macros.
class MacroAssembler: public Assembler {
 public:
  // The isolate parameter can be NULL if the macro assembler should
  // not use isolate-dependent functionality. In this case, it's the
  // responsibility of the caller to never invoke such a function on the
  // macro assembler.
  MacroAssembler(Isolate* isolate, void* buffer, int size);

  // Prevent the use of the RootArray during the lifetime of this
  // scope object.
  class NoRootArrayScope BASE_EMBEDDED {
   public:
    explicit NoRootArrayScope(MacroAssembler* assembler)
        : variable_(&assembler->root_array_available_),
          old_value_(assembler->root_array_available_) {
      assembler->root_array_available_ = false;
    }
    ~NoRootArrayScope() {
      *variable_ = old_value_;
    }
   private:
    bool* variable_;
    bool old_value_;
  };

  // Operand pointing to an external reference.
  // May emit code to set up the scratch register. The operand is
  // only guaranteed to be correct as long as the scratch register
  // isn't changed.
  // If the operand is used more than once, use a scratch register
  // that is guaranteed not to be clobbered.
  Operand ExternalOperand(ExternalReference reference,
                          Register scratch = kScratchRegister);
  // Loads and stores the value of an external reference.
  // Special case code for load and store to take advantage of
  // load_rax/store_rax if possible/necessary.
  // For other operations, just use:
  //   Operand operand = ExternalOperand(extref);
  //   operation(operand, ..);
  void Load(Register destination, ExternalReference source);
  void Store(ExternalReference destination, Register source);
  // Loads the address of the external reference into the destination
  // register.
  void LoadAddress(Register destination, ExternalReference source);
  // Returns the size of the code generated by LoadAddress.
  // Used by CallSize(ExternalReference) to find the size of a call.
  int LoadAddressSize(ExternalReference source);

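  // Illustrative sketch (an assumption, not part of the original header):
  // reading the same external address twice with a scratch register that is
  // known to stay live between the uses:
  //   Operand op = masm->ExternalOperand(extref, r11);
  //   masm->movq(rax, op);
  //   masm->movq(rbx, op);  // Still valid; r11 has not been clobbered.
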
  // Operations on roots in the root-array.
  void LoadRoot(Register destination, Heap::RootListIndex index);
  void StoreRoot(Register source, Heap::RootListIndex index);
  // Load a root value where the index (or part of it) is variable.
  // The variable_offset register is added to the fixed_offset value
  // to get the index into the root-array.
  void LoadRootIndexed(Register destination,
                       Register variable_offset,
                       int fixed_offset);
  void CompareRoot(Register with, Heap::RootListIndex index);
  void CompareRoot(const Operand& with, Heap::RootListIndex index);
  void PushRoot(Heap::RootListIndex index);

  // ---------------------------------------------------------------------------
  // GC Support

  // For page containing |object| mark region covering |addr| dirty.
  // RecordWriteHelper only works if the object is not in new
  // space.
  void RecordWriteHelper(Register object,
                         Register addr,
                         Register scratch);

  // Check if object is in new space. The condition cc can be equal or
  // not_equal. If it is equal a jump will be done if the object is in new
  // space. The register scratch can be object itself, but it will be clobbered.
  template <typename LabelType>
  void InNewSpace(Register object,
                  Register scratch,
                  Condition cc,
                  LabelType* branch);

  // For page containing |object| mark region covering [object+offset]
  // dirty. |object| is the object being stored into, |value| is the
  // object being stored. If |offset| is zero, then the |scratch|
  // register contains the array index into the elements array
  // represented as an untagged 32-bit integer. All registers are
  // clobbered by the operation. RecordWrite filters out smis so it
  // does not update the write barrier if the value is a smi.
  void RecordWrite(Register object,
                   int offset,
                   Register value,
                   Register scratch);

  // For page containing |object| mark region covering [address]
  // dirty. |object| is the object being stored into, |value| is the
  // object being stored. All registers are clobbered by the
  // operation. RecordWrite filters out smis so it does not update
  // the write barrier if the value is a smi.
  void RecordWrite(Register object,
                   Register address,
                   Register value);

  // For page containing |object| mark region covering [object+offset] dirty.
  // The value is known to not be a smi.
  // object is the object being stored into, value is the object being stored.
  // If offset is zero, then the scratch register contains the array index into
  // the elements array represented as an untagged 32-bit integer.
  // All registers are clobbered by the operation.
  void RecordWriteNonSmi(Register object,
                         int offset,
                         Register value,
                         Register scratch);

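  // Illustrative sketch (register choices are an assumption, not from the
  // original header): a field store followed by its matching write barrier
  // might look like
  //   __ movq(FieldOperand(rbx, JSObject::kPropertiesOffset), rax);
  //   __ RecordWrite(rbx, JSObject::kPropertiesOffset, rax, rcx);
  // where rbx holds the object, rax the stored value, and rcx is scratch.
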
#ifdef ENABLE_DEBUGGER_SUPPORT
  // ---------------------------------------------------------------------------
  // Debugger Support

  void DebugBreak();
#endif

  // ---------------------------------------------------------------------------
  // Activation frames

  void EnterInternalFrame() { EnterFrame(StackFrame::INTERNAL); }
  void LeaveInternalFrame() { LeaveFrame(StackFrame::INTERNAL); }

  void EnterConstructFrame() { EnterFrame(StackFrame::CONSTRUCT); }
  void LeaveConstructFrame() { LeaveFrame(StackFrame::CONSTRUCT); }

  // Enter specific kind of exit frame; either in normal or
  // debug mode. Expects the number of arguments in register rax and
  // sets up the number of arguments in register rdi and the pointer
  // to the first argument in register rsi.
  //
  // Allocates arg_stack_space * kPointerSize memory (not GCed) on the stack
  // accessible via StackSpaceOperand.
  void EnterExitFrame(int arg_stack_space = 0, bool save_doubles = false);

  // Enter specific kind of exit frame. Allocates arg_stack_space * kPointerSize
  // memory (not GCed) on the stack accessible via StackSpaceOperand.
  void EnterApiExitFrame(int arg_stack_space);

  // Leave the current exit frame. Expects/provides the return value in
  // register rax:rdx (untouched) and the pointer to the first
  // argument in register rsi.
  void LeaveExitFrame(bool save_doubles = false);

  // Leave the current exit frame. Expects/provides the return value in
  // register rax (untouched).
  void LeaveApiExitFrame();

  // Push and pop the registers that can hold pointers.
  void PushSafepointRegisters() { Pushad(); }
  void PopSafepointRegisters() { Popad(); }
  // Store the value in register src in the safepoint register stack
  // slot for register dst.
  void StoreToSafepointRegisterSlot(Register dst, Register src);
  void LoadFromSafepointRegisterSlot(Register dst, Register src);

  void InitializeRootRegister() {
    ExternalReference roots_address =
        ExternalReference::roots_address(isolate());
    movq(kRootRegister, roots_address);
    addq(kRootRegister, Immediate(kRootRegisterBias));
  }

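  // Worked example of the bias (an inference from the constants above, not
  // stated in the original header): with kRootRegisterBias == 128, a root at
  // index i lives at Operand(kRootRegister, i * kPointerSize - 128), so the
  // first 32 roots (i = 0..31 gives displacements -128..120) are reachable
  // with a single signed 8-bit displacement byte.
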
  // ---------------------------------------------------------------------------
  // JavaScript invokes

  // Invoke the JavaScript function code by either calling or jumping.
  void InvokeCode(Register code,
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  InvokeFlag flag,
                  CallWrapper* call_wrapper = NULL);

  void InvokeCode(Handle<Code> code,
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  RelocInfo::Mode rmode,
                  InvokeFlag flag,
                  CallWrapper* call_wrapper = NULL);

  // Invoke the JavaScript function in the given register. Changes the
  // current context to the context in the function before invoking.
  void InvokeFunction(Register function,
                      const ParameterCount& actual,
                      InvokeFlag flag,
                      CallWrapper* call_wrapper = NULL);

  void InvokeFunction(JSFunction* function,
                      const ParameterCount& actual,
                      InvokeFlag flag,
                      CallWrapper* call_wrapper = NULL);

  // Invoke specified builtin JavaScript function. Adds an entry to
  // the unresolved list if the name does not resolve.
  void InvokeBuiltin(Builtins::JavaScript id,
                     InvokeFlag flag,
                     CallWrapper* call_wrapper = NULL);

  // Store the function for the given builtin in the target register.
  void GetBuiltinFunction(Register target, Builtins::JavaScript id);

  // Store the code object for the given builtin in the target register.
  void GetBuiltinEntry(Register target, Builtins::JavaScript id);


  // ---------------------------------------------------------------------------
  // Smi tagging, untagging and operations on tagged smis.

  void InitializeSmiConstantRegister() {
    movq(kSmiConstantRegister,
         reinterpret_cast<uint64_t>(Smi::FromInt(kSmiConstantRegisterValue)),
         RelocInfo::NONE);
  }

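  // Note (an inference, not stated in this header): on x64 a smi keeps its
  // 32-bit payload in the upper half of the word, so Smi::FromInt(1) is the
  // bit pattern uint64_t(1) << 32. Holding that constant in a callee-saved
  // register lets other small smi constants be materialized cheaply from it.
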
  // Conversions between tagged smi values and non-tagged integer values.

  // Tag an integer value. The result must be known to be a valid smi value.
  // Only uses the low 32 bits of the src register. Sets the N and Z flags
  // based on the value of the resulting smi.
  void Integer32ToSmi(Register dst, Register src);

  // Stores an integer32 value into a memory field that already holds a smi.
  void Integer32ToSmiField(const Operand& dst, Register src);

  // Adds constant to src and tags the result as a smi.
  // Result must be a valid smi.
  void Integer64PlusConstantToSmi(Register dst, Register src, int constant);

  // Convert smi to 32-bit integer. I.e., not sign extended into
  // high 32 bits of destination.
  void SmiToInteger32(Register dst, Register src);
  void SmiToInteger32(Register dst, const Operand& src);

  // Convert smi to 64-bit integer (sign extended if necessary).
  void SmiToInteger64(Register dst, Register src);
  void SmiToInteger64(Register dst, const Operand& src);

  // Multiply a positive smi's integer value by a power of two.
  // Provides result as 64-bit integer value.
  void PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
                                             Register src,
                                             int power);

  // Divide a positive smi's integer value by a power of two.
  // Provides result as 32-bit integer value.
  void PositiveSmiDivPowerOfTwoToInteger32(Register dst,
                                           Register src,
                                           int power);

  // Perform the logical or of two smi values and return a smi value.
  // If either argument is not a smi, jump to on_not_smis and retain
  // the original values of source registers. The destination register
  // may be changed if it's not one of the source registers.
  template <typename LabelType>
  void SmiOrIfSmis(Register dst,
                   Register src1,
                   Register src2,
                   LabelType* on_not_smis);


  // Simple comparison of smis. Both sides must be known smis to use these,
  // otherwise use Cmp.
  void SmiCompare(Register smi1, Register smi2);
  void SmiCompare(Register dst, Smi* src);
  void SmiCompare(Register dst, const Operand& src);
  void SmiCompare(const Operand& dst, Register src);
  void SmiCompare(const Operand& dst, Smi* src);
  // Compare the int32 in src register to the value of the smi stored at dst.
  void SmiCompareInteger32(const Operand& dst, Register src);
  // Sets sign and zero flags depending on value of smi in register.
  void SmiTest(Register src);

  // Functions performing a check on a known or potential smi. Returns
  // a condition that is satisfied if the check is successful.

  // Is the value a tagged smi.
  Condition CheckSmi(Register src);
  Condition CheckSmi(const Operand& src);

  // Is the value a non-negative tagged smi.
  Condition CheckNonNegativeSmi(Register src);

  // Are both values tagged smis.
  Condition CheckBothSmi(Register first, Register second);

  // Are both values non-negative tagged smis.
  Condition CheckBothNonNegativeSmi(Register first, Register second);

  // Is either value a tagged smi.
  Condition CheckEitherSmi(Register first,
                           Register second,
                           Register scratch = kScratchRegister);

  // Is the value the minimum smi value (since we are using
  // two's complement numbers, negating the value is known to yield
  // a non-smi value).
  Condition CheckIsMinSmi(Register src);

  // Checks whether a 32-bit integer value is valid for conversion
  // to a smi.
  Condition CheckInteger32ValidSmiValue(Register src);

  // Checks whether a 32-bit unsigned integer value is valid for
  // conversion to a smi.
  Condition CheckUInteger32ValidSmiValue(Register src);

  // Check whether src is a Smi, and set dst to zero if it is a smi,
  // and to one if it isn't.
  void CheckSmiToIndicator(Register dst, Register src);
  void CheckSmiToIndicator(Register dst, const Operand& src);

  // Test-and-jump functions. Typically combine a check function
  // above with a conditional jump.

  // Jump if the value cannot be represented by a smi.
  template <typename LabelType>
  void JumpIfNotValidSmiValue(Register src, LabelType* on_invalid);

  // Jump if the unsigned integer value cannot be represented by a smi.
  template <typename LabelType>
  void JumpIfUIntNotValidSmiValue(Register src, LabelType* on_invalid);

  // Jump to label if the value is a tagged smi.
  template <typename LabelType>
  void JumpIfSmi(Register src, LabelType* on_smi);

  // Jump to label if the value is not a tagged smi.
  template <typename LabelType>
  void JumpIfNotSmi(Register src, LabelType* on_not_smi);

  // Jump to label if the value is not a non-negative tagged smi.
  template <typename LabelType>
  void JumpUnlessNonNegativeSmi(Register src, LabelType* on_not_smi);

  // Jump to label if the value, which must be a tagged smi, has value equal
  // to the constant.
  template <typename LabelType>
  void JumpIfSmiEqualsConstant(Register src,
                               Smi* constant,
                               LabelType* on_equals);

  // Jump if either or both registers are not smi values.
  template <typename LabelType>
  void JumpIfNotBothSmi(Register src1,
                        Register src2,
                        LabelType* on_not_both_smi);

  // Jump if either or both registers are not non-negative smi values.
  template <typename LabelType>
  void JumpUnlessBothNonNegativeSmi(Register src1, Register src2,
                                    LabelType* on_not_both_smi);

  // Operations on tagged smi values.

  // Smis represent a subset of integers. The subset is always equivalent to
  // a two's complement interpretation of a fixed number of bits.

  // Optimistically adds an integer constant to a supposed smi.
  // If the src is not a smi, or the result is not a smi, jump to
  // the label.
  template <typename LabelType>
  void SmiTryAddConstant(Register dst,
                         Register src,
                         Smi* constant,
                         LabelType* on_not_smi_result);

  // Add an integer constant to a tagged smi, giving a tagged smi as result.
  // No overflow testing on the result is done.
  void SmiAddConstant(Register dst, Register src, Smi* constant);

  // Add an integer constant to a tagged smi, giving a tagged smi as result.
  // No overflow testing on the result is done.
  void SmiAddConstant(const Operand& dst, Smi* constant);

  // Add an integer constant to a tagged smi, giving a tagged smi as result,
  // or jumping to a label if the result cannot be represented by a smi.
  template <typename LabelType>
  void SmiAddConstant(Register dst,
                      Register src,
                      Smi* constant,
                      LabelType* on_not_smi_result);

  // Subtract an integer constant from a tagged smi, giving a tagged smi as
  // result. No testing on the result is done. Sets the N and Z flags
  // based on the value of the resulting integer.
  void SmiSubConstant(Register dst, Register src, Smi* constant);

  // Subtract an integer constant from a tagged smi, giving a tagged smi as
  // result, or jumping to a label if the result cannot be represented by a smi.
  template <typename LabelType>
  void SmiSubConstant(Register dst,
                      Register src,
                      Smi* constant,
                      LabelType* on_not_smi_result);

  // Negating a smi can give a negative zero or too large positive value.
  // NOTICE: This operation jumps on success, not failure!
  template <typename LabelType>
  void SmiNeg(Register dst,
              Register src,
              LabelType* on_smi_result);

  // Adds smi values and returns the result as a smi.
  // If dst is src1, then src1 will be destroyed, even if
  // the operation is unsuccessful.
  template <typename LabelType>
  void SmiAdd(Register dst,
              Register src1,
              Register src2,
              LabelType* on_not_smi_result);
  template <typename LabelType>
  void SmiAdd(Register dst,
              Register src1,
              const Operand& src2,
              LabelType* on_not_smi_result);

  void SmiAdd(Register dst,
              Register src1,
              Register src2);

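  // Illustrative sketch (labels and registers are an assumption, not from
  // the original header): a checked smi addition with an overflow bailout:
  //   NearLabel not_smi_result;
  //   __ SmiAdd(rax, rax, rbx, &not_smi_result);  // rax = rax + rbx.
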
497 // Subtracts smi values and return the result as a smi.
498 // If dst is src1, then src1 will be destroyed, even if
499 // the operation is unsuccessful.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100500 template <typename LabelType>
Steve Blocka7e24c12009-10-30 11:49:00 +0000501 void SmiSub(Register dst,
502 Register src1,
503 Register src2,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100504 LabelType* on_not_smi_result);
Steve Blocka7e24c12009-10-30 11:49:00 +0000505
Steve Block6ded16b2010-05-10 14:33:55 +0100506 void SmiSub(Register dst,
507 Register src1,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100508 Register src2);
509
510 template <typename LabelType>
511 void SmiSub(Register dst,
512 Register src1,
Leon Clarkef7060e22010-06-03 12:02:55 +0100513 const Operand& src2,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100514 LabelType* on_not_smi_result);
515
516 void SmiSub(Register dst,
517 Register src1,
518 const Operand& src2);
Steve Block6ded16b2010-05-10 14:33:55 +0100519
Steve Blocka7e24c12009-10-30 11:49:00 +0000520 // Multiplies smi values and return the result as a smi,
521 // if possible.
522 // If dst is src1, then src1 will be destroyed, even if
523 // the operation is unsuccessful.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100524 template <typename LabelType>
Steve Blocka7e24c12009-10-30 11:49:00 +0000525 void SmiMul(Register dst,
526 Register src1,
527 Register src2,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100528 LabelType* on_not_smi_result);
Steve Blocka7e24c12009-10-30 11:49:00 +0000529
530 // Divides one smi by another and returns the quotient.
531 // Clobbers rax and rdx registers.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100532 template <typename LabelType>
Steve Blocka7e24c12009-10-30 11:49:00 +0000533 void SmiDiv(Register dst,
534 Register src1,
535 Register src2,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100536 LabelType* on_not_smi_result);
Steve Blocka7e24c12009-10-30 11:49:00 +0000537
538 // Divides one smi by another and returns the remainder.
539 // Clobbers rax and rdx registers.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100540 template <typename LabelType>
Steve Blocka7e24c12009-10-30 11:49:00 +0000541 void SmiMod(Register dst,
542 Register src1,
543 Register src2,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100544 LabelType* on_not_smi_result);
Steve Blocka7e24c12009-10-30 11:49:00 +0000545
546 // Bitwise operations.
547 void SmiNot(Register dst, Register src);
548 void SmiAnd(Register dst, Register src1, Register src2);
549 void SmiOr(Register dst, Register src1, Register src2);
550 void SmiXor(Register dst, Register src1, Register src2);
Steve Block3ce2e202009-11-05 08:53:23 +0000551 void SmiAndConstant(Register dst, Register src1, Smi* constant);
552 void SmiOrConstant(Register dst, Register src1, Smi* constant);
553 void SmiXorConstant(Register dst, Register src1, Smi* constant);
Steve Blocka7e24c12009-10-30 11:49:00 +0000554
555 void SmiShiftLeftConstant(Register dst,
556 Register src,
Kristian Monsen25f61362010-05-21 11:50:48 +0100557 int shift_value);
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100558 template <typename LabelType>
Steve Blocka7e24c12009-10-30 11:49:00 +0000559 void SmiShiftLogicalRightConstant(Register dst,
560 Register src,
561 int shift_value,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100562 LabelType* on_not_smi_result);
Steve Blocka7e24c12009-10-30 11:49:00 +0000563 void SmiShiftArithmeticRightConstant(Register dst,
564 Register src,
565 int shift_value);
566
  // Shifts a smi value to the left, and returns the result if that is a smi.
  // Uses and clobbers rcx, so dst may not be rcx.
  void SmiShiftLeft(Register dst,
                    Register src1,
                    Register src2);
  // Shifts a smi value to the right, shifting in zero bits at the top, and
  // returns the unsigned interpretation of the result if that is a smi.
  // Uses and clobbers rcx, so dst may not be rcx.
  template <typename LabelType>
  void SmiShiftLogicalRight(Register dst,
                            Register src1,
                            Register src2,
                            LabelType* on_not_smi_result);
  // Shifts a smi value to the right, sign extending the top, and
  // returns the signed interpretation of the result. That will always
  // be a valid smi value, since it's numerically smaller than the
  // original.
  // Uses and clobbers rcx, so dst may not be rcx.
  void SmiShiftArithmeticRight(Register dst,
                               Register src1,
                               Register src2);

  // Specialized operations

  // Select the non-smi register of two registers where exactly one is a
  // smi. If neither are smis, jump to the failure label.
  template <typename LabelType>
  void SelectNonSmi(Register dst,
                    Register src1,
                    Register src2,
                    LabelType* on_not_smis);

  // Converts, if necessary, a smi to a combination of number and
  // multiplier to be used as a scaled index.
  // The src register contains a *positive* smi value. The shift is the
  // power of two to multiply the index value by (e.g.
  // to index by smi-value * kPointerSize, pass the smi and kPointerSizeLog2).
  // The returned index register may be either src or dst, depending
  // on what is most efficient. If src and dst are different registers,
  // src is always unchanged.
  SmiIndex SmiToIndex(Register dst, Register src, int shift);

  // Converts a positive smi to a negative index.
  SmiIndex SmiToNegativeIndex(Register dst, Register src, int shift);

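  // Illustrative sketch (an assumption, not from the original header):
  // loading element rcx (a positive smi) of a FixedArray held in rbx:
  //   SmiIndex index = masm->SmiToIndex(rcx, rcx, kPointerSizeLog2);
  //   masm->movq(rax, FieldOperand(rbx, index.reg, index.scale,
  //                                FixedArray::kHeaderSize));
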
  // Add the value of a smi in memory to an int32 register.
  // Sets flags as a normal add.
  void AddSmiField(Register dst, const Operand& src);

  // Basic Smi operations.
  void Move(Register dst, Smi* source) {
    LoadSmiConstant(dst, source);
  }

  void Move(const Operand& dst, Smi* source) {
    Register constant = GetSmiConstant(source);
    movq(dst, constant);
  }

  void Push(Smi* smi);
  void Test(const Operand& dst, Smi* source);

  // ---------------------------------------------------------------------------
  // String macros.

  // If object is a string, its map is loaded into object_map.
  template <typename LabelType>
  void JumpIfNotString(Register object,
                       Register object_map,
                       LabelType* not_string);


  template <typename LabelType>
  void JumpIfNotBothSequentialAsciiStrings(Register first_object,
                                           Register second_object,
                                           Register scratch1,
                                           Register scratch2,
                                           LabelType* on_not_both_flat_ascii);

  // Check whether the instance type represents a flat ascii string. Jump to the
  // label if not. If the instance type can be scratched specify same register
  // for both instance type and scratch.
  template <typename LabelType>
  void JumpIfInstanceTypeIsNotSequentialAscii(
      Register instance_type,
      Register scratch,
      LabelType* on_not_flat_ascii_string);

  template <typename LabelType>
  void JumpIfBothInstanceTypesAreNotSequentialAscii(
      Register first_object_instance_type,
      Register second_object_instance_type,
      Register scratch1,
      Register scratch2,
      LabelType* on_fail);

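  // Illustrative sketch (registers and label are an assumption): a string
  // fast path guarding itself before touching raw characters:
  //   __ JumpIfNotBothSequentialAsciiStrings(rax, rdx, rbx, rcx, &slow_case);
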
  // ---------------------------------------------------------------------------
  // Macro instructions.

  // Load a register with a long value as efficiently as possible.
  void Set(Register dst, int64_t x);
  void Set(const Operand& dst, int64_t x);

  // Move if the registers are not identical.
  void Move(Register target, Register source);

  // Handle support
  void Move(Register dst, Handle<Object> source);
  void Move(const Operand& dst, Handle<Object> source);
  void Cmp(Register dst, Handle<Object> source);
  void Cmp(const Operand& dst, Handle<Object> source);
  void Cmp(Register dst, Smi* src);
  void Cmp(const Operand& dst, Smi* src);
  void Push(Handle<Object> source);

  // Emit code to discard a non-negative number of pointer-sized elements
  // from the stack, clobbering only the rsp register.
  void Drop(int stack_elements);

  void Call(Label* target) { call(target); }

  // Control Flow
  void Jump(Address destination, RelocInfo::Mode rmode);
  void Jump(ExternalReference ext);
  void Jump(Handle<Code> code_object, RelocInfo::Mode rmode);

  void Call(Address destination, RelocInfo::Mode rmode);
  void Call(ExternalReference ext);
  void Call(Handle<Code> code_object, RelocInfo::Mode rmode);

  // The size of the code generated for different call instructions.
  int CallSize(Address destination, RelocInfo::Mode rmode) {
    return kCallInstructionLength;
  }
  int CallSize(ExternalReference ext);
  int CallSize(Handle<Code> code_object) {
    // Code calls use 32-bit relative addressing.
    return kShortCallInstructionLength;
  }
  int CallSize(Register target) {
    // Opcode: REX_opt FF /2 m64
    return (target.high_bit() != 0) ? 3 : 2;
  }
  int CallSize(const Operand& target) {
    // Opcode: REX_opt FF /2 m64
    return (target.requires_rex() ? 2 : 1) + target.operand_size();
  }

716 void CallSelf() {
717 Handle<Code> self(reinterpret_cast<Code**>(CodeObject().location()));
718 Call(self, RelocInfo::CODE_TARGET);
719 }
720
721 // Non-x64 instructions.
722 // Push/pop all general purpose registers.
723 // Does not push rsp/rbp nor any of the assembler's special purpose registers
724 // (kScratchRegister, kSmiConstantRegister, kRootRegister).
725 void Pushad();
726 void Popad();
727 // Sets the stack as after performing Popad, without actually loading the
728 // registers.
729 void Dropad();
730
Steve Blocka7e24c12009-10-30 11:49:00 +0000731 // Compare object type for heap object.
732 // Always use unsigned comparisons: above and below, not less and greater.
733 // Incoming register is heap_object and outgoing register is map.
734 // They may be the same register, and may be kScratchRegister.
735 void CmpObjectType(Register heap_object, InstanceType type, Register map);
736
737 // Compare instance type for map.
738 // Always use unsigned comparisons: above and below, not less and greater.
739 void CmpInstanceType(Register map, InstanceType type);
740
Andrei Popescu31002712010-02-23 13:46:05 +0000741 // Check if the map of an object is equal to a specified map and
742 // branch to label if not. Skip the smi check if not required
743 // (object is known to be a heap object)
744 void CheckMap(Register obj,
745 Handle<Map> map,
746 Label* fail,
747 bool is_heap_object);
748
Leon Clarked91b9f72010-01-27 17:25:45 +0000749 // Check if the object in register heap_object is a string. Afterwards the
750 // register map contains the object map and the register instance_type
751 // contains the instance_type. The registers map and instance_type can be the
752 // same in which case it contains the instance type afterwards. Either of the
753 // registers map and instance_type can be the same as heap_object.
754 Condition IsObjectStringType(Register heap_object,
755 Register map,
756 Register instance_type);
757
  // FCmp compares and pops the two values on top of the FPU stack.
  // The flag results are similar to integer cmp, but require unsigned
  // jcc instructions (je, ja, jae, jb, jbe, and jz).
  void FCmp();

  // Abort execution if argument is not a number. Used in debug code.
  void AbortIfNotNumber(Register object);

  // Abort execution if argument is a smi. Used in debug code.
  void AbortIfSmi(Register object);

  // Abort execution if argument is not a smi. Used in debug code.
  void AbortIfNotSmi(Register object);
  void AbortIfNotSmi(const Operand& object);

  // Abort execution if argument is not a string. Used in debug code.
  void AbortIfNotString(Register object);

  // Abort execution if argument is not the root value with the given index.
  void AbortIfNotRootValue(Register src,
                           Heap::RootListIndex root_value_index,
                           const char* message);

  // ---------------------------------------------------------------------------
  // Exception handling

  // Push a new try handler and link into try handler chain. The return
  // address must be pushed before calling this helper.
  void PushTryHandler(CodeLocation try_location, HandlerType type);

  // Unlink the stack handler on top of the stack from the try handler chain.
  void PopTryHandler();

  // Activate the top handler in the try handler chain and pass the
  // thrown value.
  void Throw(Register value);

  // Propagate an uncatchable exception out of the current JS stack.
  void ThrowUncatchable(UncatchableExceptionType type, Register value);

  // ---------------------------------------------------------------------------
  // Inline caching support

  // Generate code for checking access rights - used for security checks
  // on access to global objects across environments. The holder register
  // is left untouched, but the scratch register and kScratchRegister,
  // which must be different, are clobbered.
  void CheckAccessGlobalProxy(Register holder_reg,
                              Register scratch,
                              Label* miss);


  // ---------------------------------------------------------------------------
  // Allocation support

  // Allocate an object in new space. If the new space is exhausted control
  // continues at the gc_required label. The allocated object is returned in
  // result and end of the new object is returned in result_end. The register
  // scratch can be passed as no_reg in which case an additional object
  // reference will be added to the reloc info. The returned pointers in result
  // and result_end have not yet been tagged as heap objects. If
  // result_contains_top_on_entry is true the content of result is known to be
  // the allocation top on entry (could be result_end from a previous call to
  // AllocateInNewSpace). If result_contains_top_on_entry is true scratch
  // should be no_reg as it is never used.
  void AllocateInNewSpace(int object_size,
                          Register result,
                          Register result_end,
                          Register scratch,
                          Label* gc_required,
                          AllocationFlags flags);

  void AllocateInNewSpace(int header_size,
                          ScaleFactor element_size,
                          Register element_count,
                          Register result,
                          Register result_end,
                          Register scratch,
                          Label* gc_required,
                          AllocationFlags flags);

  void AllocateInNewSpace(Register object_size,
                          Register result,
                          Register result_end,
                          Register scratch,
                          Label* gc_required,
                          AllocationFlags flags);

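  // Illustrative sketch (registers and label are an assumption): allocating
  // a fixed-size box and getting it back already tagged:
  //   __ AllocateInNewSpace(HeapNumber::kSize, rax, rbx, no_reg,
  //                         &gc_required, TAG_OBJECT);
  // On the fast path rax now holds a tagged pointer and rbx the new top.
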
  // Undo allocation in new space. The object passed and objects allocated after
  // it will no longer be allocated. Make sure that no pointers are left to the
  // object(s) no longer allocated as they would be invalid when allocation is
  // un-done.
  void UndoAllocationInNewSpace(Register object);

  // Allocate a heap number in new space with undefined value. Returns
  // tagged pointer in result register, or jumps to gc_required if new
  // space is full.
  void AllocateHeapNumber(Register result,
                          Register scratch,
                          Label* gc_required);

  // Allocate a sequential string. All the header fields of the string object
  // are initialized.
  void AllocateTwoByteString(Register result,
                             Register length,
                             Register scratch1,
                             Register scratch2,
                             Register scratch3,
                             Label* gc_required);
  void AllocateAsciiString(Register result,
                           Register length,
                           Register scratch1,
                           Register scratch2,
                           Register scratch3,
                           Label* gc_required);

  // Allocate a raw cons string object. Only the map field of the result is
  // initialized.
  void AllocateConsString(Register result,
                          Register scratch1,
                          Register scratch2,
                          Label* gc_required);
  void AllocateAsciiConsString(Register result,
                               Register scratch1,
                               Register scratch2,
                               Label* gc_required);

  // ---------------------------------------------------------------------------
  // Support functions.

  // Check if result is zero and op is negative.
  void NegativeZeroTest(Register result, Register op, Label* then_label);

  // Check if result is zero and op is negative in code using jump targets.
  void NegativeZeroTest(CodeGenerator* cgen,
                        Register result,
                        Register op,
                        JumpTarget* then_target);

  // Check if result is zero and any of op1 and op2 are negative.
  // Register scratch is destroyed, and it must be different from op2.
  void NegativeZeroTest(Register result, Register op1, Register op2,
                        Register scratch, Label* then_label);

  // Try to get the function prototype of a function and put the value in
  // the result register. Checks that the function really is a
  // function and jumps to the miss label if the fast checks fail. The
  // function register will be untouched; the other register may be
  // clobbered.
  void TryGetFunctionPrototype(Register function,
                               Register result,
                               Label* miss);

  // Generates code for reporting that an illegal operation has
  // occurred.
  void IllegalOperation(int num_arguments);

  // Picks out an array index from the hash field.
  // Register use:
  //   hash - holds the index's hash. Clobbered.
  //   index - holds the overwritten index on exit.
  void IndexFromHash(Register hash, Register index);

  // Find the function context up the context chain.
  void LoadContext(Register dst, int context_chain_length);

  // Load the global function with the given index.
  void LoadGlobalFunction(int index, Register function);

  // Load the initial map from the global function. The registers
  // function and map can be the same.
  void LoadGlobalFunctionInitialMap(Register function, Register map);

  // ---------------------------------------------------------------------------
  // Runtime calls

  // Call a code stub.
  void CallStub(CodeStub* stub);

  // Call a code stub and return the code object called. Try to generate
  // the code if necessary. Do not perform a GC but instead return a retry
  // after GC failure.
  MUST_USE_RESULT MaybeObject* TryCallStub(CodeStub* stub);

  // Tail call a code stub (jump).
  void TailCallStub(CodeStub* stub);

  // Tail call a code stub (jump) and return the code object called. Try to
  // generate the code if necessary. Do not perform a GC but instead return
  // a retry after GC failure.
  MUST_USE_RESULT MaybeObject* TryTailCallStub(CodeStub* stub);

  // Return from a code stub after popping its arguments.
  void StubReturn(int argc);

  // Call a runtime routine.
  void CallRuntime(const Runtime::Function* f, int num_arguments);

  // Call a runtime function and save the value of XMM registers.
  void CallRuntimeSaveDoubles(Runtime::FunctionId id);

  // Call a runtime function, returning the CodeStub object called.
  // Try to generate the stub code if necessary. Do not perform a GC
  // but instead return a retry after GC failure.
  MUST_USE_RESULT MaybeObject* TryCallRuntime(const Runtime::Function* f,
                                              int num_arguments);

  // Convenience function: Same as above, but takes the fid instead.
  void CallRuntime(Runtime::FunctionId id, int num_arguments);

  // Convenience function: Same as above, but takes the fid instead.
  MUST_USE_RESULT MaybeObject* TryCallRuntime(Runtime::FunctionId id,
                                              int num_arguments);

  // Convenience function: call an external reference.
  void CallExternalReference(const ExternalReference& ext,
                             int num_arguments);

  // Tail call of a runtime routine (jump).
  // Like JumpToExternalReference, but also takes care of passing the number
  // of parameters.
  void TailCallExternalReference(const ExternalReference& ext,
                                 int num_arguments,
                                 int result_size);

  MUST_USE_RESULT MaybeObject* TryTailCallExternalReference(
      const ExternalReference& ext, int num_arguments, int result_size);

  // Convenience function: tail call a runtime routine (jump).
  void TailCallRuntime(Runtime::FunctionId fid,
                       int num_arguments,
                       int result_size);

  MUST_USE_RESULT MaybeObject* TryTailCallRuntime(Runtime::FunctionId fid,
                                                  int num_arguments,
                                                  int result_size);

  // Jump to a runtime routine.
  void JumpToExternalReference(const ExternalReference& ext, int result_size);

  // Jump to a runtime routine.
  MaybeObject* TryJumpToExternalReference(const ExternalReference& ext,
                                          int result_size);

  // Prepares stack to put arguments (aligns and so on).
  // The WIN64 calling convention requires the pointer to the return value
  // slot to be put into rcx (rcx must be preserved until
  // TryCallApiFunctionAndReturn). Saves context (rsi). Clobbers rax.
  // Allocates arg_stack_space * kPointerSize inside the exit frame (not
  // GCed) accessible via StackSpaceOperand.
  void PrepareCallApiFunction(int arg_stack_space);

  // Calls an API function. Allocates HandleScope, extracts
  // returned value from handle and propagates exceptions.
  // Clobbers r14, r15, rbx and caller-save registers. Restores context.
  // On return removes stack_space * kPointerSize (GCed).
  MUST_USE_RESULT MaybeObject* TryCallApiFunctionAndReturn(
      ApiFunction* function, int stack_space);

  // Before calling a C-function from generated code, align arguments on stack.
  // After aligning the frame, arguments must be stored in rsp[0], rsp[8],
  // etc., not pushed. The argument count assumes all arguments are word sized.
  // The number of slots reserved for arguments depends on platform. On Windows
  // stack slots are reserved for the arguments passed in registers. On other
  // platforms stack slots are only reserved for the arguments actually passed
  // on the stack.
  void PrepareCallCFunction(int num_arguments);

  // Calls a C function and cleans up the space for arguments allocated
  // by PrepareCallCFunction. The called function is not allowed to trigger a
  // garbage collection, since that might move the code and invalidate the
  // return address (unless this is somehow accounted for by the called
  // function).
  void CallCFunction(ExternalReference function, int num_arguments);
  void CallCFunction(Register function, int num_arguments);

  // Calculate the number of stack slots to reserve for arguments when calling a
  // C function.
  int ArgumentStackSlotsForCFunctionCall(int num_arguments);

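  // Illustrative sketch (the external reference name is hypothetical and the
  // register choice an assumption; the first integer argument goes in rdi on
  // Unix-like targets and rcx on Windows):
  //   __ PrepareCallCFunction(1);
  //   __ movq(rdi, rax);  // Pass the argument.
  //   __ CallCFunction(ExternalReference::some_c_helper(isolate()), 1);
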
  // ---------------------------------------------------------------------------
  // Utilities

  void Ret();

  // Return and drop arguments from stack, where the number of arguments
  // may be bigger than 2^16 - 1. Requires a scratch register.
  void Ret(int bytes_dropped, Register scratch);

  Handle<Object> CodeObject() {
    ASSERT(!code_object_.is_null());
    return code_object_;
  }

  // Copy length bytes from source to destination.
  // Uses scratch register internally (if you have a low-eight register
  // free, do use it, otherwise kScratchRegister will be used).
  // The min_length is a minimum limit on the value that length will have.
  // The algorithm has some special cases that might be omitted if the string
  // is known to always be long.
  void CopyBytes(Register destination,
                 Register source,
                 Register length,
                 int min_length = 0,
                 Register scratch = kScratchRegister);


  // ---------------------------------------------------------------------------
  // StatsCounter support

  void SetCounter(StatsCounter* counter, int value);
  void IncrementCounter(StatsCounter* counter, int value);
  void DecrementCounter(StatsCounter* counter, int value);


  // ---------------------------------------------------------------------------
  // Debugging

  // Calls Abort(msg) if the condition cc is not satisfied.
  // Use --debug_code to enable.
  void Assert(Condition cc, const char* msg);

  void AssertFastElements(Register elements);

  // Like Assert(), but always enabled.
  void Check(Condition cc, const char* msg);

  // Print a message to stdout and abort execution.
  void Abort(const char* msg);

  // Check that the stack is aligned.
  void CheckStackAlignment();

  // Verify restrictions about code generated in stubs.
  void set_generating_stub(bool value) { generating_stub_ = value; }
  bool generating_stub() { return generating_stub_; }
  void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
  bool allow_stub_calls() { return allow_stub_calls_; }

  static int SafepointRegisterStackIndex(Register reg) {
    return SafepointRegisterStackIndex(reg.code());
  }

Steve Blocka7e24c12009-10-30 11:49:00 +00001100 private:
Steve Block1e0659c2011-05-24 12:43:12 +01001101 // Order general registers are pushed by Pushad.
Steve Block44f0eee2011-05-26 01:26:41 +01001102 // rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r14, r15.
Steve Block1e0659c2011-05-24 12:43:12 +01001103 static int kSafepointPushRegisterIndices[Register::kNumRegisters];
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001104 static const int kNumSafepointSavedRegisters = 11;
1105
Steve Blocka7e24c12009-10-30 11:49:00 +00001106 bool generating_stub_;
1107 bool allow_stub_calls_;
Steve Block44f0eee2011-05-26 01:26:41 +01001108 bool root_array_available_;
Steve Block8defd9f2010-07-08 12:39:36 +01001109
1110 // Returns a register holding the smi value. The register MUST NOT be
1111 // modified. It may be the "smi 1 constant" register.
1112 Register GetSmiConstant(Smi* value);
1113
1114 // Moves the smi value to the destination register.
1115 void LoadSmiConstant(Register dst, Smi* value);
1116
Andrei Popescu31002712010-02-23 13:46:05 +00001117 // This handle will be patched with the code object on installation.
1118 Handle<Object> code_object_;
Steve Blocka7e24c12009-10-30 11:49:00 +00001119
1120 // Helper functions for generating invokes.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001121 template <typename LabelType>
Steve Blocka7e24c12009-10-30 11:49:00 +00001122 void InvokePrologue(const ParameterCount& expected,
1123 const ParameterCount& actual,
1124 Handle<Code> code_constant,
1125 Register code_register,
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001126 LabelType* done,
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001127 InvokeFlag flag,
Steve Block44f0eee2011-05-26 01:26:41 +01001128 CallWrapper* call_wrapper);
Steve Blocka7e24c12009-10-30 11:49:00 +00001129
Steve Blocka7e24c12009-10-30 11:49:00 +00001130 // Activation support.
1131 void EnterFrame(StackFrame::Type type);
1132 void LeaveFrame(StackFrame::Type type);
1133
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001134 void EnterExitFramePrologue(bool save_rax);
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08001135
1136 // Allocates arg_stack_space * kPointerSize memory (not GCed) on the stack
1137 // accessible via StackSpaceOperand.
Steve Block1e0659c2011-05-24 12:43:12 +01001138 void EnterExitFrameEpilogue(int arg_stack_space, bool save_doubles);
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08001139
1140 void LeaveExitFrameEpilogue();
Ben Murdochbb769b22010-08-11 14:56:33 +01001141
Steve Blocka7e24c12009-10-30 11:49:00 +00001142 // Allocation support helpers.
Steve Block6ded16b2010-05-10 14:33:55 +01001143 // Loads the top of new-space into the result register.
Steve Block6ded16b2010-05-10 14:33:55 +01001144 // Otherwise the address of the new-space top is loaded into scratch (if
1145 // scratch is valid), and the new-space top is loaded into result.
Steve Blocka7e24c12009-10-30 11:49:00 +00001146 void LoadAllocationTopHelper(Register result,
Steve Blocka7e24c12009-10-30 11:49:00 +00001147 Register scratch,
1148 AllocationFlags flags);
Steve Block6ded16b2010-05-10 14:33:55 +01001149 // Update allocation top with value in result_end register.
1150 // If scratch is valid, it contains the address of the allocation top.
Steve Blocka7e24c12009-10-30 11:49:00 +00001151 void UpdateAllocationTopHelper(Register result_end, Register scratch);
Ben Murdochbb769b22010-08-11 14:56:33 +01001152
1153 // Helper for PopHandleScope. Allowed to perform a GC and returns
1154 // NULL if gc_allowed. Does not perform a GC if !gc_allowed, and
1155 // possibly returns a failure object indicating an allocation failure.
1156 Object* PopHandleScopeHelper(Register saved,
1157 Register scratch,
1158 bool gc_allowed);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001159
1160
1161 // Compute memory operands for safepoint stack slots.
1162 Operand SafepointRegisterSlot(Register reg);
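  // The mapping below mirrors the Pushad push order: a register with push
  // index i ends up in stack slot kNumSafepointRegisters - i - 1, i.e. the
  // register pushed first occupies the highest-numbered slot.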
  static int SafepointRegisterStackIndex(int reg_code) {
    return kNumSafepointRegisters - kSafepointPushRegisterIndices[reg_code] - 1;
  }

  // Needs access to SafepointRegisterStackIndex for optimized frame
  // traversal.
  friend class OptimizedFrame;
};


// The code patcher is used to patch (typically) small parts of code e.g. for
// debugging and other types of instrumentation. When using the code patcher
// the exact number of bytes specified must be emitted. It is not legal to
// emit relocation information. If any of these constraints are violated it
// causes an assertion to fail.
class CodePatcher {
 public:
  CodePatcher(byte* address, int size);
  virtual ~CodePatcher();

  // Macro assembler to emit code.
  MacroAssembler* masm() { return &masm_; }

 private:
  byte* address_;  // The address of the code being patched.
  int size_;  // Size of the expected patch, in bytes.
  MacroAssembler masm_;  // Macro assembler used to generate the code.
};

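// A minimal sketch of the intended use (hypothetical patch site; the emitted
// bytes must add up to exactly the size passed to the constructor):
//   CodePatcher patcher(address, 2);
//   patcher.masm()->int3();
//   patcher.masm()->int3();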

// Helper class for generating code or data associated with the code
// right before or after a call instruction. As an example this can be used to
// generate safepoint data after calls for crankshaft.
class CallWrapper {
 public:
  CallWrapper() { }
  virtual ~CallWrapper() { }
  // Called just before emitting a call. Argument is the size of the generated
  // call code.
  virtual void BeforeCall(int call_size) = 0;
  // Called just after emitting a call, i.e., at the return site for the call.
  virtual void AfterCall() = 0;
};

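// A sketch of a concrete wrapper (hypothetical class; a real client would
// record safepoint data at these hooks):
//   class LoggingCallWrapper : public CallWrapper {
//    public:
//     virtual void BeforeCall(int call_size) { /* note the call size */ }
//     virtual void AfterCall() { /* record the return-site address */ }
//   };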

// -----------------------------------------------------------------------------
// Static helper functions.

// Generate an Operand for loading a field from an object.
static inline Operand FieldOperand(Register object, int offset) {
  return Operand(object, offset - kHeapObjectTag);
}


// Generate an Operand for loading an indexed field from an object.
static inline Operand FieldOperand(Register object,
                                   Register index,
                                   ScaleFactor scale,
                                   int offset) {
  return Operand(object, index, scale, offset - kHeapObjectTag);
}

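// For example, loading an object's map through its tagged pointer (standard
// V8 field access; `scratch` and `object` are hypothetical registers):
//   movq(scratch, FieldOperand(object, HeapObject::kMapOffset));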

static inline Operand ContextOperand(Register context, int index) {
  return Operand(context, Context::SlotOffset(index));
}


static inline Operand GlobalObjectOperand() {
  return ContextOperand(rsi, Context::GLOBAL_INDEX);
}


// Provides access to exit frame stack space (not GCed).
static inline Operand StackSpaceOperand(int index) {
#ifdef _WIN64
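  // The Win64 ABI requires the caller to reserve four "shadow" stack slots
  // where the callee may spill its register arguments, so explicitly managed
  // stack space starts above them.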
  const int kShadowSpace = 4;
  return Operand(rsp, (index + kShadowSpace) * kPointerSize);
#else
  return Operand(rsp, index * kPointerSize);
#endif
}


#ifdef GENERATED_CODE_COVERAGE
extern void LogGeneratedCodeCoverage(const char* file_line);
#define CODE_COVERAGE_STRINGIFY(x) #x
#define CODE_COVERAGE_TOSTRING(x) CODE_COVERAGE_STRINGIFY(x)
#define __FILE_LINE__ __FILE__ ":" CODE_COVERAGE_TOSTRING(__LINE__)
#define ACCESS_MASM(masm) { \
    byte* x64_coverage_function = \
        reinterpret_cast<byte*>(FUNCTION_ADDR(LogGeneratedCodeCoverage)); \
    masm->pushfd(); \
    masm->pushad(); \
    masm->push(Immediate(reinterpret_cast<int>(&__FILE_LINE__))); \
    masm->call(x64_coverage_function, RelocInfo::RUNTIME_ENTRY); \
    masm->pop(rax); \
    masm->popad(); \
    masm->popfd(); \
  } \
  masm->
#else
#define ACCESS_MASM(masm) masm->
#endif

// -----------------------------------------------------------------------------
// Template implementations.

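// On x64 builds, kSmiTagSize is 1 and kSmiShiftSize is 31, so kSmiShift is
// 32: a smi keeps its 32-bit payload in the upper half of the word and the
// lower 32 bits are zero.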
static int kSmiShift = kSmiTagSize + kSmiShiftSize;


template <typename LabelType>
void MacroAssembler::SmiNeg(Register dst,
                            Register src,
                            LabelType* on_smi_result) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    movq(kScratchRegister, src);
    neg(dst);  // Low 32 bits are retained as zero by negation.
    // Test if result is zero or Smi::kMinValue.
    cmpq(dst, kScratchRegister);
    j(not_equal, on_smi_result);
    movq(src, kScratchRegister);
  } else {
    movq(dst, src);
    neg(dst);
    cmpq(dst, src);
    // If the result is zero or Smi::kMinValue, negation failed to create
    // a smi.
    j(not_equal, on_smi_result);
  }
}


template <typename LabelType>
void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2,
                            LabelType* on_not_smi_result) {
  ASSERT_NOT_NULL(on_not_smi_result);
  ASSERT(!dst.is(src2));
  if (dst.is(src1)) {
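    // Work in the scratch register so that src1 still holds its original
    // value if the addition overflows and we jump to on_not_smi_result.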
    movq(kScratchRegister, src1);
    addq(kScratchRegister, src2);
    j(overflow, on_not_smi_result);
    movq(dst, kScratchRegister);
  } else {
    movq(dst, src1);
    addq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}


template <typename LabelType>
void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            const Operand& src2,
                            LabelType* on_not_smi_result) {
  ASSERT_NOT_NULL(on_not_smi_result);
  if (dst.is(src1)) {
    movq(kScratchRegister, src1);
    addq(kScratchRegister, src2);
    j(overflow, on_not_smi_result);
    movq(dst, kScratchRegister);
  } else {
    ASSERT(!src2.AddressUsesRegister(dst));
    movq(dst, src1);
    addq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}


template <typename LabelType>
void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            Register src2,
                            LabelType* on_not_smi_result) {
  ASSERT_NOT_NULL(on_not_smi_result);
  ASSERT(!dst.is(src2));
  if (dst.is(src1)) {
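    // cmpq computes dst - src2 for the flags only, leaving dst intact if the
    // subtraction would overflow and we jump to on_not_smi_result.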
    cmpq(dst, src2);
    j(overflow, on_not_smi_result);
    subq(dst, src2);
  } else {
    movq(dst, src1);
    subq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}


template <typename LabelType>
void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            const Operand& src2,
                            LabelType* on_not_smi_result) {
  ASSERT_NOT_NULL(on_not_smi_result);
  if (dst.is(src1)) {
    movq(kScratchRegister, src2);
    cmpq(src1, kScratchRegister);
    j(overflow, on_not_smi_result);
    subq(src1, kScratchRegister);
  } else {
    movq(dst, src1);
    subq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}


template <typename LabelType>
void MacroAssembler::SmiMul(Register dst,
                            Register src1,
                            Register src2,
                            LabelType* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));

  if (dst.is(src1)) {
    NearLabel failure, zero_correct_result;
    movq(kScratchRegister, src1);  // Create backup for later testing.
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, &failure);

    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    NearLabel correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result);

    movq(dst, kScratchRegister);
    xor_(dst, src2);
    j(positive, &zero_correct_result);  // Result was positive zero.

    bind(&failure);  // Reused failure exit, restores src1.
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);

    bind(&zero_correct_result);
    Set(dst, 0);

    bind(&correct_result);
  } else {
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, on_not_smi_result);
    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    NearLabel correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result);
    // One of src1 and src2 is zero, so check whether the other is
    // negative.
    movq(kScratchRegister, src1);
    xor_(kScratchRegister, src2);
    j(negative, on_not_smi_result);
    bind(&correct_result);
  }
}


template <typename LabelType>
void MacroAssembler::SmiTryAddConstant(Register dst,
                                       Register src,
                                       Smi* constant,
                                       LabelType* on_not_smi_result) {
  // Does not assume that src is a smi.
  ASSERT_EQ(static_cast<int>(1), static_cast<int>(kSmiTagMask));
  ASSERT_EQ(0, kSmiTag);
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));

  JumpIfNotSmi(src, on_not_smi_result);
  Register tmp = (dst.is(src) ? kScratchRegister : dst);
  LoadSmiConstant(tmp, constant);
  addq(tmp, src);
  j(overflow, on_not_smi_result);
  if (dst.is(src)) {
    movq(dst, tmp);
  }
}


template <typename LabelType>
void MacroAssembler::SmiAddConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    LabelType* on_not_smi_result) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    LoadSmiConstant(kScratchRegister, constant);
    addq(kScratchRegister, src);
    j(overflow, on_not_smi_result);
    movq(dst, kScratchRegister);
  } else {
    LoadSmiConstant(dst, constant);
    addq(dst, src);
    j(overflow, on_not_smi_result);
  }
}


template <typename LabelType>
void MacroAssembler::SmiSubConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    LabelType* on_not_smi_result) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    if (constant->value() == Smi::kMinValue) {
      // Subtracting min-value from any non-negative value will overflow.
      // We test the non-negativeness before doing the subtraction.
      testq(src, src);
      j(not_sign, on_not_smi_result);
      LoadSmiConstant(kScratchRegister, constant);
      subq(dst, kScratchRegister);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(kScratchRegister, Smi::FromInt(-constant->value()));
      addq(kScratchRegister, dst);
      j(overflow, on_not_smi_result);
      movq(dst, kScratchRegister);
    }
  } else {
    if (constant->value() == Smi::kMinValue) {
      // Subtracting min-value from any non-negative value will overflow.
      // We test the non-negativeness before doing the subtraction.
      testq(src, src);
      j(not_sign, on_not_smi_result);
      LoadSmiConstant(dst, constant);
      // Adding and subtracting the min-value gives the same result, it only
      // differs on the overflow bit, which we don't check here.
      addq(dst, src);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(dst, Smi::FromInt(-(constant->value())));
      addq(dst, src);
      j(overflow, on_not_smi_result);
    }
  }
}


template <typename LabelType>
void MacroAssembler::SmiDiv(Register dst,
                            Register src1,
                            Register src2,
                            LabelType* on_not_smi_result) {
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src2.is(rax));
  ASSERT(!src2.is(rdx));
  ASSERT(!src1.is(rdx));

  // Check for 0 divisor (result is +/-Infinity).
  NearLabel positive_divisor;
  testq(src2, src2);
  j(zero, on_not_smi_result);

  if (src1.is(rax)) {
    movq(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  // We need to rule out dividing Smi::kMinValue by -1, since that would
  // overflow in idiv and raise an exception.
  // We combine this with negative zero test (negative zero only happens
  // when dividing zero by a negative number).

  // We overshoot a little and go to slow case if we divide min-value
  // by any negative value, not just -1.
  NearLabel safe_div;
  testl(rax, Immediate(0x7fffffff));
  j(not_zero, &safe_div);
  testq(src2, src2);
  if (src1.is(rax)) {
    j(positive, &safe_div);
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);
  } else {
    j(negative, on_not_smi_result);
  }
  bind(&safe_div);

  SmiToInteger32(src2, src2);
  // Sign extend src1 into edx:eax.
  cdq();
  idivl(src2);
  Integer32ToSmi(src2, src2);
  // Check that the remainder is zero.
  testl(rdx, rdx);
  if (src1.is(rax)) {
    NearLabel smi_result;
    j(zero, &smi_result);
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);
    bind(&smi_result);
  } else {
    j(not_zero, on_not_smi_result);
  }
  if (!dst.is(src1) && src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  Integer32ToSmi(dst, rax);
}


template <typename LabelType>
void MacroAssembler::SmiMod(Register dst,
                            Register src1,
                            Register src2,
                            LabelType* on_not_smi_result) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!src2.is(rax));
  ASSERT(!src2.is(rdx));
  ASSERT(!src1.is(rdx));
  ASSERT(!src1.is(src2));

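  // Check for a 0 divisor; x % 0 is NaN in JavaScript, not a smi.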
  testq(src2, src2);
  j(zero, on_not_smi_result);

  if (src1.is(rax)) {
    movq(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  SmiToInteger32(src2, src2);

  // Test for the edge case of dividing Smi::kMinValue by -1 (will overflow).
  NearLabel safe_div;
  cmpl(rax, Immediate(Smi::kMinValue));
  j(not_equal, &safe_div);
  cmpl(src2, Immediate(-1));
  j(not_equal, &safe_div);
  // Retag inputs and go slow case.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  jmp(on_not_smi_result);
  bind(&safe_div);

  // Sign extend eax into edx:eax.
  cdq();
  idivl(src2);
  // Restore smi tags on inputs.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  // Check for a negative zero result. If the result is zero, and the
  // dividend is negative, go slow to return a floating point negative zero.
  NearLabel smi_result;
  testl(rdx, rdx);
  j(not_zero, &smi_result);
  testq(src1, src1);
  j(negative, on_not_smi_result);
  bind(&smi_result);
  Integer32ToSmi(dst, rdx);
}


template <typename LabelType>
void MacroAssembler::SmiShiftLogicalRightConstant(
    Register dst, Register src, int shift_value, LabelType* on_not_smi_result) {
  // Logical right shift interprets its result as an *unsigned* number.
  if (dst.is(src)) {
    UNIMPLEMENTED();  // Not used.
  } else {
    movq(dst, src);
    if (shift_value == 0) {
      testq(dst, dst);
      j(negative, on_not_smi_result);
    }
    shr(dst, Immediate(shift_value + kSmiShift));
    shl(dst, Immediate(kSmiShift));
  }
}


template <typename LabelType>
void MacroAssembler::SmiShiftLogicalRight(Register dst,
                                          Register src1,
                                          Register src2,
                                          LabelType* on_not_smi_result) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(rcx));
  // dst and src1 can be the same, because the one case that bails out
  // is a shift by 0, which leaves dst, and therefore src1, unchanged.
  NearLabel result_ok;
  if (src1.is(rcx) || src2.is(rcx)) {
    movq(kScratchRegister, rcx);
  }
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  orl(rcx, Immediate(kSmiShift));
  shr_cl(dst);  // Shift amount is (rcx & 0x1f) + 32.
  shl(dst, Immediate(kSmiShift));
  testq(dst, dst);
  if (src1.is(rcx) || src2.is(rcx)) {
    NearLabel positive_result;
    j(positive, &positive_result);
    if (src1.is(rcx)) {
      movq(src1, kScratchRegister);
    } else {
      movq(src2, kScratchRegister);
    }
    jmp(on_not_smi_result);
    bind(&positive_result);
  } else {
    j(negative, on_not_smi_result);  // src2 was zero and src1 negative.
  }
}


template <typename LabelType>
void MacroAssembler::SelectNonSmi(Register dst,
                                  Register src1,
                                  Register src2,
                                  LabelType* on_not_smis) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(src1));
  ASSERT(!dst.is(src2));
  // The operands must not both be smis.
#ifdef DEBUG
  if (allow_stub_calls()) {  // Check contains a stub call.
    Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
    Check(not_both_smis, "Both registers were smis in SelectNonSmi.");
  }
#endif
  ASSERT_EQ(0, kSmiTag);
  ASSERT_EQ(0, Smi::FromInt(0));
  movl(kScratchRegister, Immediate(kSmiTagMask));
  and_(kScratchRegister, src1);
  testl(kScratchRegister, src2);
  // If non-zero then both tag bits are set, i.e. neither operand is a smi.
  j(not_zero, on_not_smis);

  // Exactly one operand is a smi.
  ASSERT_EQ(1, static_cast<int>(kSmiTagMask));
  // kScratchRegister still holds src1 & kSmiTagMask, which is either zero
  // or one.
  subq(kScratchRegister, Immediate(1));
  // If src1 is a smi, the scratch register is all 1s, else it is all 0s.
  movq(dst, src1);
  xor_(dst, src2);
  and_(dst, kScratchRegister);
  // If src1 is a smi, dst holds src1 ^ src2, else it is zero.
  xor_(dst, src1);
  // If src1 is a smi, dst is src2, else it is src1, i.e., the non-smi.
}


template <typename LabelType>
void MacroAssembler::JumpIfSmi(Register src, LabelType* on_smi) {
  ASSERT_EQ(0, kSmiTag);
  Condition smi = CheckSmi(src);
  j(smi, on_smi);
}


template <typename LabelType>
void MacroAssembler::JumpIfNotSmi(Register src, LabelType* on_not_smi) {
  Condition smi = CheckSmi(src);
  j(NegateCondition(smi), on_not_smi);
}


template <typename LabelType>
void MacroAssembler::JumpUnlessNonNegativeSmi(
    Register src, LabelType* on_not_smi_or_negative) {
  Condition non_negative_smi = CheckNonNegativeSmi(src);
  j(NegateCondition(non_negative_smi), on_not_smi_or_negative);
}


template <typename LabelType>
void MacroAssembler::JumpIfSmiEqualsConstant(Register src,
                                             Smi* constant,
                                             LabelType* on_equals) {
  SmiCompare(src, constant);
  j(equal, on_equals);
}


template <typename LabelType>
void MacroAssembler::JumpIfNotValidSmiValue(Register src,
                                            LabelType* on_invalid) {
  Condition is_valid = CheckInteger32ValidSmiValue(src);
  j(NegateCondition(is_valid), on_invalid);
}


template <typename LabelType>
void MacroAssembler::JumpIfUIntNotValidSmiValue(Register src,
                                                LabelType* on_invalid) {
  Condition is_valid = CheckUInteger32ValidSmiValue(src);
  j(NegateCondition(is_valid), on_invalid);
}


template <typename LabelType>
void MacroAssembler::JumpIfNotBothSmi(Register src1,
                                      Register src2,
                                      LabelType* on_not_both_smi) {
  Condition both_smi = CheckBothSmi(src1, src2);
  j(NegateCondition(both_smi), on_not_both_smi);
}


template <typename LabelType>
void MacroAssembler::JumpUnlessBothNonNegativeSmi(Register src1,
                                                  Register src2,
                                                  LabelType* on_not_both_smi) {
  Condition both_smi = CheckBothNonNegativeSmi(src1, src2);
  j(NegateCondition(both_smi), on_not_both_smi);
}


template <typename LabelType>
void MacroAssembler::SmiOrIfSmis(Register dst, Register src1, Register src2,
                                 LabelType* on_not_smis) {
  if (dst.is(src1) || dst.is(src2)) {
    ASSERT(!src1.is(kScratchRegister));
    ASSERT(!src2.is(kScratchRegister));
    movq(kScratchRegister, src1);
    or_(kScratchRegister, src2);
    JumpIfNotSmi(kScratchRegister, on_not_smis);
    movq(dst, kScratchRegister);
  } else {
    movq(dst, src1);
    or_(dst, src2);
    JumpIfNotSmi(dst, on_not_smis);
  }
}


template <typename LabelType>
void MacroAssembler::JumpIfNotString(Register object,
                                     Register object_map,
                                     LabelType* not_string) {
  Condition is_smi = CheckSmi(object);
  j(is_smi, not_string);
  CmpObjectType(object, FIRST_NONSTRING_TYPE, object_map);
  j(above_equal, not_string);
}


template <typename LabelType>
void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register first_object,
                                                         Register second_object,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         LabelType* on_fail) {
  // Check that both objects are not smis.
  Condition either_smi = CheckEitherSmi(first_object, second_object);
  j(either_smi, on_fail);

  // Load instance type for both strings.
  movq(scratch1, FieldOperand(first_object, HeapObject::kMapOffset));
  movq(scratch2, FieldOperand(second_object, HeapObject::kMapOffset));
  movzxbl(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzxbl(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ascii strings.
  ASSERT(kNotStringTag != 0);
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;

  andl(scratch1, Immediate(kFlatAsciiStringMask));
  andl(scratch2, Immediate(kFlatAsciiStringMask));
  // Interleave the bits to check both scratch1 and scratch2 in one test.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmpl(scratch1,
       Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
  j(not_equal, on_fail);
}


template <typename LabelType>
void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Register scratch,
    LabelType* failure) {
  if (!scratch.is(instance_type)) {
    movl(scratch, instance_type);
  }

  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;

  andl(scratch, Immediate(kFlatAsciiStringMask));
  cmpl(scratch, Immediate(kStringTag | kSeqStringTag | kAsciiStringTag));
  j(not_equal, failure);
}


template <typename LabelType>
void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialAscii(
    Register first_object_instance_type,
    Register second_object_instance_type,
    Register scratch1,
    Register scratch2,
    LabelType* on_fail) {
  // Load instance type for both strings.
  movq(scratch1, first_object_instance_type);
  movq(scratch2, second_object_instance_type);

  // Check that both are flat ascii strings.
  ASSERT(kNotStringTag != 0);
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;

  andl(scratch1, Immediate(kFlatAsciiStringMask));
  andl(scratch2, Immediate(kFlatAsciiStringMask));
  // Interleave the bits to check both scratch1 and scratch2 in one test.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmpl(scratch1,
       Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
  j(not_equal, on_fail);
}


template <typename LabelType>
void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                LabelType* branch) {
  if (Serializer::enabled()) {
    // Can't do arithmetic on external references if it might get serialized.
    // The mask isn't really an address. We load it as an external reference in
    // case the size of the new space is different between the snapshot maker
    // and the running system.
    if (scratch.is(object)) {
      movq(kScratchRegister, ExternalReference::new_space_mask(isolate()));
      and_(scratch, kScratchRegister);
    } else {
      movq(scratch, ExternalReference::new_space_mask(isolate()));
      and_(scratch, object);
    }
    movq(kScratchRegister, ExternalReference::new_space_start(isolate()));
    cmpq(scratch, kScratchRegister);
    j(cc, branch);
  } else {
    ASSERT(is_int32(static_cast<int64_t>(HEAP->NewSpaceMask())));
    intptr_t new_space_start =
        reinterpret_cast<intptr_t>(HEAP->NewSpaceStart());
    movq(kScratchRegister, -new_space_start, RelocInfo::NONE);
    if (scratch.is(object)) {
      addq(scratch, kScratchRegister);
    } else {
      lea(scratch, Operand(object, kScratchRegister, times_1, 0));
    }
    and_(scratch, Immediate(static_cast<int32_t>(HEAP->NewSpaceMask())));
    j(cc, branch);
  }
}


template <typename LabelType>
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    Register code_register,
                                    LabelType* done,
                                    InvokeFlag flag,
                                    CallWrapper* call_wrapper) {
  bool definitely_matches = false;
  NearLabel invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      Set(rax, actual.immediate());
      if (expected.immediate() ==
          SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
        // Don't worry about adapting arguments for built-ins that
        // don't want that done. Skip adaptation code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        Set(rbx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmpq(expected.reg(), Immediate(actual.immediate()));
      j(equal, &invoke);
      ASSERT(expected.reg().is(rbx));
      Set(rax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmpq(expected.reg(), actual.reg());
      j(equal, &invoke);
      ASSERT(actual.reg().is(rax));
      ASSERT(expected.reg().is(rbx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor = isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (!code_constant.is_null()) {
      movq(rdx, code_constant, RelocInfo::EMBEDDED_OBJECT);
      addq(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_register.is(rdx)) {
      movq(rdx, code_register);
    }

    if (flag == CALL_FUNCTION) {
      if (call_wrapper != NULL) call_wrapper->BeforeCall(CallSize(adaptor));
      Call(adaptor, RelocInfo::CODE_TARGET);
      if (call_wrapper != NULL) call_wrapper->AfterCall();
      jmp(done);
    } else {
      Jump(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}


} }  // namespace v8::internal

#endif  // V8_X64_MACRO_ASSEMBLER_X64_H_