blob: 2f6e9561653225bc51a085b7464aaef77a564f66 [file] [log] [blame]
Steve Blocka7e24c12009-10-30 11:49:00 +00001// Copyright 2009 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#ifndef V8_X64_MACRO_ASSEMBLER_X64_H_
29#define V8_X64_MACRO_ASSEMBLER_X64_H_
30
31#include "assembler.h"
32
33namespace v8 {
34namespace internal {
35
// Allocation options for the AllocateInNewSpace family of functions.
// Values are single-bit flags and may be combined with bitwise or.
enum AllocationFlags {
  // Plain allocation with no special behavior.
  NO_ALLOCATION_FLAGS = 0,
  // Tag the returned pointer as a heap object before handing it back.
  TAG_OBJECT = 1,
  // The result register already holds the new-space allocation top,
  // so it does not have to be reloaded.
  RESULT_CONTAINS_TOP = 2
};
46
// Default scratch register used by MacroAssembler (and other code that needs
// a spare register). The register isn't callee save, and not used by the
// function calling convention. The braced initializer is the x64 hardware
// register code.
static const Register kScratchRegister = { 10 };      // r10.
// Dedicated register holding a fixed smi constant; initialized by
// InitializeSmiConstantRegister() below. Presumably used to materialize
// common smi constants cheaply (see GetSmiConstant/LoadSmiConstant usage).
static const Register kSmiConstantRegister = { 15 };  // r15 (callee save).
// Dedicated register, callee save so it survives calls to C code.
// NOTE(review): presumably holds the roots pointer used by LoadRoot —
// the initialization site is not visible in this header; confirm.
static const Register kRootRegister = { 13 };         // r13 (callee save).
// Value of smi in kSmiConstantRegister.
static const int kSmiConstantRegisterValue = 1;
Steve Blocka7e24c12009-10-30 11:49:00 +000055
// Convenience for platform-independent signatures: on x64 a MemOperand
// is simply an Operand, so shared code can name either.
typedef Operand MemOperand;

// Forward declaration.
class JumpTarget;
61
// A (register, scale-factor) pair describing a scaled index for use in a
// memory operand. Returned by SmiToIndex/SmiToNegativeIndex when
// converting a smi into an array-indexing factor.
struct SmiIndex {
  SmiIndex(Register index_register, ScaleFactor scale)
      : reg(index_register),
        scale(scale) {}
  Register reg;       // Register holding the index value.
  ScaleFactor scale;  // Scale to apply to reg in the addressing mode.
};
69
70// MacroAssembler implements a collection of frequently used macros.
71class MacroAssembler: public Assembler {
72 public:
73 MacroAssembler(void* buffer, int size);
74
75 void LoadRoot(Register destination, Heap::RootListIndex index);
76 void CompareRoot(Register with, Heap::RootListIndex index);
77 void CompareRoot(Operand with, Heap::RootListIndex index);
78 void PushRoot(Heap::RootListIndex index);
Kristian Monsen25f61362010-05-21 11:50:48 +010079 void StoreRoot(Register source, Heap::RootListIndex index);
Steve Blocka7e24c12009-10-30 11:49:00 +000080
81 // ---------------------------------------------------------------------------
82 // GC Support
83
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010084 // For page containing |object| mark region covering |addr| dirty.
85 // RecordWriteHelper only works if the object is not in new
Steve Block6ded16b2010-05-10 14:33:55 +010086 // space.
87 void RecordWriteHelper(Register object,
88 Register addr,
89 Register scratch);
90
91 // Check if object is in new space. The condition cc can be equal or
92 // not_equal. If it is equal a jump will be done if the object is on new
93 // space. The register scratch can be object itself, but it will be clobbered.
Kristian Monsen0d5e1162010-09-30 15:31:59 +010094 template <typename LabelType>
Steve Block6ded16b2010-05-10 14:33:55 +010095 void InNewSpace(Register object,
96 Register scratch,
97 Condition cc,
Kristian Monsen0d5e1162010-09-30 15:31:59 +010098 LabelType* branch);
Steve Block6ded16b2010-05-10 14:33:55 +010099
Steve Block8defd9f2010-07-08 12:39:36 +0100100 // For page containing |object| mark region covering [object+offset]
101 // dirty. |object| is the object being stored into, |value| is the
102 // object being stored. If |offset| is zero, then the |scratch|
103 // register contains the array index into the elements array
Ben Murdochf87a2032010-10-22 12:50:53 +0100104 // represented as an untagged 32-bit integer. All registers are
105 // clobbered by the operation. RecordWrite filters out smis so it
106 // does not update the write barrier if the value is a smi.
Steve Blocka7e24c12009-10-30 11:49:00 +0000107 void RecordWrite(Register object,
108 int offset,
109 Register value,
110 Register scratch);
111
Steve Block8defd9f2010-07-08 12:39:36 +0100112 // For page containing |object| mark region covering [address]
113 // dirty. |object| is the object being stored into, |value| is the
114 // object being stored. All registers are clobbered by the
115 // operation. RecordWrite filters out smis so it does not update
116 // the write barrier if the value is a smi.
117 void RecordWrite(Register object,
118 Register address,
119 Register value);
120
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100121 // For page containing |object| mark region covering [object+offset] dirty.
Steve Block3ce2e202009-11-05 08:53:23 +0000122 // The value is known to not be a smi.
123 // object is the object being stored into, value is the object being stored.
124 // If offset is zero, then the scratch register contains the array index into
Ben Murdochf87a2032010-10-22 12:50:53 +0100125 // the elements array represented as an untagged 32-bit integer.
Steve Block3ce2e202009-11-05 08:53:23 +0000126 // All registers are clobbered by the operation.
127 void RecordWriteNonSmi(Register object,
128 int offset,
129 Register value,
130 Register scratch);
131
Steve Blocka7e24c12009-10-30 11:49:00 +0000132#ifdef ENABLE_DEBUGGER_SUPPORT
133 // ---------------------------------------------------------------------------
134 // Debugger Support
135
Andrei Popescu402d9372010-02-26 13:31:12 +0000136 void DebugBreak();
Steve Blocka7e24c12009-10-30 11:49:00 +0000137#endif
138
139 // ---------------------------------------------------------------------------
Steve Blockd0582a62009-12-15 09:54:21 +0000140 // Stack limit support
141
142 // Do simple test for stack overflow. This doesn't handle an overflow.
143 void StackLimitCheck(Label* on_stack_limit_hit);
144
145 // ---------------------------------------------------------------------------
Steve Blocka7e24c12009-10-30 11:49:00 +0000146 // Activation frames
147
  // Enter/leave a stack frame of type StackFrame::INTERNAL.
  void EnterInternalFrame() { EnterFrame(StackFrame::INTERNAL); }
  void LeaveInternalFrame() { LeaveFrame(StackFrame::INTERNAL); }

  // Enter/leave a stack frame of type StackFrame::CONSTRUCT.
  void EnterConstructFrame() { EnterFrame(StackFrame::CONSTRUCT); }
  void LeaveConstructFrame() { LeaveFrame(StackFrame::CONSTRUCT); }
153
Steve Blockd0582a62009-12-15 09:54:21 +0000154 // Enter specific kind of exit frame; either in normal or
155 // debug mode. Expects the number of arguments in register rax and
Steve Blocka7e24c12009-10-30 11:49:00 +0000156 // sets up the number of arguments in register rdi and the pointer
157 // to the first argument in register rsi.
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100158 void EnterExitFrame(int result_size = 1);
Steve Blocka7e24c12009-10-30 11:49:00 +0000159
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100160 void EnterApiExitFrame(int stack_space,
Ben Murdochbb769b22010-08-11 14:56:33 +0100161 int argc,
162 int result_size = 1);
163
Steve Blocka7e24c12009-10-30 11:49:00 +0000164 // Leave the current exit frame. Expects/provides the return value in
165 // register rax:rdx (untouched) and the pointer to the first
166 // argument in register rsi.
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100167 void LeaveExitFrame(int result_size = 1);
Steve Blocka7e24c12009-10-30 11:49:00 +0000168
169
170 // ---------------------------------------------------------------------------
171 // JavaScript invokes
172
173 // Invoke the JavaScript function code by either calling or jumping.
174 void InvokeCode(Register code,
175 const ParameterCount& expected,
176 const ParameterCount& actual,
177 InvokeFlag flag);
178
179 void InvokeCode(Handle<Code> code,
180 const ParameterCount& expected,
181 const ParameterCount& actual,
182 RelocInfo::Mode rmode,
183 InvokeFlag flag);
184
185 // Invoke the JavaScript function in the given register. Changes the
186 // current context to the context in the function before invoking.
187 void InvokeFunction(Register function,
188 const ParameterCount& actual,
189 InvokeFlag flag);
190
Andrei Popescu402d9372010-02-26 13:31:12 +0000191 void InvokeFunction(JSFunction* function,
192 const ParameterCount& actual,
193 InvokeFlag flag);
194
Steve Blocka7e24c12009-10-30 11:49:00 +0000195 // Invoke specified builtin JavaScript function. Adds an entry to
196 // the unresolved list if the name does not resolve.
197 void InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag);
198
Steve Block791712a2010-08-27 10:21:07 +0100199 // Store the function for the given builtin in the target register.
200 void GetBuiltinFunction(Register target, Builtins::JavaScript id);
201
Steve Blocka7e24c12009-10-30 11:49:00 +0000202 // Store the code object for the given builtin in the target register.
203 void GetBuiltinEntry(Register target, Builtins::JavaScript id);
204
205
206 // ---------------------------------------------------------------------------
207 // Smi tagging, untagging and operations on tagged smis.
208
  // Load kSmiConstantRegister (r15) with the tagged smi representation of
  // kSmiConstantRegisterValue. Must have run before any code that reads
  // the register's contents. RelocInfo::NONE: the immediate is a plain
  // value and needs no relocation.
  void InitializeSmiConstantRegister() {
    movq(kSmiConstantRegister,
         reinterpret_cast<uint64_t>(Smi::FromInt(kSmiConstantRegisterValue)),
         RelocInfo::NONE);
  }
214
Steve Blocka7e24c12009-10-30 11:49:00 +0000215 // Conversions between tagged smi values and non-tagged integer values.
216
217 // Tag an integer value. The result must be known to be a valid smi value.
Leon Clarke4515c472010-02-03 11:58:03 +0000218 // Only uses the low 32 bits of the src register. Sets the N and Z flags
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100219 // based on the value of the resulting smi.
Steve Blocka7e24c12009-10-30 11:49:00 +0000220 void Integer32ToSmi(Register dst, Register src);
221
Kristian Monsen9dcf7e22010-06-28 14:14:28 +0100222 // Stores an integer32 value into a memory field that already holds a smi.
223 void Integer32ToSmiField(const Operand& dst, Register src);
224
Steve Blocka7e24c12009-10-30 11:49:00 +0000225 // Adds constant to src and tags the result as a smi.
226 // Result must be a valid smi.
Steve Block3ce2e202009-11-05 08:53:23 +0000227 void Integer64PlusConstantToSmi(Register dst, Register src, int constant);
Steve Blocka7e24c12009-10-30 11:49:00 +0000228
229 // Convert smi to 32-bit integer. I.e., not sign extended into
230 // high 32 bits of destination.
231 void SmiToInteger32(Register dst, Register src);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100232 void SmiToInteger32(Register dst, const Operand& src);
Steve Blocka7e24c12009-10-30 11:49:00 +0000233
234 // Convert smi to 64-bit integer (sign extended if necessary).
235 void SmiToInteger64(Register dst, Register src);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +0100236 void SmiToInteger64(Register dst, const Operand& src);
Steve Blocka7e24c12009-10-30 11:49:00 +0000237
238 // Multiply a positive smi's integer value by a power of two.
239 // Provides result as 64-bit integer value.
240 void PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
241 Register src,
242 int power);
243
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100244 // Divide a positive smi's integer value by a power of two.
245 // Provides result as 32-bit integer value.
246 void PositiveSmiDivPowerOfTwoToInteger32(Register dst,
247 Register src,
248 int power);
249
250
Steve Block3ce2e202009-11-05 08:53:23 +0000251 // Simple comparison of smis.
252 void SmiCompare(Register dst, Register src);
253 void SmiCompare(Register dst, Smi* src);
Steve Block6ded16b2010-05-10 14:33:55 +0100254 void SmiCompare(Register dst, const Operand& src);
Steve Block3ce2e202009-11-05 08:53:23 +0000255 void SmiCompare(const Operand& dst, Register src);
256 void SmiCompare(const Operand& dst, Smi* src);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +0100257 // Compare the int32 in src register to the value of the smi stored at dst.
258 void SmiCompareInteger32(const Operand& dst, Register src);
Steve Block3ce2e202009-11-05 08:53:23 +0000259 // Sets sign and zero flags depending on value of smi in register.
260 void SmiTest(Register src);
261
Steve Blocka7e24c12009-10-30 11:49:00 +0000262 // Functions performing a check on a known or potential smi. Returns
263 // a condition that is satisfied if the check is successful.
264
265 // Is the value a tagged smi.
266 Condition CheckSmi(Register src);
267
Ben Murdochf87a2032010-10-22 12:50:53 +0100268 // Is the value a non-negative tagged smi.
269 Condition CheckNonNegativeSmi(Register src);
Steve Blocka7e24c12009-10-30 11:49:00 +0000270
Leon Clarkee46be812010-01-19 14:06:41 +0000271 // Are both values tagged smis.
Steve Blocka7e24c12009-10-30 11:49:00 +0000272 Condition CheckBothSmi(Register first, Register second);
273
Ben Murdochf87a2032010-10-22 12:50:53 +0100274 // Are both values non-negative tagged smis.
275 Condition CheckBothNonNegativeSmi(Register first, Register second);
Leon Clarked91b9f72010-01-27 17:25:45 +0000276
Leon Clarkee46be812010-01-19 14:06:41 +0000277 // Are either value a tagged smi.
Ben Murdochbb769b22010-08-11 14:56:33 +0100278 Condition CheckEitherSmi(Register first,
279 Register second,
280 Register scratch = kScratchRegister);
Leon Clarkee46be812010-01-19 14:06:41 +0000281
Steve Blocka7e24c12009-10-30 11:49:00 +0000282 // Is the value the minimum smi value (since we are using
283 // two's complement numbers, negating the value is known to yield
284 // a non-smi value).
285 Condition CheckIsMinSmi(Register src);
286
Steve Blocka7e24c12009-10-30 11:49:00 +0000287 // Checks whether an 32-bit integer value is a valid for conversion
288 // to a smi.
289 Condition CheckInteger32ValidSmiValue(Register src);
290
Steve Block3ce2e202009-11-05 08:53:23 +0000291 // Checks whether an 32-bit unsigned integer value is a valid for
292 // conversion to a smi.
293 Condition CheckUInteger32ValidSmiValue(Register src);
294
Steve Blocka7e24c12009-10-30 11:49:00 +0000295 // Test-and-jump functions. Typically combines a check function
296 // above with a conditional jump.
297
298 // Jump if the value cannot be represented by a smi.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100299 template <typename LabelType>
300 void JumpIfNotValidSmiValue(Register src, LabelType* on_invalid);
Steve Blocka7e24c12009-10-30 11:49:00 +0000301
Steve Block3ce2e202009-11-05 08:53:23 +0000302 // Jump if the unsigned integer value cannot be represented by a smi.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100303 template <typename LabelType>
304 void JumpIfUIntNotValidSmiValue(Register src, LabelType* on_invalid);
Steve Block3ce2e202009-11-05 08:53:23 +0000305
Steve Blocka7e24c12009-10-30 11:49:00 +0000306 // Jump to label if the value is a tagged smi.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100307 template <typename LabelType>
308 void JumpIfSmi(Register src, LabelType* on_smi);
Steve Blocka7e24c12009-10-30 11:49:00 +0000309
310 // Jump to label if the value is not a tagged smi.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100311 template <typename LabelType>
312 void JumpIfNotSmi(Register src, LabelType* on_not_smi);
Steve Blocka7e24c12009-10-30 11:49:00 +0000313
Ben Murdochf87a2032010-10-22 12:50:53 +0100314 // Jump to label if the value is not a non-negative tagged smi.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100315 template <typename LabelType>
Ben Murdochf87a2032010-10-22 12:50:53 +0100316 void JumpUnlessNonNegativeSmi(Register src, LabelType* on_not_smi);
Steve Blocka7e24c12009-10-30 11:49:00 +0000317
Steve Block3ce2e202009-11-05 08:53:23 +0000318 // Jump to label if the value, which must be a tagged smi, has value equal
Steve Blocka7e24c12009-10-30 11:49:00 +0000319 // to the constant.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100320 template <typename LabelType>
321 void JumpIfSmiEqualsConstant(Register src,
322 Smi* constant,
323 LabelType* on_equals);
Steve Blocka7e24c12009-10-30 11:49:00 +0000324
325 // Jump if either or both register are not smi values.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100326 template <typename LabelType>
327 void JumpIfNotBothSmi(Register src1,
328 Register src2,
329 LabelType* on_not_both_smi);
Steve Blocka7e24c12009-10-30 11:49:00 +0000330
Ben Murdochf87a2032010-10-22 12:50:53 +0100331 // Jump if either or both register are not non-negative smi values.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100332 template <typename LabelType>
Ben Murdochf87a2032010-10-22 12:50:53 +0100333 void JumpUnlessBothNonNegativeSmi(Register src1, Register src2,
334 LabelType* on_not_both_smi);
Leon Clarked91b9f72010-01-27 17:25:45 +0000335
Steve Blocka7e24c12009-10-30 11:49:00 +0000336 // Operations on tagged smi values.
337
338 // Smis represent a subset of integers. The subset is always equivalent to
339 // a two's complement interpretation of a fixed number of bits.
340
341 // Optimistically adds an integer constant to a supposed smi.
342 // If the src is not a smi, or the result is not a smi, jump to
343 // the label.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100344 template <typename LabelType>
Steve Blocka7e24c12009-10-30 11:49:00 +0000345 void SmiTryAddConstant(Register dst,
346 Register src,
Steve Block3ce2e202009-11-05 08:53:23 +0000347 Smi* constant,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100348 LabelType* on_not_smi_result);
Steve Blocka7e24c12009-10-30 11:49:00 +0000349
Steve Block3ce2e202009-11-05 08:53:23 +0000350 // Add an integer constant to a tagged smi, giving a tagged smi as result.
351 // No overflow testing on the result is done.
352 void SmiAddConstant(Register dst, Register src, Smi* constant);
353
Leon Clarkef7060e22010-06-03 12:02:55 +0100354 // Add an integer constant to a tagged smi, giving a tagged smi as result.
355 // No overflow testing on the result is done.
356 void SmiAddConstant(const Operand& dst, Smi* constant);
357
Steve Blocka7e24c12009-10-30 11:49:00 +0000358 // Add an integer constant to a tagged smi, giving a tagged smi as result,
359 // or jumping to a label if the result cannot be represented by a smi.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100360 template <typename LabelType>
Steve Blocka7e24c12009-10-30 11:49:00 +0000361 void SmiAddConstant(Register dst,
362 Register src,
Steve Block3ce2e202009-11-05 08:53:23 +0000363 Smi* constant,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100364 LabelType* on_not_smi_result);
Steve Blocka7e24c12009-10-30 11:49:00 +0000365
366 // Subtract an integer constant from a tagged smi, giving a tagged smi as
Steve Block6ded16b2010-05-10 14:33:55 +0100367 // result. No testing on the result is done. Sets the N and Z flags
368 // based on the value of the resulting integer.
Steve Block3ce2e202009-11-05 08:53:23 +0000369 void SmiSubConstant(Register dst, Register src, Smi* constant);
370
371 // Subtract an integer constant from a tagged smi, giving a tagged smi as
Steve Blocka7e24c12009-10-30 11:49:00 +0000372 // result, or jumping to a label if the result cannot be represented by a smi.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100373 template <typename LabelType>
Steve Blocka7e24c12009-10-30 11:49:00 +0000374 void SmiSubConstant(Register dst,
375 Register src,
Steve Block3ce2e202009-11-05 08:53:23 +0000376 Smi* constant,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100377 LabelType* on_not_smi_result);
Steve Blocka7e24c12009-10-30 11:49:00 +0000378
379 // Negating a smi can give a negative zero or too large positive value.
Steve Block3ce2e202009-11-05 08:53:23 +0000380 // NOTICE: This operation jumps on success, not failure!
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100381 template <typename LabelType>
Steve Blocka7e24c12009-10-30 11:49:00 +0000382 void SmiNeg(Register dst,
383 Register src,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100384 LabelType* on_smi_result);
Steve Blocka7e24c12009-10-30 11:49:00 +0000385
386 // Adds smi values and return the result as a smi.
387 // If dst is src1, then src1 will be destroyed, even if
388 // the operation is unsuccessful.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100389 template <typename LabelType>
Steve Blocka7e24c12009-10-30 11:49:00 +0000390 void SmiAdd(Register dst,
391 Register src1,
392 Register src2,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100393 LabelType* on_not_smi_result);
394
395 void SmiAdd(Register dst,
396 Register src1,
397 Register src2);
Steve Blocka7e24c12009-10-30 11:49:00 +0000398
399 // Subtracts smi values and return the result as a smi.
400 // If dst is src1, then src1 will be destroyed, even if
401 // the operation is unsuccessful.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100402 template <typename LabelType>
Steve Blocka7e24c12009-10-30 11:49:00 +0000403 void SmiSub(Register dst,
404 Register src1,
405 Register src2,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100406 LabelType* on_not_smi_result);
Steve Blocka7e24c12009-10-30 11:49:00 +0000407
Steve Block6ded16b2010-05-10 14:33:55 +0100408 void SmiSub(Register dst,
409 Register src1,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100410 Register src2);
411
412 template <typename LabelType>
413 void SmiSub(Register dst,
414 Register src1,
Leon Clarkef7060e22010-06-03 12:02:55 +0100415 const Operand& src2,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100416 LabelType* on_not_smi_result);
417
418 void SmiSub(Register dst,
419 Register src1,
420 const Operand& src2);
Steve Block6ded16b2010-05-10 14:33:55 +0100421
Steve Blocka7e24c12009-10-30 11:49:00 +0000422 // Multiplies smi values and return the result as a smi,
423 // if possible.
424 // If dst is src1, then src1 will be destroyed, even if
425 // the operation is unsuccessful.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100426 template <typename LabelType>
Steve Blocka7e24c12009-10-30 11:49:00 +0000427 void SmiMul(Register dst,
428 Register src1,
429 Register src2,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100430 LabelType* on_not_smi_result);
Steve Blocka7e24c12009-10-30 11:49:00 +0000431
432 // Divides one smi by another and returns the quotient.
433 // Clobbers rax and rdx registers.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100434 template <typename LabelType>
Steve Blocka7e24c12009-10-30 11:49:00 +0000435 void SmiDiv(Register dst,
436 Register src1,
437 Register src2,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100438 LabelType* on_not_smi_result);
Steve Blocka7e24c12009-10-30 11:49:00 +0000439
440 // Divides one smi by another and returns the remainder.
441 // Clobbers rax and rdx registers.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100442 template <typename LabelType>
Steve Blocka7e24c12009-10-30 11:49:00 +0000443 void SmiMod(Register dst,
444 Register src1,
445 Register src2,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100446 LabelType* on_not_smi_result);
Steve Blocka7e24c12009-10-30 11:49:00 +0000447
448 // Bitwise operations.
449 void SmiNot(Register dst, Register src);
450 void SmiAnd(Register dst, Register src1, Register src2);
451 void SmiOr(Register dst, Register src1, Register src2);
452 void SmiXor(Register dst, Register src1, Register src2);
Steve Block3ce2e202009-11-05 08:53:23 +0000453 void SmiAndConstant(Register dst, Register src1, Smi* constant);
454 void SmiOrConstant(Register dst, Register src1, Smi* constant);
455 void SmiXorConstant(Register dst, Register src1, Smi* constant);
Steve Blocka7e24c12009-10-30 11:49:00 +0000456
457 void SmiShiftLeftConstant(Register dst,
458 Register src,
Kristian Monsen25f61362010-05-21 11:50:48 +0100459 int shift_value);
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100460 template <typename LabelType>
Steve Blocka7e24c12009-10-30 11:49:00 +0000461 void SmiShiftLogicalRightConstant(Register dst,
462 Register src,
463 int shift_value,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100464 LabelType* on_not_smi_result);
Steve Blocka7e24c12009-10-30 11:49:00 +0000465 void SmiShiftArithmeticRightConstant(Register dst,
466 Register src,
467 int shift_value);
468
469 // Shifts a smi value to the left, and returns the result if that is a smi.
470 // Uses and clobbers rcx, so dst may not be rcx.
471 void SmiShiftLeft(Register dst,
472 Register src1,
Kristian Monsen25f61362010-05-21 11:50:48 +0100473 Register src2);
Steve Blocka7e24c12009-10-30 11:49:00 +0000474 // Shifts a smi value to the right, shifting in zero bits at the top, and
475 // returns the unsigned intepretation of the result if that is a smi.
476 // Uses and clobbers rcx, so dst may not be rcx.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100477 template <typename LabelType>
Steve Blocka7e24c12009-10-30 11:49:00 +0000478 void SmiShiftLogicalRight(Register dst,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100479 Register src1,
480 Register src2,
481 LabelType* on_not_smi_result);
Steve Blocka7e24c12009-10-30 11:49:00 +0000482 // Shifts a smi value to the right, sign extending the top, and
483 // returns the signed intepretation of the result. That will always
484 // be a valid smi value, since it's numerically smaller than the
485 // original.
486 // Uses and clobbers rcx, so dst may not be rcx.
487 void SmiShiftArithmeticRight(Register dst,
488 Register src1,
489 Register src2);
490
491 // Specialized operations
492
493 // Select the non-smi register of two registers where exactly one is a
494 // smi. If neither are smis, jump to the failure label.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100495 template <typename LabelType>
Steve Blocka7e24c12009-10-30 11:49:00 +0000496 void SelectNonSmi(Register dst,
497 Register src1,
498 Register src2,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100499 LabelType* on_not_smis);
Steve Blocka7e24c12009-10-30 11:49:00 +0000500
501 // Converts, if necessary, a smi to a combination of number and
502 // multiplier to be used as a scaled index.
503 // The src register contains a *positive* smi value. The shift is the
504 // power of two to multiply the index value by (e.g.
505 // to index by smi-value * kPointerSize, pass the smi and kPointerSizeLog2).
506 // The returned index register may be either src or dst, depending
507 // on what is most efficient. If src and dst are different registers,
508 // src is always unchanged.
509 SmiIndex SmiToIndex(Register dst, Register src, int shift);
510
511 // Converts a positive smi to a negative index.
512 SmiIndex SmiToNegativeIndex(Register dst, Register src, int shift);
513
  // Basic Smi operations.

  // Load the tagged smi constant |source| into register |dst|.
  void Move(Register dst, Smi* source) {
    LoadSmiConstant(dst, source);
  }
518
519 void Move(const Operand& dst, Smi* source) {
Steve Block8defd9f2010-07-08 12:39:36 +0100520 Register constant = GetSmiConstant(source);
521 movq(dst, constant);
Steve Block3ce2e202009-11-05 08:53:23 +0000522 }
523
524 void Push(Smi* smi);
525 void Test(const Operand& dst, Smi* source);
526
Steve Blocka7e24c12009-10-30 11:49:00 +0000527 // ---------------------------------------------------------------------------
Leon Clarkee46be812010-01-19 14:06:41 +0000528 // String macros.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100529 template <typename LabelType>
Leon Clarkee46be812010-01-19 14:06:41 +0000530 void JumpIfNotBothSequentialAsciiStrings(Register first_object,
531 Register second_object,
532 Register scratch1,
533 Register scratch2,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100534 LabelType* on_not_both_flat_ascii);
Leon Clarkee46be812010-01-19 14:06:41 +0000535
Steve Block6ded16b2010-05-10 14:33:55 +0100536 // Check whether the instance type represents a flat ascii string. Jump to the
537 // label if not. If the instance type can be scratched specify same register
538 // for both instance type and scratch.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100539 template <typename LabelType>
540 void JumpIfInstanceTypeIsNotSequentialAscii(
541 Register instance_type,
542 Register scratch,
543 LabelType *on_not_flat_ascii_string);
Steve Block6ded16b2010-05-10 14:33:55 +0100544
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100545 template <typename LabelType>
Steve Block6ded16b2010-05-10 14:33:55 +0100546 void JumpIfBothInstanceTypesAreNotSequentialAscii(
547 Register first_object_instance_type,
548 Register second_object_instance_type,
549 Register scratch1,
550 Register scratch2,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100551 LabelType* on_fail);
Steve Block6ded16b2010-05-10 14:33:55 +0100552
Leon Clarkee46be812010-01-19 14:06:41 +0000553 // ---------------------------------------------------------------------------
554 // Macro instructions.
Steve Blocka7e24c12009-10-30 11:49:00 +0000555
Steve Block3ce2e202009-11-05 08:53:23 +0000556 // Load a register with a long value as efficiently as possible.
Steve Blocka7e24c12009-10-30 11:49:00 +0000557 void Set(Register dst, int64_t x);
  // Set the destination operand to the 64-bit constant x.
  void Set(const Operand& dst, int64_t x);

  // Move if the registers are not identical.
  void Move(Register target, Register source);

  // Handle support
  void Move(Register dst, Handle<Object> source);
  void Move(const Operand& dst, Handle<Object> source);
  void Cmp(Register dst, Handle<Object> source);
  void Cmp(const Operand& dst, Handle<Object> source);
  void Push(Handle<Object> source);

  // Emit code to discard a non-negative number of pointer-sized elements
  // from the stack, clobbering only the rsp register.
  void Drop(int stack_elements);

  // Call code at the given label in the current code object.
  void Call(Label* target) { call(target); }

  // Control Flow
  void Jump(Address destination, RelocInfo::Mode rmode);
  void Jump(ExternalReference ext);
  void Jump(Handle<Code> code_object, RelocInfo::Mode rmode);

  void Call(Address destination, RelocInfo::Mode rmode);
  void Call(ExternalReference ext);
  void Call(Handle<Code> code_object, RelocInfo::Mode rmode);

  // Compare object type for heap object.
  // Always use unsigned comparisons: above and below, not less and greater.
  // Incoming register is heap_object and outgoing register is map.
  // They may be the same register, and may be kScratchRegister.
  void CmpObjectType(Register heap_object, InstanceType type, Register map);

  // Compare instance type for map.
  // Always use unsigned comparisons: above and below, not less and greater.
  void CmpInstanceType(Register map, InstanceType type);

  // Check if the map of an object is equal to a specified map and
  // branch to label if not. Skip the smi check if not required
  // (object is known to be a heap object)
  void CheckMap(Register obj,
                Handle<Map> map,
                Label* fail,
                bool is_heap_object);

  // Check if the object in register heap_object is a string. Afterwards the
  // register map contains the object map and the register instance_type
  // contains the instance_type. The registers map and instance_type can be the
  // same in which case it contains the instance type afterwards. Either of the
  // registers map and instance_type can be the same as heap_object.
  Condition IsObjectStringType(Register heap_object,
                               Register map,
                               Register instance_type);

  // FCmp compares and pops the two values on top of the FPU stack.
  // The flag results are similar to integer cmp, but requires unsigned
  // jcc instructions (je, ja, jae, jb, jbe, je, and jz).
  void FCmp();

  // Abort execution if argument is not a number. Used in debug code.
  void AbortIfNotNumber(Register object);

  // Abort execution if argument is a smi. Used in debug code.
  void AbortIfSmi(Register object);

  // Abort execution if argument is not a smi. Used in debug code.
  void AbortIfNotSmi(Register object);

  // Abort execution if argument is not the root value with the given index.
  void AbortIfNotRootValue(Register src,
                           Heap::RootListIndex root_value_index,
                           const char* message);

  // ---------------------------------------------------------------------------
  // Exception handling

  // Push a new try handler and link into try handler chain.  The return
  // address must be pushed before calling this helper.
  void PushTryHandler(CodeLocation try_location, HandlerType type);

  // Unlink the stack handler on top of the stack from the try handler chain.
  void PopTryHandler();

  // ---------------------------------------------------------------------------
  // Inline caching support

  // Generate code for checking access rights - used for security checks
  // on access to global objects across environments. The holder register
  // is left untouched, but the scratch register and kScratchRegister,
  // which must be different, are clobbered.
  void CheckAccessGlobalProxy(Register holder_reg,
                              Register scratch,
                              Label* miss);
  // ---------------------------------------------------------------------------
  // Allocation support

  // Allocate an object in new space. If the new space is exhausted control
  // continues at the gc_required label. The allocated object is returned in
  // result and end of the new object is returned in result_end. The register
  // scratch can be passed as no_reg in which case an additional object
  // reference will be added to the reloc info. The returned pointers in result
  // and result_end have not yet been tagged as heap objects. If
  // result_contains_top_on_entry is true the content of result is known to be
  // the allocation top on entry (could be result_end from a previous call to
  // AllocateInNewSpace). If result_contains_top_on_entry is true scratch
  // should be no_reg as it is never used.
  void AllocateInNewSpace(int object_size,
                          Register result,
                          Register result_end,
                          Register scratch,
                          Label* gc_required,
                          AllocationFlags flags);

  // Variant for arrays: total size is header_size plus
  // element_count << element_size bytes.
  void AllocateInNewSpace(int header_size,
                          ScaleFactor element_size,
                          Register element_count,
                          Register result,
                          Register result_end,
                          Register scratch,
                          Label* gc_required,
                          AllocationFlags flags);

  // Variant taking the object size in a register rather than as an immediate.
  void AllocateInNewSpace(Register object_size,
                          Register result,
                          Register result_end,
                          Register scratch,
                          Label* gc_required,
                          AllocationFlags flags);

  // Undo allocation in new space. The object passed and objects allocated after
  // it will no longer be allocated. Make sure that no pointers are left to the
  // object(s) no longer allocated as they would be invalid when allocation is
  // un-done.
  void UndoAllocationInNewSpace(Register object);

  // Allocate a heap number in new space with undefined value. Returns
  // tagged pointer in result register, or jumps to gc_required if new
  // space is full.
  void AllocateHeapNumber(Register result,
                          Register scratch,
                          Label* gc_required);

  // Allocate a sequential string. All the header fields of the string object
  // are initialized.
  void AllocateTwoByteString(Register result,
                             Register length,
                             Register scratch1,
                             Register scratch2,
                             Register scratch3,
                             Label* gc_required);
  void AllocateAsciiString(Register result,
                           Register length,
                           Register scratch1,
                           Register scratch2,
                           Register scratch3,
                           Label* gc_required);

  // Allocate a raw cons string object. Only the map field of the result is
  // initialized.
  void AllocateConsString(Register result,
                          Register scratch1,
                          Register scratch2,
                          Label* gc_required);
  void AllocateAsciiConsString(Register result,
                               Register scratch1,
                               Register scratch2,
                               Label* gc_required);

  // ---------------------------------------------------------------------------
  // Support functions.

  // Check if result is zero and op is negative.
  void NegativeZeroTest(Register result, Register op, Label* then_label);

  // Check if result is zero and op is negative in code using jump targets.
  void NegativeZeroTest(CodeGenerator* cgen,
                        Register result,
                        Register op,
                        JumpTarget* then_target);

  // Check if result is zero and any of op1 and op2 are negative.
  // Register scratch is destroyed, and it must be different from op2.
  void NegativeZeroTest(Register result, Register op1, Register op2,
                        Register scratch, Label* then_label);

  // Try to get function prototype of a function and puts the value in
  // the result register. Checks that the function really is a
  // function and jumps to the miss label if the fast checks fail. The
  // function register will be untouched; the other register may be
  // clobbered.
  void TryGetFunctionPrototype(Register function,
                               Register result,
                               Label* miss);

  // Generates code for reporting that an illegal operation has
  // occurred.
  void IllegalOperation(int num_arguments);

  // Picks out an array index from the hash field.
  // Register use:
  //   hash - holds the index's hash. Clobbered.
  //   index - holds the overwritten index on exit.
  void IndexFromHash(Register hash, Register index);

  // Find the function context up the context chain.
  void LoadContext(Register dst, int context_chain_length);
766
Steve Blocka7e24c12009-10-30 11:49:00 +0000767 // ---------------------------------------------------------------------------
768 // Runtime calls
769
770 // Call a code stub.
771 void CallStub(CodeStub* stub);
772
Ben Murdochbb769b22010-08-11 14:56:33 +0100773 // Call a code stub and return the code object called. Try to generate
774 // the code if necessary. Do not perform a GC but instead return a retry
775 // after GC failure.
John Reck59135872010-11-02 12:39:01 -0700776 MUST_USE_RESULT MaybeObject* TryCallStub(CodeStub* stub);
Ben Murdochbb769b22010-08-11 14:56:33 +0100777
Leon Clarkee46be812010-01-19 14:06:41 +0000778 // Tail call a code stub (jump).
779 void TailCallStub(CodeStub* stub);
780
Ben Murdochbb769b22010-08-11 14:56:33 +0100781 // Tail call a code stub (jump) and return the code object called. Try to
782 // generate the code if necessary. Do not perform a GC but instead return
783 // a retry after GC failure.
John Reck59135872010-11-02 12:39:01 -0700784 MUST_USE_RESULT MaybeObject* TryTailCallStub(CodeStub* stub);
Ben Murdochbb769b22010-08-11 14:56:33 +0100785
Steve Blocka7e24c12009-10-30 11:49:00 +0000786 // Return from a code stub after popping its arguments.
787 void StubReturn(int argc);
788
789 // Call a runtime routine.
Steve Blocka7e24c12009-10-30 11:49:00 +0000790 void CallRuntime(Runtime::Function* f, int num_arguments);
791
Ben Murdochbb769b22010-08-11 14:56:33 +0100792 // Call a runtime function, returning the CodeStub object called.
793 // Try to generate the stub code if necessary. Do not perform a GC
794 // but instead return a retry after GC failure.
John Reck59135872010-11-02 12:39:01 -0700795 MUST_USE_RESULT MaybeObject* TryCallRuntime(Runtime::Function* f,
796 int num_arguments);
Ben Murdochbb769b22010-08-11 14:56:33 +0100797
Steve Blocka7e24c12009-10-30 11:49:00 +0000798 // Convenience function: Same as above, but takes the fid instead.
799 void CallRuntime(Runtime::FunctionId id, int num_arguments);
800
Ben Murdochbb769b22010-08-11 14:56:33 +0100801 // Convenience function: Same as above, but takes the fid instead.
John Reck59135872010-11-02 12:39:01 -0700802 MUST_USE_RESULT MaybeObject* TryCallRuntime(Runtime::FunctionId id,
803 int num_arguments);
Ben Murdochbb769b22010-08-11 14:56:33 +0100804
Andrei Popescu402d9372010-02-26 13:31:12 +0000805 // Convenience function: call an external reference.
806 void CallExternalReference(const ExternalReference& ext,
807 int num_arguments);
808
Steve Blocka7e24c12009-10-30 11:49:00 +0000809 // Tail call of a runtime routine (jump).
Steve Block6ded16b2010-05-10 14:33:55 +0100810 // Like JumpToExternalReference, but also takes care of passing the number
811 // of parameters.
812 void TailCallExternalReference(const ExternalReference& ext,
813 int num_arguments,
814 int result_size);
815
816 // Convenience function: tail call a runtime routine (jump).
817 void TailCallRuntime(Runtime::FunctionId fid,
Steve Blocka7e24c12009-10-30 11:49:00 +0000818 int num_arguments,
819 int result_size);
820
821 // Jump to a runtime routine.
Steve Block6ded16b2010-05-10 14:33:55 +0100822 void JumpToExternalReference(const ExternalReference& ext, int result_size);
Steve Blocka7e24c12009-10-30 11:49:00 +0000823
John Reck59135872010-11-02 12:39:01 -0700824 // Prepares stack to put arguments (aligns and so on).
825 // Uses calle-saved esi to restore stack state after call.
826 void PrepareCallApiFunction(int stack_space);
827
828 // Tail call an API function (jump). Allocates HandleScope, extracts
829 // returned value from handle and propogates exceptions.
830 // Clobbers ebx, edi and caller-save registers.
831 void CallApiFunctionAndReturn(ApiFunction* function);
832
Leon Clarke4515c472010-02-03 11:58:03 +0000833 // Before calling a C-function from generated code, align arguments on stack.
834 // After aligning the frame, arguments must be stored in esp[0], esp[4],
835 // etc., not pushed. The argument count assumes all arguments are word sized.
836 // The number of slots reserved for arguments depends on platform. On Windows
837 // stack slots are reserved for the arguments passed in registers. On other
838 // platforms stack slots are only reserved for the arguments actually passed
839 // on the stack.
840 void PrepareCallCFunction(int num_arguments);
841
842 // Calls a C function and cleans up the space for arguments allocated
843 // by PrepareCallCFunction. The called function is not allowed to trigger a
844 // garbage collection, since that might move the code and invalidate the
845 // return address (unless this is somehow accounted for by the called
846 // function).
847 void CallCFunction(ExternalReference function, int num_arguments);
848 void CallCFunction(Register function, int num_arguments);
849
850 // Calculate the number of stack slots to reserve for arguments when calling a
851 // C function.
852 int ArgumentStackSlotsForCFunctionCall(int num_arguments);
Steve Blocka7e24c12009-10-30 11:49:00 +0000853
854 // ---------------------------------------------------------------------------
855 // Utilities
856
857 void Ret();
858
Steve Blocka7e24c12009-10-30 11:49:00 +0000859 Handle<Object> CodeObject() { return code_object_; }
860
861
862 // ---------------------------------------------------------------------------
863 // StatsCounter support
864
865 void SetCounter(StatsCounter* counter, int value);
866 void IncrementCounter(StatsCounter* counter, int value);
867 void DecrementCounter(StatsCounter* counter, int value);
868
869
870 // ---------------------------------------------------------------------------
871 // Debugging
872
873 // Calls Abort(msg) if the condition cc is not satisfied.
874 // Use --debug_code to enable.
875 void Assert(Condition cc, const char* msg);
876
Iain Merrick75681382010-08-19 15:07:18 +0100877 void AssertFastElements(Register elements);
878
Steve Blocka7e24c12009-10-30 11:49:00 +0000879 // Like Assert(), but always enabled.
880 void Check(Condition cc, const char* msg);
881
882 // Print a message to stdout and abort execution.
883 void Abort(const char* msg);
884
Steve Block6ded16b2010-05-10 14:33:55 +0100885 // Check that the stack is aligned.
886 void CheckStackAlignment();
887
Steve Blocka7e24c12009-10-30 11:49:00 +0000888 // Verify restrictions about code generated in stubs.
889 void set_generating_stub(bool value) { generating_stub_ = value; }
890 bool generating_stub() { return generating_stub_; }
891 void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
892 bool allow_stub_calls() { return allow_stub_calls_; }
893
 private:
  // Flags consulted by the debug-mode checks above.
  bool generating_stub_;
  bool allow_stub_calls_;

  // Returns a register holding the smi value. The register MUST NOT be
  // modified. It may be the "smi 1 constant" register.
  Register GetSmiConstant(Smi* value);

  // Moves the smi value to the destination register.
  void LoadSmiConstant(Register dst, Smi* value);

  // This handle will be patched with the code object on installation.
  Handle<Object> code_object_;

  // Helper functions for generating invokes.
  template <typename LabelType>
  void InvokePrologue(const ParameterCount& expected,
                      const ParameterCount& actual,
                      Handle<Code> code_constant,
                      Register code_register,
                      LabelType* done,
                      InvokeFlag flag);

  // Activation support.
  void EnterFrame(StackFrame::Type type);
  void LeaveFrame(StackFrame::Type type);

  void EnterExitFramePrologue(bool save_rax);
  void EnterExitFrameEpilogue(int result_size, int argc);

  // Allocation support helpers.
  // Loads the top of new-space into the result register.
  // If flags contains RESULT_CONTAINS_TOP then result_end is valid and
  // already contains the top of new-space, and scratch is invalid.
  // Otherwise the address of the new-space top is loaded into scratch (if
  // scratch is valid), and the new-space top is loaded into result.
  void LoadAllocationTopHelper(Register result,
                               Register result_end,
                               Register scratch,
                               AllocationFlags flags);
  // Update allocation top with value in result_end register.
  // If scratch is valid, it contains the address of the allocation top.
  void UpdateAllocationTopHelper(Register result_end, Register scratch);

  // Helper for PopHandleScope.  Allowed to perform a GC and returns
  // NULL if gc_allowed.  Does not perform a GC if !gc_allowed, and
  // possibly returns a failure object indicating an allocation failure.
  Object* PopHandleScopeHelper(Register saved,
                               Register scratch,
                               bool gc_allowed);
};
945
946
// The code patcher is used to patch (typically) small parts of code e.g. for
// debugging and other types of instrumentation. When using the code patcher
// the exact number of bytes specified must be emitted. It is not legal to
// emit relocation information. If any of these constraints are violated it
// causes an assertion.
class CodePatcher {
 public:
  // Begins patching `size` bytes of code at `address`.
  CodePatcher(byte* address, int size);
  // Flushes the instruction cache for the patched region on destruction.
  virtual ~CodePatcher();

  // Macro assembler to emit code.
  MacroAssembler* masm() { return &masm_; }

 private:
  byte* address_;        // The address of the code being patched.
  int size_;             // Number of bytes of the expected patch size.
  MacroAssembler masm_;  // Macro assembler used to generate the code.
};
965
966
967// -----------------------------------------------------------------------------
968// Static helper functions.
969
970// Generate an Operand for loading a field from an object.
971static inline Operand FieldOperand(Register object, int offset) {
972 return Operand(object, offset - kHeapObjectTag);
973}
974
975
976// Generate an Operand for loading an indexed field from an object.
977static inline Operand FieldOperand(Register object,
978 Register index,
979 ScaleFactor scale,
980 int offset) {
981 return Operand(object, index, scale, offset - kHeapObjectTag);
982}
983
984
#ifdef GENERATED_CODE_COVERAGE
// Hook invoked with a "file:line" string for each ACCESS_MASM site that
// executes, used to measure coverage of generated code.
extern void LogGeneratedCodeCoverage(const char* file_line);
#define CODE_COVERAGE_STRINGIFY(x) #x
#define CODE_COVERAGE_TOSTRING(x) CODE_COVERAGE_STRINGIFY(x)
#define __FILE_LINE__ __FILE__ ":" CODE_COVERAGE_TOSTRING(__LINE__)
// NOTE(review): this instrumentation appears copied from the ia32 port:
// pushfd/pushad/popad/popfd are not valid in 64-bit mode, and
// reinterpret_cast<int> truncates a 64-bit pointer.  Confirm before
// building with GENERATED_CODE_COVERAGE defined.
#define ACCESS_MASM(masm) { \
    byte* x64_coverage_function = \
        reinterpret_cast<byte*>(FUNCTION_ADDR(LogGeneratedCodeCoverage)); \
    masm->pushfd(); \
    masm->pushad(); \
    masm->push(Immediate(reinterpret_cast<int>(&__FILE_LINE__))); \
    masm->call(x64_coverage_function, RelocInfo::RUNTIME_ENTRY); \
    masm->pop(rax); \
    masm->popad(); \
    masm->popfd(); \
  } \
  masm->
#else
#define ACCESS_MASM(masm) masm->
#endif
1005
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001006// -----------------------------------------------------------------------------
1007// Template implementations.
1008
1009static int kSmiShift = kSmiTagSize + kSmiShiftSize;
1010
1011
// Negate the smi in src into dst and jump to on_smi_result when the result
// is a valid smi.  Negation of 0 or Smi::kMinValue leaves the value
// unchanged (0 would need a -0.0 heap number; kMinValue overflows), which
// the comparison below detects; in that case control falls through with
// src restored to its original value.
template <typename LabelType>
void MacroAssembler::SmiNeg(Register dst,
                            Register src,
                            LabelType* on_smi_result) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    // Preserve the original value so it can be restored on failure.
    movq(kScratchRegister, src);
    neg(dst);  // Low 32 bits are retained as zero by negation.
    // Test if result is zero or Smi::kMinValue.
    cmpq(dst, kScratchRegister);
    j(not_equal, on_smi_result);
    movq(src, kScratchRegister);
  } else {
    movq(dst, src);
    neg(dst);
    cmpq(dst, src);
    // If the result is zero or Smi::kMinValue, negation failed to create a smi.
    j(not_equal, on_smi_result);
  }
}
1032
1033
// Add the smis in src1 and src2 into dst; jump to on_not_smi_result if the
// addition overflows the smi range.  dst must not alias src2.  When dst
// aliases src1 the sum is staged in kScratchRegister so src1 is unchanged
// on the overflow path.
template <typename LabelType>
void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2,
                            LabelType* on_not_smi_result) {
  ASSERT_NOT_NULL(on_not_smi_result);
  ASSERT(!dst.is(src2));
  if (dst.is(src1)) {
    movq(kScratchRegister, src1);
    addq(kScratchRegister, src2);
    j(overflow, on_not_smi_result);
    movq(dst, kScratchRegister);
  } else {
    movq(dst, src1);
    addq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}
1052
1053
// Subtract the smi in src2 from src1 into dst; jump to on_not_smi_result
// on overflow.  dst must not alias src2.  When dst aliases src1, cmpq sets
// the flags the subtraction would produce, so overflow is tested before
// src1 is clobbered.
template <typename LabelType>
void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            Register src2,
                            LabelType* on_not_smi_result) {
  ASSERT_NOT_NULL(on_not_smi_result);
  ASSERT(!dst.is(src2));
  if (dst.is(src1)) {
    cmpq(dst, src2);
    j(overflow, on_not_smi_result);
    subq(dst, src2);
  } else {
    movq(dst, src1);
    subq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}
1071
1072
// Subtract the smi in memory operand src2 from src1 into dst; jump to
// on_not_smi_result on overflow.  When dst aliases src1 the operand is
// loaded into kScratchRegister and overflow is tested with cmpq before
// src1 is modified.
template <typename LabelType>
void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            const Operand& src2,
                            LabelType* on_not_smi_result) {
  ASSERT_NOT_NULL(on_not_smi_result);
  if (dst.is(src1)) {
    movq(kScratchRegister, src2);
    cmpq(src1, kScratchRegister);
    j(overflow, on_not_smi_result);
    subq(src1, kScratchRegister);
  } else {
    movq(dst, src1);
    subq(dst, src2);
    j(overflow, on_not_smi_result);
  }
}
1090
1091
// Multiply the smis in src1 and src2 into dst.  Jumps to on_not_smi_result
// (with src1 restored when it aliases dst) if the product overflows the
// smi range or would be negative zero (zero product with a negative
// operand).  Clobbers kScratchRegister.
template <typename LabelType>
void MacroAssembler::SmiMul(Register dst,
                            Register src1,
                            Register src2,
                            LabelType* on_not_smi_result) {
  ASSERT(!dst.is(src2));
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));

  if (dst.is(src1)) {
    NearLabel failure, zero_correct_result;
    movq(kScratchRegister, src1);  // Create backup for later testing.
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, &failure);

    // Check for negative zero result.  If product is zero, and one
    // argument is negative, go to slow case.
    NearLabel correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result);

    movq(dst, kScratchRegister);
    xor_(dst, src2);
    j(positive, &zero_correct_result);  // Result was positive zero.

    bind(&failure);  // Reused failure exit, restores src1.
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result);

    bind(&zero_correct_result);
    xor_(dst, dst);

    bind(&correct_result);
  } else {
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, on_not_smi_result);
    // Check for negative zero result.  If product is zero, and one
    // argument is negative, go to slow case.
    NearLabel correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result);
    // One of src1 and src2 is zero, the check whether the other is
    // negative.
    movq(kScratchRegister, src1);
    xor_(kScratchRegister, src2);
    j(negative, on_not_smi_result);
    bind(&correct_result);
  }
}
1144
1145
// Add a smi constant to src, which is NOT known to be a smi.  Jumps to
// on_not_smi_result if src is not a smi or if the addition overflows;
// otherwise dst holds the smi sum.  dst may alias src (the sum is then
// staged in kScratchRegister so src is untouched on failure).
template <typename LabelType>
void MacroAssembler::SmiTryAddConstant(Register dst,
                                       Register src,
                                       Smi* constant,
                                       LabelType* on_not_smi_result) {
  // Does not assume that src is a smi.
  ASSERT_EQ(static_cast<int>(1), static_cast<int>(kSmiTagMask));
  ASSERT_EQ(0, kSmiTag);
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));

  JumpIfNotSmi(src, on_not_smi_result);
  Register tmp = (dst.is(src) ? kScratchRegister : dst);
  LoadSmiConstant(tmp, constant);
  addq(tmp, src);
  j(overflow, on_not_smi_result);
  if (dst.is(src)) {
    movq(dst, tmp);
  }
}
1166
1167
// Add a smi constant to the smi in src, storing the result in dst; jump to
// on_not_smi_result on overflow.  When dst aliases src the sum is staged
// in kScratchRegister so src survives the overflow path.  A zero constant
// reduces to a plain move.
template <typename LabelType>
void MacroAssembler::SmiAddConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    LabelType* on_not_smi_result) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    LoadSmiConstant(kScratchRegister, constant);
    addq(kScratchRegister, src);
    j(overflow, on_not_smi_result);
    movq(dst, kScratchRegister);
  } else {
    LoadSmiConstant(dst, constant);
    addq(dst, src);
    j(overflow, on_not_smi_result);
  }
}
1190
1191
// Subtract a smi constant from the smi in src into dst; jump to
// on_not_smi_result on overflow (src preserved when it aliases dst).
// The subtraction is done by adding the negated constant, except for
// Smi::kMinValue whose negation is not a smi and is handled specially.
template <typename LabelType>
void MacroAssembler::SmiSubConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    LabelType* on_not_smi_result) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    if (constant->value() == Smi::kMinValue) {
      // Subtracting min-value from any non-negative value will overflow.
      // We test the non-negativeness before doing the subtraction.
      testq(src, src);
      j(not_sign, on_not_smi_result);
      LoadSmiConstant(kScratchRegister, constant);
      subq(dst, kScratchRegister);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(kScratchRegister, Smi::FromInt(-constant->value()));
      addq(kScratchRegister, dst);
      j(overflow, on_not_smi_result);
      movq(dst, kScratchRegister);
    }
  } else {
    if (constant->value() == Smi::kMinValue) {
      // Subtracting min-value from any non-negative value will overflow.
      // We test the non-negativeness before doing the subtraction.
      testq(src, src);
      j(not_sign, on_not_smi_result);
      LoadSmiConstant(dst, constant);
      // Adding and subtracting the min-value gives the same result, it only
      // differs on the overflow bit, which we don't check here.
      addq(dst, src);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(dst, Smi::FromInt(-(constant->value())));
      addq(dst, src);
      j(overflow, on_not_smi_result);
    }
  }
}
1235
1236
1237template <typename LabelType>
1238void MacroAssembler::SmiDiv(Register dst,
1239 Register src1,
1240 Register src2,
1241 LabelType* on_not_smi_result) {
1242 ASSERT(!src1.is(kScratchRegister));
1243 ASSERT(!src2.is(kScratchRegister));
1244 ASSERT(!dst.is(kScratchRegister));
1245 ASSERT(!src2.is(rax));
1246 ASSERT(!src2.is(rdx));
1247 ASSERT(!src1.is(rdx));
1248
1249 // Check for 0 divisor (result is +/-Infinity).
1250 NearLabel positive_divisor;
1251 testq(src2, src2);
1252 j(zero, on_not_smi_result);
1253
1254 if (src1.is(rax)) {
1255 movq(kScratchRegister, src1);
1256 }
1257 SmiToInteger32(rax, src1);
1258 // We need to rule out dividing Smi::kMinValue by -1, since that would
1259 // overflow in idiv and raise an exception.
1260 // We combine this with negative zero test (negative zero only happens
1261 // when dividing zero by a negative number).
1262
1263 // We overshoot a little and go to slow case if we divide min-value
1264 // by any negative value, not just -1.
1265 NearLabel safe_div;
1266 testl(rax, Immediate(0x7fffffff));
1267 j(not_zero, &safe_div);
1268 testq(src2, src2);
1269 if (src1.is(rax)) {
1270 j(positive, &safe_div);
1271 movq(src1, kScratchRegister);
1272 jmp(on_not_smi_result);
1273 } else {
1274 j(negative, on_not_smi_result);
1275 }
1276 bind(&safe_div);
1277
1278 SmiToInteger32(src2, src2);
1279 // Sign extend src1 into edx:eax.
1280 cdq();
1281 idivl(src2);
1282 Integer32ToSmi(src2, src2);
1283 // Check that the remainder is zero.
1284 testl(rdx, rdx);
1285 if (src1.is(rax)) {
1286 NearLabel smi_result;
1287 j(zero, &smi_result);
1288 movq(src1, kScratchRegister);
1289 jmp(on_not_smi_result);
1290 bind(&smi_result);
1291 } else {
1292 j(not_zero, on_not_smi_result);
1293 }
1294 if (!dst.is(src1) && src1.is(rax)) {
1295 movq(src1, kScratchRegister);
1296 }
1297 Integer32ToSmi(dst, rax);
1298}
1299
1300
// Compute src1 modulo src2 (both smis) into dst.  Jumps to
// on_not_smi_result (with inputs re-tagged/restored) for a zero divisor,
// for the overflowing Smi::kMinValue % -1 edge case, and when the result
// would be negative zero (zero remainder with a negative dividend).
// Clobbers rax and rdx; src2 must not be rax or rdx and src1 must not
// be rdx or alias src2.
template <typename LabelType>
void MacroAssembler::SmiMod(Register dst,
                            Register src1,
                            Register src2,
                            LabelType* on_not_smi_result) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!src2.is(rax));
  ASSERT(!src2.is(rdx));
  ASSERT(!src1.is(rdx));
  ASSERT(!src1.is(src2));

  testq(src2, src2);
  j(zero, on_not_smi_result);

  if (src1.is(rax)) {
    // Back up src1 so it can be restored on the slow path.
    movq(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  SmiToInteger32(src2, src2);

  // Test for the edge case of dividing Smi::kMinValue by -1 (will overflow).
  NearLabel safe_div;
  cmpl(rax, Immediate(Smi::kMinValue));
  j(not_equal, &safe_div);
  cmpl(src2, Immediate(-1));
  j(not_equal, &safe_div);
  // Retag inputs and go slow case.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  jmp(on_not_smi_result);
  bind(&safe_div);

  // Sign extend eax into edx:eax.
  cdq();
  idivl(src2);
  // Restore smi tags on inputs.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  // Check for a negative zero result.  If the result is zero, and the
  // dividend is negative, go slow to return a floating point negative zero.
  NearLabel smi_result;
  testl(rdx, rdx);
  j(not_zero, &smi_result);
  testq(src1, src1);
  j(negative, on_not_smi_result);
  bind(&smi_result);
  Integer32ToSmi(dst, rdx);
}
1355
1356
// Logical (unsigned) right shift of the smi in src by a constant, into
// dst.  For a zero shift count a negative smi is not a valid result (its
// unsigned interpretation exceeds the smi range), so that case jumps to
// on_not_smi_result.  The dst-aliases-src form is not implemented because
// it is unused.
template <typename LabelType>
void MacroAssembler::SmiShiftLogicalRightConstant(
    Register dst, Register src, int shift_value, LabelType* on_not_smi_result) {
  // Logic right shift interprets its result as an *unsigned* number.
  if (dst.is(src)) {
    UNIMPLEMENTED();  // Not used.
  } else {
    movq(dst, src);
    if (shift_value == 0) {
      testq(dst, dst);
      j(negative, on_not_smi_result);
    }
    // Shift out the tag as well, then re-tag the result.
    shr(dst, Immediate(shift_value + kSmiShift));
    shl(dst, Immediate(kSmiShift));
  }
}
1373
1374
1375template <typename LabelType>
1376void MacroAssembler::SmiShiftLogicalRight(Register dst,
1377 Register src1,
1378 Register src2,
1379 LabelType* on_not_smi_result) {
1380 ASSERT(!dst.is(kScratchRegister));
1381 ASSERT(!src1.is(kScratchRegister));
1382 ASSERT(!src2.is(kScratchRegister));
1383 ASSERT(!dst.is(rcx));
1384 NearLabel result_ok;
1385 if (src1.is(rcx) || src2.is(rcx)) {
1386 movq(kScratchRegister, rcx);
1387 }
1388 if (!dst.is(src1)) {
1389 movq(dst, src1);
1390 }
1391 SmiToInteger32(rcx, src2);
1392 orl(rcx, Immediate(kSmiShift));
1393 shr_cl(dst); // Shift is rcx modulo 0x1f + 32.
1394 shl(dst, Immediate(kSmiShift));
1395 testq(dst, dst);
1396 if (src1.is(rcx) || src2.is(rcx)) {
1397 NearLabel positive_result;
1398 j(positive, &positive_result);
1399 if (src1.is(rcx)) {
1400 movq(src1, kScratchRegister);
1401 } else {
1402 movq(src2, kScratchRegister);
1403 }
1404 jmp(on_not_smi_result);
1405 bind(&positive_result);
1406 } else {
1407 j(negative, on_not_smi_result); // src2 was zero and src1 negative.
1408 }
1409}
1410
1411
// Given that exactly one of src1/src2 is a smi, set dst to the non-smi
// operand without branching, using a tag-derived mask; jump to on_not_smis
// if both operands turn out to be smis.  Clobbers kScratchRegister.
template <typename LabelType>
void MacroAssembler::SelectNonSmi(Register dst,
                                  Register src1,
                                  Register src2,
                                  LabelType* on_not_smis) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(src1));
  ASSERT(!dst.is(src2));
  // Both operands must not be smis.
#ifdef DEBUG
  if (allow_stub_calls()) {  // Check contains a stub call.
    Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
    Check(not_both_smis, "Both registers were smis in SelectNonSmi.");
  }
#endif
  ASSERT_EQ(0, kSmiTag);
  ASSERT_EQ(0, Smi::FromInt(0));
  movl(kScratchRegister, Immediate(kSmiTagMask));
  and_(kScratchRegister, src1);
  testl(kScratchRegister, src2);
  // If non-zero then both are smis.
  j(not_zero, on_not_smis);

  // Exactly one operand is a smi.
  ASSERT_EQ(1, static_cast<int>(kSmiTagMask));
  // kScratchRegister still holds src1 & kSmiTag, which is either zero or one.
  subq(kScratchRegister, Immediate(1));
  // If src1 is a smi, then scratch register all 1s, else it is all 0s.
  movq(dst, src1);
  xor_(dst, src2);
  and_(dst, kScratchRegister);
  // If src1 is a smi, dst holds src1 ^ src2, else it is zero.
  xor_(dst, src1);
  // If src1 is a smi, dst is src2, else it is src1, i.e., the non-smi.
}
1449
1450
1451template <typename LabelType>
1452void MacroAssembler::JumpIfSmi(Register src, LabelType* on_smi) {
1453 ASSERT_EQ(0, kSmiTag);
1454 Condition smi = CheckSmi(src);
1455 j(smi, on_smi);
1456}
1457
1458
1459template <typename LabelType>
1460void MacroAssembler::JumpIfNotSmi(Register src, LabelType* on_not_smi) {
1461 Condition smi = CheckSmi(src);
1462 j(NegateCondition(smi), on_not_smi);
1463}
1464
1465
1466template <typename LabelType>
Ben Murdochf87a2032010-10-22 12:50:53 +01001467void MacroAssembler::JumpUnlessNonNegativeSmi(
1468 Register src, LabelType* on_not_smi_or_negative) {
1469 Condition non_negative_smi = CheckNonNegativeSmi(src);
1470 j(NegateCondition(non_negative_smi), on_not_smi_or_negative);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001471}
1472
1473
template <typename LabelType>
void MacroAssembler::JumpIfSmiEqualsConstant(Register src,
                                             Smi* constant,
                                             LabelType* on_equals) {
  // Branches to on_equals when src holds a smi equal to constant.
  SmiCompare(src, constant);
  j(equal, on_equals);
}
1481
1482
1483template <typename LabelType>
1484void MacroAssembler::JumpIfNotValidSmiValue(Register src,
1485 LabelType* on_invalid) {
1486 Condition is_valid = CheckInteger32ValidSmiValue(src);
1487 j(NegateCondition(is_valid), on_invalid);
1488}
1489
1490
1491template <typename LabelType>
1492void MacroAssembler::JumpIfUIntNotValidSmiValue(Register src,
1493 LabelType* on_invalid) {
1494 Condition is_valid = CheckUInteger32ValidSmiValue(src);
1495 j(NegateCondition(is_valid), on_invalid);
1496}
1497
1498
1499template <typename LabelType>
1500void MacroAssembler::JumpIfNotBothSmi(Register src1,
1501 Register src2,
1502 LabelType* on_not_both_smi) {
1503 Condition both_smi = CheckBothSmi(src1, src2);
1504 j(NegateCondition(both_smi), on_not_both_smi);
1505}
1506
1507
1508template <typename LabelType>
Ben Murdochf87a2032010-10-22 12:50:53 +01001509void MacroAssembler::JumpUnlessBothNonNegativeSmi(Register src1,
1510 Register src2,
1511 LabelType* on_not_both_smi) {
1512 Condition both_smi = CheckBothNonNegativeSmi(src1, src2);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001513 j(NegateCondition(both_smi), on_not_both_smi);
1514}
1515
1516
template <typename LabelType>
void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register first_object,
                                                         Register second_object,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         LabelType* on_fail) {
  // Branches to on_fail unless both objects are flat (sequential) ascii
  // strings.  Clobbers scratch1 and scratch2; the object registers are
  // left untouched.
  // Check that both objects are not smis.
  Condition either_smi = CheckEitherSmi(first_object, second_object);
  j(either_smi, on_fail);

  // Load instance type for both strings.
  movq(scratch1, FieldOperand(first_object, HeapObject::kMapOffset));
  movq(scratch2, FieldOperand(second_object, HeapObject::kMapOffset));
  movzxbl(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzxbl(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ascii strings.
  ASSERT(kNotStringTag != 0);
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;

  andl(scratch1, Immediate(kFlatAsciiStringMask));
  andl(scratch2, Immediate(kFlatAsciiStringMask));
  // Interleave the bits to check both scratch1 and scratch2 in one test.
  // The ASSERT guarantees the shifted copies of the mask do not overlap,
  // so scratch1*1 + scratch2*8 combines both masked types losslessly.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmpl(scratch1,
       Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
  j(not_equal, on_fail);
}
1548
1549
1550template <typename LabelType>
1551void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
1552 Register instance_type,
1553 Register scratch,
1554 LabelType *failure) {
1555 if (!scratch.is(instance_type)) {
1556 movl(scratch, instance_type);
1557 }
1558
1559 const int kFlatAsciiStringMask =
1560 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
1561
1562 andl(scratch, Immediate(kFlatAsciiStringMask));
1563 cmpl(scratch, Immediate(kStringTag | kSeqStringTag | kAsciiStringTag));
1564 j(not_equal, failure);
1565}
1566
1567
template <typename LabelType>
void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialAscii(
    Register first_object_instance_type,
    Register second_object_instance_type,
    Register scratch1,
    Register scratch2,
    LabelType* on_fail) {
  // Branches to on_fail unless both instance types describe flat
  // (sequential) ascii strings.  Clobbers scratch1 and scratch2; the
  // instance-type registers themselves are not modified.
  // Load instance type for both strings.
  movq(scratch1, first_object_instance_type);
  movq(scratch2, second_object_instance_type);

  // Check that both are flat ascii strings.
  ASSERT(kNotStringTag != 0);
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;

  andl(scratch1, Immediate(kFlatAsciiStringMask));
  andl(scratch2, Immediate(kFlatAsciiStringMask));
  // Interleave the bits to check both scratch1 and scratch2 in one test.
  // The ASSERT guarantees the shifted copies of the mask do not overlap,
  // so scratch1*1 + scratch2*8 combines both masked types losslessly.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmpl(scratch1,
       Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
  j(not_equal, on_fail);
}
1594
1595
template <typename LabelType>
void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                LabelType* branch) {
  // Compares object's page-masked address against the new-space start and
  // branches on cc: with cc == equal the branch is taken when object lies
  // in new space, with cc == not_equal when it does not.
  // Clobbers scratch and kScratchRegister; scratch may alias object.
  if (Serializer::enabled()) {
    // Can't do arithmetic on external references if it might get serialized.
    // The mask isn't really an address. We load it as an external reference in
    // case the size of the new space is different between the snapshot maker
    // and the running system.
    if (scratch.is(object)) {
      movq(kScratchRegister, ExternalReference::new_space_mask());
      and_(scratch, kScratchRegister);
    } else {
      movq(scratch, ExternalReference::new_space_mask());
      and_(scratch, object);
    }
    movq(kScratchRegister, ExternalReference::new_space_start());
    cmpq(scratch, kScratchRegister);
    j(cc, branch);
  } else {
    // Fast path: fold start and mask into one add-and-mask sequence, so the
    // comparison against zero is implied by the and_'s flag effects.
    ASSERT(is_int32(static_cast<int64_t>(Heap::NewSpaceMask())));
    intptr_t new_space_start =
        reinterpret_cast<intptr_t>(Heap::NewSpaceStart());
    movq(kScratchRegister, -new_space_start, RelocInfo::NONE);
    if (scratch.is(object)) {
      addq(scratch, kScratchRegister);
    } else {
      // lea computes object - new_space_start without disturbing object.
      lea(scratch, Operand(object, kScratchRegister, times_1, 0));
    }
    and_(scratch, Immediate(static_cast<int32_t>(Heap::NewSpaceMask())));
    j(cc, branch);
  }
}
1630
1631
template <typename LabelType>
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    Register code_register,
                                    LabelType* done,
                                    InvokeFlag flag) {
  // Shared invocation prologue: compares the actual argument count against
  // the expected count and, on mismatch, transfers control to the arguments
  // adaptor trampoline (Call + jmp to done for CALL_FUNCTION, tail Jump
  // otherwise).  On a match the code falls through to the caller's invoke
  // sequence.  Register contract established here: rax = actual count,
  // rbx = expected count, rdx = code to invoke (entry past the Code header
  // when loaded from code_constant).
  bool definitely_matches = false;
  NearLabel invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      Set(rax, actual.immediate());
      if (expected.immediate() ==
              SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
        // Don't worry about adapting arguments for built-ins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        Set(rbx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmpq(expected.reg(), Immediate(actual.immediate()));
      j(equal, &invoke);
      ASSERT(expected.reg().is(rbx));
      Set(rax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmpq(expected.reg(), actual.reg());
      j(equal, &invoke);
      ASSERT(actual.reg().is(rax));
      ASSERT(expected.reg().is(rbx));
    }
  }

  if (!definitely_matches) {
    // Counts may differ (or were only comparable at runtime): route through
    // the arguments adaptor.  rdx must hold the code to invoke.
    Handle<Code> adaptor =
        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
    if (!code_constant.is_null()) {
      movq(rdx, code_constant, RelocInfo::EMBEDDED_OBJECT);
      addq(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_register.is(rdx)) {
      movq(rdx, code_register);
    }

    if (flag == CALL_FUNCTION) {
      Call(adaptor, RelocInfo::CODE_TARGET);
      jmp(done);
    } else {
      // Tail call: the adaptor (and then the callee) returns directly to
      // our caller, so no jump to done is emitted.
      Jump(adaptor, RelocInfo::CODE_TARGET);
    }
    // Runtime count checks that matched land here and fall through.
    bind(&invoke);
  }
}
1696
Steve Blocka7e24c12009-10-30 11:49:00 +00001697
1698} } // namespace v8::internal
1699
1700#endif // V8_X64_MACRO_ASSEMBLER_X64_H_