// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_X64_MACRO_ASSEMBLER_X64_H_
#define V8_X64_MACRO_ASSEMBLER_X64_H_

#include "assembler.h"

namespace v8 {
namespace internal {

// Flags used for the AllocateInNewSpace functions.
enum AllocationFlags {
  // No special flags.
  NO_ALLOCATION_FLAGS = 0,
  // Return the pointer to the allocated object already tagged as a heap
  // object.
  TAG_OBJECT = 1 << 0,
  // The content of the result register already contains the allocation top in
  // new space.
  RESULT_CONTAINS_TOP = 1 << 1
};

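// Illustrative flag combination (a sketch, not from the original source):
//   AllocationFlags flags =
//       static_cast<AllocationFlags>(TAG_OBJECT | RESULT_CONTAINS_TOP);
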
// Default scratch register used by MacroAssembler (and other code that needs
// a spare register). The register isn't callee saved and isn't used by the
// function calling convention.
static const Register kScratchRegister = { 10 };  // r10.
static const Register kRootRegister = { 13 };     // r13.

// Convenience for platform-independent signatures.
typedef Operand MemOperand;

// Forward declaration.
class JumpTarget;

struct SmiIndex {
  SmiIndex(Register index_register, ScaleFactor scale)
      : reg(index_register),
        scale(scale) {}
  Register reg;
  ScaleFactor scale;
};

// MacroAssembler implements a collection of frequently used macros.
class MacroAssembler: public Assembler {
 public:
  MacroAssembler(void* buffer, int size);

  void LoadRoot(Register destination, Heap::RootListIndex index);
  void CompareRoot(Register with, Heap::RootListIndex index);
  void CompareRoot(Operand with, Heap::RootListIndex index);
  void PushRoot(Heap::RootListIndex index);
  void StoreRoot(Register source, Heap::RootListIndex index);

  // ---------------------------------------------------------------------------
  // GC Support

  // Set the remembered set bit for an address which points into an
  // object. RecordWriteHelper only works if the object is not in new
  // space.
  void RecordWriteHelper(Register object,
                         Register addr,
                         Register scratch);

  // Check if the object is in new space. The condition cc can be equal or
  // not_equal. If it is equal, a jump is taken if the object is in new
  // space. The scratch register can be the object itself, but it will be
  // clobbered.
  void InNewSpace(Register object,
                  Register scratch,
                  Condition cc,
                  Label* branch);

  // Set the remembered set bit for [object+offset].
  // object is the object being stored into, value is the object being stored.
  // If offset is zero, then the scratch register contains the array index into
  // the elements array represented as a Smi.
  // All registers are clobbered by the operation.
  void RecordWrite(Register object,
                   int offset,
                   Register value,
                   Register scratch);

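  // Illustrative write-barrier sequence (a sketch, not from the original
  // source; masm is a MacroAssembler* and the register choices are arbitrary):
  //   masm->movq(FieldOperand(rbx, JSObject::kPropertiesOffset), rax);
  //   masm->RecordWrite(rbx, JSObject::kPropertiesOffset, rax, rcx);
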
  // Set the remembered set bit for [object+offset].
  // The value is known to not be a smi.
  // object is the object being stored into, value is the object being stored.
  // If offset is zero, then the scratch register contains the array index into
  // the elements array represented as a Smi.
  // All registers are clobbered by the operation.
  void RecordWriteNonSmi(Register object,
                         int offset,
                         Register value,
                         Register scratch);

#ifdef ENABLE_DEBUGGER_SUPPORT
  // ---------------------------------------------------------------------------
  // Debugger Support

  void SaveRegistersToMemory(RegList regs);
  void RestoreRegistersFromMemory(RegList regs);
  void PushRegistersFromMemory(RegList regs);
  void PopRegistersToMemory(RegList regs);
  void CopyRegistersFromStackToMemory(Register base,
                                      Register scratch,
                                      RegList regs);
  void DebugBreak();
#endif

  // ---------------------------------------------------------------------------
  // Stack limit support

  // Do a simple test for stack overflow. This does not handle the overflow
  // itself.
  void StackLimitCheck(Label* on_stack_limit_hit);

  // ---------------------------------------------------------------------------
  // Activation frames

  void EnterInternalFrame() { EnterFrame(StackFrame::INTERNAL); }
  void LeaveInternalFrame() { LeaveFrame(StackFrame::INTERNAL); }

  void EnterConstructFrame() { EnterFrame(StackFrame::CONSTRUCT); }
  void LeaveConstructFrame() { LeaveFrame(StackFrame::CONSTRUCT); }

  // Enter a specific kind of exit frame, either in normal or debug mode.
  // Expects the number of arguments in register rax and sets up the number
  // of arguments in register rdi and the pointer to the first argument in
  // register rsi.
  void EnterExitFrame(ExitFrame::Mode mode, int result_size = 1);

  // Leave the current exit frame. Expects/provides the return value in
  // register rax:rdx (untouched) and the pointer to the first
  // argument in register rsi.
  void LeaveExitFrame(ExitFrame::Mode mode, int result_size = 1);


  // ---------------------------------------------------------------------------
  // JavaScript invokes

  // Invoke the JavaScript function code by either calling or jumping.
  void InvokeCode(Register code,
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  InvokeFlag flag);

  void InvokeCode(Handle<Code> code,
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  RelocInfo::Mode rmode,
                  InvokeFlag flag);

  // Invoke the JavaScript function in the given register. Changes the
  // current context to the context in the function before invoking.
  void InvokeFunction(Register function,
                      const ParameterCount& actual,
                      InvokeFlag flag);

  void InvokeFunction(JSFunction* function,
                      const ParameterCount& actual,
                      InvokeFlag flag);

  // Invoke the specified builtin JavaScript function. Adds an entry to
  // the unresolved list if the name does not resolve.
  void InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag);

  // Store the code object for the given builtin in the target register.
  void GetBuiltinEntry(Register target, Builtins::JavaScript id);


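  // Illustrative invocation (a sketch, not from the original source): invoke
  // the JS function in rdi with the actual argument count held in rax.
  //   ParameterCount actual(rax);
  //   masm->InvokeFunction(rdi, actual, CALL_FUNCTION);
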
  // ---------------------------------------------------------------------------
  // Smi tagging, untagging and operations on tagged smis.

  // Conversions between tagged smi values and non-tagged integer values.

  // Tag an integer value. The result must be known to be a valid smi value.
  // Only uses the low 32 bits of the src register. Sets the N and Z flags
  // based on the value of the resulting integer.
  void Integer32ToSmi(Register dst, Register src);

  // Tag an integer value if possible, or jump if the integer value cannot be
  // represented as a smi. Only uses the low 32 bits of the src register.
  // NOTICE: Destroys the dst register even if unsuccessful!
  void Integer32ToSmi(Register dst, Register src, Label* on_overflow);

  // Adds constant to src and tags the result as a smi.
  // The result must be a valid smi.
  void Integer64PlusConstantToSmi(Register dst, Register src, int constant);

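  // Illustrative tagging round trip (a sketch, not from the original source):
  //   masm->Integer32ToSmi(rax, rbx);  // rax = smi for the int32 in rbx.
  //   masm->SmiToInteger32(rcx, rax);  // rcx = the original 32-bit value.
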
  // Convert smi to 32-bit integer. I.e., not sign extended into
  // high 32 bits of destination.
  void SmiToInteger32(Register dst, Register src);

  // Convert smi to 64-bit integer (sign extended if necessary).
  void SmiToInteger64(Register dst, Register src);

  // Multiply a positive smi's integer value by a power of two.
  // Provides result as 64-bit integer value.
  void PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
                                             Register src,
                                             int power);

  // Simple comparison of smis.
  void SmiCompare(Register dst, Register src);
  void SmiCompare(Register dst, Smi* src);
  void SmiCompare(Register dst, const Operand& src);
  void SmiCompare(const Operand& dst, Register src);
  void SmiCompare(const Operand& dst, Smi* src);
  // Sets sign and zero flags depending on value of smi in register.
  void SmiTest(Register src);

  // Functions performing a check on a known or potential smi. Returns
  // a condition that is satisfied if the check is successful.

  // Is the value a tagged smi.
  Condition CheckSmi(Register src);

  // Is the value a positive tagged smi.
  Condition CheckPositiveSmi(Register src);

  // Are both values tagged smis.
  Condition CheckBothSmi(Register first, Register second);

  // Are both values positive tagged smis.
  Condition CheckBothPositiveSmi(Register first, Register second);

  // Is either value a tagged smi.
  Condition CheckEitherSmi(Register first, Register second);

  // Is the value the minimum smi value (since we are using
  // two's complement numbers, negating the value is known to yield
  // a non-smi value).
  Condition CheckIsMinSmi(Register src);

  // Checks whether a 32-bit integer value is valid for conversion
  // to a smi.
  Condition CheckInteger32ValidSmiValue(Register src);

  // Checks whether a 32-bit unsigned integer value is valid for
  // conversion to a smi.
  Condition CheckUInteger32ValidSmiValue(Register src);

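  // Illustrative use of a returned Condition (a sketch, not from the original
  // source): branch to a smi-only fast path.
  //   Condition is_smi = masm->CheckSmi(rax);
  //   masm->j(is_smi, &fast_path);
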
  // Test-and-jump functions. Typically combines a check function
  // above with a conditional jump.

  // Jump if the value cannot be represented by a smi.
  void JumpIfNotValidSmiValue(Register src, Label* on_invalid);

  // Jump if the unsigned integer value cannot be represented by a smi.
  void JumpIfUIntNotValidSmiValue(Register src, Label* on_invalid);

  // Jump to label if the value is a tagged smi.
  void JumpIfSmi(Register src, Label* on_smi);

  // Jump to label if the value is not a tagged smi.
  void JumpIfNotSmi(Register src, Label* on_not_smi);

  // Jump to label if the value is not a positive tagged smi.
  void JumpIfNotPositiveSmi(Register src, Label* on_not_smi);

  // Jump to label if the value, which must be a tagged smi, has value equal
  // to the constant.
  void JumpIfSmiEqualsConstant(Register src, Smi* constant, Label* on_equals);

  // Jump if either or both registers are not smi values.
  void JumpIfNotBothSmi(Register src1, Register src2, Label* on_not_both_smi);

  // Jump if either or both registers are not positive smi values.
  void JumpIfNotBothPositiveSmi(Register src1, Register src2,
                                Label* on_not_both_smi);

  // Operations on tagged smi values.

  // Smis represent a subset of integers. The subset is always equivalent to
  // a two's complement interpretation of a fixed number of bits.

  // Optimistically adds an integer constant to a supposed smi.
  // If the src is not a smi, or the result is not a smi, jump to
  // the label.
  void SmiTryAddConstant(Register dst,
                         Register src,
                         Smi* constant,
                         Label* on_not_smi_result);

  // Add an integer constant to a tagged smi, giving a tagged smi as result.
  // No overflow testing on the result is done.
  void SmiAddConstant(Register dst, Register src, Smi* constant);

  // Add an integer constant to a tagged smi, giving a tagged smi as result.
  // No overflow testing on the result is done.
  void SmiAddConstant(const Operand& dst, Smi* constant);

  // Add an integer constant to a tagged smi, giving a tagged smi as result,
  // or jumping to a label if the result cannot be represented by a smi.
  void SmiAddConstant(Register dst,
                      Register src,
                      Smi* constant,
                      Label* on_not_smi_result);

  // Subtract an integer constant from a tagged smi, giving a tagged smi as
  // result. No testing on the result is done. Sets the N and Z flags
  // based on the value of the resulting integer.
  void SmiSubConstant(Register dst, Register src, Smi* constant);

  // Subtract an integer constant from a tagged smi, giving a tagged smi as
  // result, or jumping to a label if the result cannot be represented by a smi.
  void SmiSubConstant(Register dst,
                      Register src,
                      Smi* constant,
                      Label* on_not_smi_result);

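  // Illustrative guarded increment (a sketch, not from the original source):
  //   masm->SmiAddConstant(rax, rax, Smi::FromInt(1), &overflow);
  //   // Falls through only if rax + 1 is still representable as a smi.
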
  // Negating a smi can give a negative zero or too large positive value.
  // NOTICE: This operation jumps on success, not failure!
  void SmiNeg(Register dst,
              Register src,
              Label* on_smi_result);

  // Adds smi values and returns the result as a smi.
  // If dst is src1, then src1 will be destroyed, even if
  // the operation is unsuccessful.
  void SmiAdd(Register dst,
              Register src1,
              Register src2,
              Label* on_not_smi_result);

  // Subtracts smi values and returns the result as a smi.
  // If dst is src1, then src1 will be destroyed, even if
  // the operation is unsuccessful.
  void SmiSub(Register dst,
              Register src1,
              Register src2,
              Label* on_not_smi_result);

  void SmiSub(Register dst,
              Register src1,
              const Operand& src2,
              Label* on_not_smi_result);

  // Multiplies smi values and returns the result as a smi,
  // if possible.
  // If dst is src1, then src1 will be destroyed, even if
  // the operation is unsuccessful.
  void SmiMul(Register dst,
              Register src1,
              Register src2,
              Label* on_not_smi_result);

  // Divides one smi by another and returns the quotient.
  // Clobbers rax and rdx registers.
  void SmiDiv(Register dst,
              Register src1,
              Register src2,
              Label* on_not_smi_result);

  // Divides one smi by another and returns the remainder.
  // Clobbers rax and rdx registers.
  void SmiMod(Register dst,
              Register src1,
              Register src2,
              Label* on_not_smi_result);

  // Bitwise operations.
  void SmiNot(Register dst, Register src);
  void SmiAnd(Register dst, Register src1, Register src2);
  void SmiOr(Register dst, Register src1, Register src2);
  void SmiXor(Register dst, Register src1, Register src2);
  void SmiAndConstant(Register dst, Register src1, Smi* constant);
  void SmiOrConstant(Register dst, Register src1, Smi* constant);
  void SmiXorConstant(Register dst, Register src1, Smi* constant);

  void SmiShiftLeftConstant(Register dst,
                            Register src,
                            int shift_value);
  void SmiShiftLogicalRightConstant(Register dst,
                                    Register src,
                                    int shift_value,
                                    Label* on_not_smi_result);
  void SmiShiftArithmeticRightConstant(Register dst,
                                       Register src,
                                       int shift_value);

  // Shifts a smi value to the left, and returns the result if that is a smi.
  // Uses and clobbers rcx, so dst may not be rcx.
  void SmiShiftLeft(Register dst,
                    Register src1,
                    Register src2);
  // Shifts a smi value to the right, shifting in zero bits at the top, and
  // returns the unsigned interpretation of the result if that is a smi.
  // Uses and clobbers rcx, so dst may not be rcx.
  void SmiShiftLogicalRight(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result);
  // Shifts a smi value to the right, sign extending the top, and
  // returns the signed interpretation of the result. That will always
  // be a valid smi value, since it's numerically smaller than the
  // original.
  // Uses and clobbers rcx, so dst may not be rcx.
  void SmiShiftArithmeticRight(Register dst,
                               Register src1,
                               Register src2);

  // Specialized operations

  // Select the non-smi register of two registers where exactly one is a
  // smi. If neither is a smi, jump to the failure label.
  void SelectNonSmi(Register dst,
                    Register src1,
                    Register src2,
                    Label* on_not_smis);

  // Converts, if necessary, a smi to a combination of number and
  // multiplier to be used as a scaled index.
  // The src register contains a *positive* smi value. The shift is the
  // power of two to multiply the index value by (e.g.
  // to index by smi-value * kPointerSize, pass the smi and kPointerSizeLog2).
  // The returned index register may be either src or dst, depending
  // on what is most efficient. If src and dst are different registers,
  // src is always unchanged.
  SmiIndex SmiToIndex(Register dst, Register src, int shift);

  // Converts a positive smi to a negative index.
  SmiIndex SmiToNegativeIndex(Register dst, Register src, int shift);

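  // Illustrative scaled-index element load (a sketch, not from the original
  // source; assumes rbx holds a positive smi index and rcx a FixedArray):
  //   SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  //   masm->movq(rax, FieldOperand(rcx, index.reg, index.scale,
  //                                FixedArray::kHeaderSize));
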
  // Basic Smi operations.
  void Move(Register dst, Smi* source) {
    Set(dst, reinterpret_cast<int64_t>(source));
  }

  void Move(const Operand& dst, Smi* source) {
    Set(dst, reinterpret_cast<int64_t>(source));
  }

  void Push(Smi* smi);
  void Test(const Operand& dst, Smi* source);

  // ---------------------------------------------------------------------------
  // String macros.
  void JumpIfNotBothSequentialAsciiStrings(Register first_object,
                                           Register second_object,
                                           Register scratch1,
                                           Register scratch2,
                                           Label* on_not_both_flat_ascii);

  // Check whether the instance type represents a flat ascii string. Jump to
  // the label if not. If the instance type can be scratched, specify the same
  // register for both instance type and scratch.
  void JumpIfInstanceTypeIsNotSequentialAscii(Register instance_type,
                                              Register scratch,
                                              Label* on_not_flat_ascii_string);

  void JumpIfBothInstanceTypesAreNotSequentialAscii(
      Register first_object_instance_type,
      Register second_object_instance_type,
      Register scratch1,
      Register scratch2,
      Label* on_fail);

  // ---------------------------------------------------------------------------
  // Macro instructions.

  // Load a register with a long value as efficiently as possible.
  void Set(Register dst, int64_t x);
  void Set(const Operand& dst, int64_t x);

  // Handle support
  void Move(Register dst, Handle<Object> source);
  void Move(const Operand& dst, Handle<Object> source);
  void Cmp(Register dst, Handle<Object> source);
  void Cmp(const Operand& dst, Handle<Object> source);
  void Push(Handle<Object> source);

  // Emit code to discard a non-negative number of pointer-sized elements
  // from the stack, clobbering only the rsp register.
  void Drop(int stack_elements);

  void Call(Label* target) { call(target); }

  // Control Flow
  void Jump(Address destination, RelocInfo::Mode rmode);
  void Jump(ExternalReference ext);
  void Jump(Handle<Code> code_object, RelocInfo::Mode rmode);

  void Call(Address destination, RelocInfo::Mode rmode);
  void Call(ExternalReference ext);
  void Call(Handle<Code> code_object, RelocInfo::Mode rmode);

  // Compare object type for heap object.
  // Always use unsigned comparisons: above and below, not less and greater.
  // Incoming register is heap_object and outgoing register is map.
  // They may be the same register, and may be kScratchRegister.
  void CmpObjectType(Register heap_object, InstanceType type, Register map);

  // Compare instance type for map.
  // Always use unsigned comparisons: above and below, not less and greater.
  void CmpInstanceType(Register map, InstanceType type);

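  // Illustrative type check (a sketch, not from the original source):
  //   masm->CmpObjectType(rax, JS_ARRAY_TYPE, kScratchRegister);
  //   masm->j(not_equal, &not_a_js_array);
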
  // Check if the map of an object is equal to a specified map and
  // branch to label if not. Skip the smi check if not required
  // (object is known to be a heap object).
  void CheckMap(Register obj,
                Handle<Map> map,
                Label* fail,
                bool is_heap_object);

  // Check if the object in register heap_object is a string. Afterwards the
  // register map contains the object map and the register instance_type
  // contains the instance_type. The registers map and instance_type can be the
  // same in which case it contains the instance type afterwards. Either of the
  // registers map and instance_type can be the same as heap_object.
  Condition IsObjectStringType(Register heap_object,
                               Register map,
                               Register instance_type);

  // FCmp is similar to integer cmp, but requires unsigned
  // jcc instructions (je, ja, jae, jb, jbe, je, and jz).
  void FCmp();

  // Abort execution if argument is not a number. Used in debug code.
  void AbortIfNotNumber(Register object);

  // Abort execution if argument is not a smi. Used in debug code.
  void AbortIfNotSmi(Register object);

  // ---------------------------------------------------------------------------
  // Exception handling

  // Push a new try handler and link into try handler chain. The return
  // address must be pushed before calling this helper.
  void PushTryHandler(CodeLocation try_location, HandlerType type);

  // Unlink the stack handler on top of the stack from the try handler chain.
  void PopTryHandler();

  // ---------------------------------------------------------------------------
  // Inline caching support

  // Generates code that verifies that the maps of objects in the
  // prototype chain of object have not changed since the code was
  // generated and branches to the miss label if any map has. If
  // necessary the function also generates code for a security check
  // in case of global object holders. The scratch and holder
  // registers are always clobbered, but the object register is only
  // clobbered if it is the same as the holder register. The function
  // returns a register containing the holder - either object_reg or
  // holder_reg.
  // The function can optionally (when save_at_depth !=
  // kInvalidProtoDepth) save the object at the given depth by moving
  // it to [rsp + kPointerSize].
  Register CheckMaps(JSObject* object, Register object_reg,
                     JSObject* holder, Register holder_reg,
                     Register scratch,
                     int save_at_depth,
                     Label* miss);

  // Generate code for checking access rights - used for security checks
  // on access to global objects across environments. The holder register
  // is left untouched, but the scratch register and kScratchRegister,
  // which must be different, are clobbered.
  void CheckAccessGlobalProxy(Register holder_reg,
                              Register scratch,
                              Label* miss);


  // ---------------------------------------------------------------------------
  // Allocation support

  // Allocate an object in new space. If the new space is exhausted control
  // continues at the gc_required label. The allocated object is returned in
  // result and end of the new object is returned in result_end. The register
  // scratch can be passed as no_reg in which case an additional object
  // reference will be added to the reloc info. The returned pointers in result
  // and result_end have not yet been tagged as heap objects. If
  // result_contains_top_on_entry is true the content of result is known to be
  // the allocation top on entry (could be result_end from a previous call to
  // AllocateInNewSpace). If result_contains_top_on_entry is true scratch
  // should be no_reg as it is never used.
  void AllocateInNewSpace(int object_size,
                          Register result,
                          Register result_end,
                          Register scratch,
                          Label* gc_required,
                          AllocationFlags flags);

  void AllocateInNewSpace(int header_size,
                          ScaleFactor element_size,
                          Register element_count,
                          Register result,
                          Register result_end,
                          Register scratch,
                          Label* gc_required,
                          AllocationFlags flags);

  void AllocateInNewSpace(Register object_size,
                          Register result,
                          Register result_end,
                          Register scratch,
                          Label* gc_required,
                          AllocationFlags flags);

  // Undo allocation in new space. The object passed and objects allocated
  // after it will no longer be allocated. Make sure that no pointers are left
  // to the object(s) no longer allocated as they would be invalid when
  // allocation is un-done.
  void UndoAllocationInNewSpace(Register object);

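  // Illustrative fixed-size allocation (a sketch, not from the original
  // source; register choices are arbitrary):
  //   masm->AllocateInNewSpace(HeapNumber::kSize, rax, rbx, rcx,
  //                            &gc_required, TAG_OBJECT);
  //   // rax now holds a tagged pointer; its map still has to be initialized.
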
  // Allocate a heap number in new space with undefined value. Returns
  // tagged pointer in result register, or jumps to gc_required if new
  // space is full.
  void AllocateHeapNumber(Register result,
                          Register scratch,
                          Label* gc_required);

  // Allocate a sequential string. All the header fields of the string object
  // are initialized.
  void AllocateTwoByteString(Register result,
                             Register length,
                             Register scratch1,
                             Register scratch2,
                             Register scratch3,
                             Label* gc_required);
  void AllocateAsciiString(Register result,
                           Register length,
                           Register scratch1,
                           Register scratch2,
                           Register scratch3,
                           Label* gc_required);

  // Allocate a raw cons string object. Only the map field of the result is
  // initialized.
  void AllocateConsString(Register result,
                          Register scratch1,
                          Register scratch2,
                          Label* gc_required);
  void AllocateAsciiConsString(Register result,
                               Register scratch1,
                               Register scratch2,
                               Label* gc_required);

  // ---------------------------------------------------------------------------
  // Support functions.

  // Check if result is zero and op is negative.
  void NegativeZeroTest(Register result, Register op, Label* then_label);

  // Check if result is zero and op is negative in code using jump targets.
  void NegativeZeroTest(CodeGenerator* cgen,
                        Register result,
                        Register op,
                        JumpTarget* then_target);

  // Check if result is zero and any of op1 and op2 are negative.
  // Register scratch is destroyed, and it must be different from op2.
  void NegativeZeroTest(Register result, Register op1, Register op2,
                        Register scratch, Label* then_label);

  // Try to get the function prototype of a function and put the value in
  // the result register. Checks that the function really is a
  // function and jumps to the miss label if the fast checks fail. The
  // function register will be untouched; the other register may be
  // clobbered.
  void TryGetFunctionPrototype(Register function,
                               Register result,
                               Label* miss);

  // Generates code for reporting that an illegal operation has
  // occurred.
  void IllegalOperation(int num_arguments);

  // Find the function context up the context chain.
  void LoadContext(Register dst, int context_chain_length);

  // ---------------------------------------------------------------------------
  // Runtime calls

  // Call a code stub.
  void CallStub(CodeStub* stub);

  // Tail call a code stub (jump).
  void TailCallStub(CodeStub* stub);

  // Return from a code stub after popping its arguments.
  void StubReturn(int argc);

  // Call a runtime routine.
  void CallRuntime(Runtime::Function* f, int num_arguments);

  // Convenience function: Same as above, but takes the fid instead.
  void CallRuntime(Runtime::FunctionId id, int num_arguments);

  // Convenience function: call an external reference.
  void CallExternalReference(const ExternalReference& ext,
                             int num_arguments);

  // Tail call of a runtime routine (jump).
  // Like JumpToExternalReference, but also takes care of passing the number
  // of parameters.
  void TailCallExternalReference(const ExternalReference& ext,
                                 int num_arguments,
                                 int result_size);

  // Convenience function: tail call a runtime routine (jump).
  void TailCallRuntime(Runtime::FunctionId fid,
                       int num_arguments,
                       int result_size);

  // Jump to a runtime routine.
  void JumpToExternalReference(const ExternalReference& ext, int result_size);

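  // Illustrative runtime call (a sketch, not from the original source; the
  // runtime function id is a placeholder): push the arguments, then call.
  //   masm->push(rax);                               // the single argument
  //   masm->CallRuntime(Runtime::kSomeFunction, 1);  // hypothetical id
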
  // Before calling a C-function from generated code, align arguments on stack.
  // After aligning the frame, arguments must be stored in rsp[0], rsp[8],
  // etc., not pushed. The argument count assumes all arguments are word sized.
  // The number of slots reserved for arguments depends on platform. On Windows
  // stack slots are reserved for the arguments passed in registers. On other
  // platforms stack slots are only reserved for the arguments actually passed
  // on the stack.
  void PrepareCallCFunction(int num_arguments);

  // Calls a C function and cleans up the space for arguments allocated
  // by PrepareCallCFunction. The called function is not allowed to trigger a
  // garbage collection, since that might move the code and invalidate the
  // return address (unless this is somehow accounted for by the called
  // function).
  void CallCFunction(ExternalReference function, int num_arguments);
  void CallCFunction(Register function, int num_arguments);

  // Calculate the number of stack slots to reserve for arguments when calling
  // a C function.
  int ArgumentStackSlotsForCFunctionCall(int num_arguments);

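  // Illustrative C call (a sketch, not from the original source; the external
  // reference is a placeholder):
  //   masm->PrepareCallCFunction(2);
  //   // ... place the two word-sized arguments as described above ...
  //   masm->CallCFunction(ExternalReference::some_c_function(), 2);
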
  // ---------------------------------------------------------------------------
  // Utilities

  void Ret();

  Handle<Object> CodeObject() { return code_object_; }


  // ---------------------------------------------------------------------------
  // StatsCounter support

  void SetCounter(StatsCounter* counter, int value);
  void IncrementCounter(StatsCounter* counter, int value);
  void DecrementCounter(StatsCounter* counter, int value);


  // ---------------------------------------------------------------------------
  // Debugging

  // Calls Abort(msg) if the condition cc is not satisfied.
  // Use --debug_code to enable.
  void Assert(Condition cc, const char* msg);

  // Like Assert(), but always enabled.
  void Check(Condition cc, const char* msg);

  // Print a message to stdout and abort execution.
  void Abort(const char* msg);

  // Check that the stack is aligned.
  void CheckStackAlignment();

  // Verify restrictions about code generated in stubs.
  void set_generating_stub(bool value) { generating_stub_ = value; }
  bool generating_stub() { return generating_stub_; }
  void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
  bool allow_stub_calls() { return allow_stub_calls_; }

 private:
  bool generating_stub_;
  bool allow_stub_calls_;
  // This handle will be patched with the code object on installation.
  Handle<Object> code_object_;

  // Helper functions for generating invokes.
  void InvokePrologue(const ParameterCount& expected,
                      const ParameterCount& actual,
                      Handle<Code> code_constant,
                      Register code_register,
                      Label* done,
                      InvokeFlag flag);

  // Activation support.
  void EnterFrame(StackFrame::Type type);
  void LeaveFrame(StackFrame::Type type);

  // Allocation support helpers.
  // Loads the top of new-space into the result register.
  // If flags contains RESULT_CONTAINS_TOP then result_end is valid and
  // already contains the top of new-space, and scratch is invalid.
  // Otherwise the address of the new-space top is loaded into scratch (if
  // scratch is valid), and the new-space top is loaded into result.
  void LoadAllocationTopHelper(Register result,
                               Register result_end,
                               Register scratch,
                               AllocationFlags flags);
  // Update allocation top with value in result_end register.
  // If scratch is valid, it contains the address of the allocation top.
  void UpdateAllocationTopHelper(Register result_end, Register scratch);
};


// The code patcher is used to patch (typically) small parts of code e.g. for
// debugging and other types of instrumentation. When using the code patcher
// the exact number of bytes specified must be emitted. It is not legal to emit
// relocation information. If any of these constraints are violated it causes
// an assertion failure.
class CodePatcher {
 public:
  CodePatcher(byte* address, int size);
  virtual ~CodePatcher();

  // Macro assembler to emit code.
  MacroAssembler* masm() { return &masm_; }

 private:
  byte* address_;  // The address of the code being patched.
  int size_;  // Number of bytes of the expected patch size.
  MacroAssembler masm_;  // Macro assembler used to generate the code.
};
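
// Illustrative patch (a sketch, not from the original source; the address is
// a placeholder). Exactly the promised number of bytes must be emitted:
//   CodePatcher patcher(address_of_two_byte_sequence, 2);
//   patcher.masm()->int3();  // one byte
//   patcher.masm()->int3();  // one byte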


// -----------------------------------------------------------------------------
// Static helper functions.

// Generate an Operand for loading a field from an object.
static inline Operand FieldOperand(Register object, int offset) {
  return Operand(object, offset - kHeapObjectTag);
}


// Generate an Operand for loading an indexed field from an object.
static inline Operand FieldOperand(Register object,
                                   Register index,
                                   ScaleFactor scale,
                                   int offset) {
  return Operand(object, index, scale, offset - kHeapObjectTag);
}


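// Illustrative field access (a sketch, not from the original source): load
// the map of the heap object in rbx.
//   masm->movq(rax, FieldOperand(rbx, HeapObject::kMapOffset));
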
#ifdef GENERATED_CODE_COVERAGE
extern void LogGeneratedCodeCoverage(const char* file_line);
#define CODE_COVERAGE_STRINGIFY(x) #x
#define CODE_COVERAGE_TOSTRING(x) CODE_COVERAGE_STRINGIFY(x)
#define __FILE_LINE__ __FILE__ ":" CODE_COVERAGE_TOSTRING(__LINE__)
#define ACCESS_MASM(masm) { \
    byte* x64_coverage_function = \
        reinterpret_cast<byte*>(FUNCTION_ADDR(LogGeneratedCodeCoverage)); \
    masm->pushfd(); \
    masm->pushad(); \
    masm->push(Immediate(reinterpret_cast<int>(&__FILE_LINE__))); \
    masm->call(x64_coverage_function, RelocInfo::RUNTIME_ENTRY); \
    masm->pop(rax); \
    masm->popad(); \
    masm->popfd(); \
  } \
  masm->
#else
#define ACCESS_MASM(masm) masm->
#endif


} }  // namespace v8::internal

#endif  // V8_X64_MACRO_ASSEMBLER_X64_H_