// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_X64_CODEGEN_X64_H_
#define V8_X64_CODEGEN_X64_H_

#include "ic-inl.h"

namespace v8 {
namespace internal {

// Forward declarations
class CompilationInfo;
class DeferredCode;
class RegisterAllocator;
class RegisterFile;

enum InitState { CONST_INIT, NOT_CONST_INIT };
enum TypeofState { INSIDE_TYPEOF, NOT_INSIDE_TYPEOF };


// -------------------------------------------------------------------------
// Reference support

// A reference is a C++ stack-allocated object that puts a
// reference on the virtual frame. The reference may be consumed
// by GetValue, TakeValue, SetValue, and Codegen::UnloadReference.
// When the lifetime (scope) of a valid reference ends, it must have
// been consumed, and be in state UNLOADED.
class Reference BASE_EMBEDDED {
 public:
56 // The values of the types is important, see size().
  enum Type { UNLOADED = -2, ILLEGAL = -1, SLOT = 0, NAMED = 1, KEYED = 2 };

  Reference(CodeGenerator* cgen,
            Expression* expression,
            bool persist_after_get = false);
  ~Reference();

  Expression* expression() const { return expression_; }
  Type type() const { return type_; }
  void set_type(Type value) {
    ASSERT_EQ(ILLEGAL, type_);
    type_ = value;
  }

  void set_unloaded() {
    ASSERT_NE(ILLEGAL, type_);
    ASSERT_NE(UNLOADED, type_);
    type_ = UNLOADED;
  }
  // The size the reference takes up on the stack.
  int size() const {
    return (type_ < SLOT) ? 0 : type_;
  }

  bool is_illegal() const { return type_ == ILLEGAL; }
  bool is_slot() const { return type_ == SLOT; }
  bool is_property() const { return type_ == NAMED || type_ == KEYED; }
  bool is_unloaded() const { return type_ == UNLOADED; }

  // Return the name. Only valid for named property references.
  Handle<String> GetName();

  // Generate code to push the value of the reference on top of the
  // expression stack. The reference is expected to be already on top of
  // the expression stack, and it is consumed by the call unless the
  // reference is for a compound assignment.
  // If the reference is not consumed, it is left in place under its value.
  void GetValue();

  // Like GetValue except that the slot is expected to be written to before
  // being read from again. The value of the reference may be invalidated,
  // causing subsequent attempts to read it to fail.
  void TakeValue();

  // Generate code to store the value on top of the expression stack in the
  // reference. The reference is expected to be immediately below the value
  // on the expression stack. The value is stored in the location specified
  // by the reference, and is left on top of the stack, after the reference
  // is popped from beneath it (unloaded).
  void SetValue(InitState init_state);

 private:
  CodeGenerator* cgen_;
  Expression* expression_;
  Type type_;
  bool persist_after_get_;
};
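
// A minimal usage sketch (simplified and illustrative; the visited node and
// its accessors are assumptions, not quotes from the code generator):
//
//   { Reference target(this, node->target());
//     Load(node->value());              // Push the right-hand side value.
//     target.SetValue(NOT_CONST_INIT);  // Consumes (unloads) the reference.
//   }  // A valid reference must be in state UNLOADED when it goes out of scope.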


// -------------------------------------------------------------------------
// Control destinations.

// A control destination encapsulates a pair of jump targets and a
// flag indicating which one is the preferred fall-through. The
// preferred fall-through must be unbound, the other may be already
// bound (ie, a backward target).
//
// The true and false targets may be jumped to unconditionally or
// control may split conditionally. Unconditional jumping and
// splitting should be emitted in tail position (as the last thing
// when compiling an expression) because they can cause either label
// to be bound or the non-fall through to be jumped to leaving an
// invalid virtual frame.
//
// The labels in the control destination can be extracted and
// manipulated normally without affecting the state of the
// destination.

class ControlDestination BASE_EMBEDDED {
 public:
  ControlDestination(JumpTarget* true_target,
                     JumpTarget* false_target,
                     bool true_is_fall_through)
      : true_target_(true_target),
        false_target_(false_target),
        true_is_fall_through_(true_is_fall_through),
        is_used_(false) {
    ASSERT(true_is_fall_through ? !true_target->is_bound()
                                : !false_target->is_bound());
  }

  // Accessors for the jump targets. Directly jumping or branching to
  // or binding the targets will not update the destination's state.
  JumpTarget* true_target() const { return true_target_; }
  JumpTarget* false_target() const { return false_target_; }

  // True if the destination has been jumped to unconditionally or
  // control has been split to both targets. This predicate does not
  // test whether the targets have been extracted and manipulated as
  // raw jump targets.
  bool is_used() const { return is_used_; }

  // True if the destination is used and the true target (respectively
  // false target) was the fall through. If the target is backward,
  // "fall through" included jumping unconditionally to it.
  bool true_was_fall_through() const {
    return is_used_ && true_is_fall_through_;
  }

  bool false_was_fall_through() const {
    return is_used_ && !true_is_fall_through_;
  }

  // Emit a branch to one of the true or false targets, and bind the
  // other target. Because this binds the fall-through target, it
  // should be emitted in tail position (as the last thing when
  // compiling an expression).
  void Split(Condition cc) {
    ASSERT(!is_used_);
    if (true_is_fall_through_) {
      false_target_->Branch(NegateCondition(cc));
      true_target_->Bind();
    } else {
      true_target_->Branch(cc);
      false_target_->Bind();
    }
    is_used_ = true;
  }

  // Emit an unconditional jump in tail position, to the true target
  // (if the argument is true) or the false target. The "jump" will
  // actually bind the jump target if it is forward, jump to it if it
  // is backward.
  void Goto(bool where) {
    ASSERT(!is_used_);
    JumpTarget* target = where ? true_target_ : false_target_;
    if (target->is_bound()) {
      target->Jump();
    } else {
      target->Bind();
    }
    is_used_ = true;
    true_is_fall_through_ = where;
  }

  // Mark this jump target as used as if Goto had been called, but
  // without generating a jump or binding a label (the control effect
  // should have already happened). This is used when the left
  // subexpression of a short-circuit boolean operator is
  // compiled.
  void Use(bool where) {
    ASSERT(!is_used_);
    ASSERT((where ? true_target_ : false_target_)->is_bound());
    is_used_ = true;
    true_is_fall_through_ = where;
  }

  // Swap the true and false targets but keep the same actual label as
  // the fall through. This is used when compiling negated
  // expressions, where we want to swap the targets but preserve the
  // state.
  void Invert() {
    JumpTarget* temp_target = true_target_;
    true_target_ = false_target_;
    false_target_ = temp_target;

    true_is_fall_through_ = !true_is_fall_through_;
  }

 private:
  // True and false jump targets.
  JumpTarget* true_target_;
  JumpTarget* false_target_;

  // Before using the destination: true if the true target is the
  // preferred fall through, false if the false target is. After
  // using the destination: true if the true target was actually used
  // as the fall through, false if the false target was.
  bool true_is_fall_through_;

  // True if the Split or Goto functions have been called.
  bool is_used_;
};
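
// A simplified sketch of how a destination is consumed in tail position
// (the comparison and condition code are illustrative assumptions):
//
//   JumpTarget then, otherwise;
//   ControlDestination dest(&then, &otherwise, true);  // Prefer 'then'.
//   // ... emit a comparison that sets the condition flags ...
//   dest.Split(equal);  // Branches to 'otherwise' on not_equal, binds 'then'.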


// -------------------------------------------------------------------------
// Code generation state

// The state is passed down the AST by the code generator (and back up, in
// the form of the state of the jump target pair). It is threaded through
// the call stack. Constructing a state implicitly pushes it on the owning
// code generator's stack of states, and destroying one implicitly pops it.
//
// The code generator state is only used for expressions, so statements have
// the initial state.

class CodeGenState BASE_EMBEDDED {
 public:
  // Create an initial code generator state. Destroying the initial state
  // leaves the code generator with a NULL state.
  explicit CodeGenState(CodeGenerator* owner);

  // Create a code generator state based on a code generator's current
  // state. The new state has its own control destination.
  CodeGenState(CodeGenerator* owner, ControlDestination* destination);

  // Destroy a code generator state and restore the owning code generator's
  // previous state.
  ~CodeGenState();

  // Accessors for the state.
  ControlDestination* destination() const { return destination_; }

 private:
  // The owning code generator.
  CodeGenerator* owner_;

  // A control destination in case the expression has a control-flow
  // effect.
  ControlDestination* destination_;

  // The previous state of the owning code generator, restored when
  // this state is destroyed.
  CodeGenState* previous_;
};
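
// A simplified sketch of the implicit push/pop behavior described above
// (illustrative only):
//
//   { CodeGenState new_state(this, destination);
//     Visit(expr);  // Subexpressions see 'destination' via state().
//   }               // The destructor restores the previous state.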


// -------------------------------------------------------------------------
// Arguments allocation mode

enum ArgumentsAllocationMode {
  NO_ARGUMENTS_ALLOCATION,
  EAGER_ARGUMENTS_ALLOCATION,
  LAZY_ARGUMENTS_ALLOCATION
};


// -------------------------------------------------------------------------
// CodeGenerator

class CodeGenerator: public AstVisitor {
 public:
  // Takes a function literal, generates code for it. This function should only
  // be called by compiler.cc.
  static Handle<Code> MakeCode(CompilationInfo* info);

  // Printing of AST, etc. as requested by flags.
  static void MakeCodePrologue(CompilationInfo* info);

  // Allocate and install the code.
  static Handle<Code> MakeCodeEpilogue(MacroAssembler* masm,
                                       Code::Flags flags,
                                       CompilationInfo* info);

#ifdef ENABLE_LOGGING_AND_PROFILING
  static bool ShouldGenerateLog(Expression* type);
#endif

  static void RecordPositions(MacroAssembler* masm, int pos);

  // Accessors
  MacroAssembler* masm() { return masm_; }
  VirtualFrame* frame() const { return frame_; }
  inline Handle<Script> script();

  bool has_valid_frame() const { return frame_ != NULL; }

  // Set the virtual frame to be new_frame, with non-frame register
  // reference counts given by non_frame_registers. The non-frame
  // register reference counts of the old frame are returned in
  // non_frame_registers.
  void SetFrame(VirtualFrame* new_frame, RegisterFile* non_frame_registers);

  void DeleteFrame();

  RegisterAllocator* allocator() const { return allocator_; }

  CodeGenState* state() { return state_; }
  void set_state(CodeGenState* state) { state_ = state; }

  void AddDeferred(DeferredCode* code) { deferred_.Add(code); }

  bool in_spilled_code() const { return in_spilled_code_; }
  void set_in_spilled_code(bool flag) { in_spilled_code_ = flag; }

  // If the name is an inline runtime function call, return the number of
  // expected arguments. Otherwise return -1.
  static int InlineRuntimeCallArgumentsCount(Handle<String> name);

 private:
  // Construction/Destruction
  explicit CodeGenerator(MacroAssembler* masm);

  // Accessors
  inline bool is_eval();
  inline Scope* scope();

  // Generating deferred code.
  void ProcessDeferred();

  // State
  ControlDestination* destination() const { return state_->destination(); }

  // Track loop nesting level.
  int loop_nesting() const { return loop_nesting_; }
  void IncrementLoopNesting() { loop_nesting_++; }
  void DecrementLoopNesting() { loop_nesting_--; }


  // Node visitors.
  void VisitStatements(ZoneList<Statement*>* statements);

#define DEF_VISIT(type) \
  void Visit##type(type* node);
  AST_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT

  // Visit a statement and then spill the virtual frame if control flow can
  // reach the end of the statement (ie, it does not exit via break,
  // continue, return, or throw). This function is used temporarily while
  // the code generator is being transformed.
  void VisitAndSpill(Statement* statement);

  // Visit a list of statements and then spill the virtual frame if control
  // flow can reach the end of the list.
  void VisitStatementsAndSpill(ZoneList<Statement*>* statements);

  // Main code generation function
  void Generate(CompilationInfo* info);

  // Generate the return sequence code. Should be called no more than
  // once per compiled function, immediately after binding the return
  // target (which can not be done more than once).
  void GenerateReturnSequence(Result* return_value);

  // Returns the arguments allocation mode.
  ArgumentsAllocationMode ArgumentsMode();

  // Store the arguments object and allocate it if necessary.
  Result StoreArgumentsObject(bool initial);

  // The following are used by class Reference.
  void LoadReference(Reference* ref);
  void UnloadReference(Reference* ref);

  static Operand ContextOperand(Register context, int index) {
    return Operand(context, Context::SlotOffset(index));
  }

  Operand SlotOperand(Slot* slot, Register tmp);

  Operand ContextSlotOperandCheckExtensions(Slot* slot,
                                            Result tmp,
                                            JumpTarget* slow);

  // Expressions
  static Operand GlobalObject() {
    return ContextOperand(rsi, Context::GLOBAL_INDEX);
  }

  void LoadCondition(Expression* x,
                     ControlDestination* destination,
                     bool force_control);
  void Load(Expression* expr);
  void LoadGlobal();
  void LoadGlobalReceiver();

  // Generate code to push the value of an expression on top of the frame
  // and then spill the frame fully to memory. This function is used
  // temporarily while the code generator is being transformed.
  void LoadAndSpill(Expression* expression);

  // Read a value from a slot and leave it on top of the expression stack.
  void LoadFromSlot(Slot* slot, TypeofState typeof_state);
  void LoadFromSlotCheckForArguments(Slot* slot, TypeofState state);
  Result LoadFromGlobalSlotCheckExtensions(Slot* slot,
                                           TypeofState typeof_state,
                                           JumpTarget* slow);

  // Store the value on top of the expression stack into a slot, leaving the
  // value in place.
  void StoreToSlot(Slot* slot, InitState init_state);

  // Load a property of an object, returning it in a Result.
  // The object and the property name are passed on the stack, and
  // not changed.
  Result EmitKeyedLoad(bool is_global);

  // Special code for typeof expressions: Unfortunately, we must
  // be careful when loading the expression in 'typeof'
  // expressions. We are not allowed to throw reference errors for
  // non-existing properties of the global object, so we must make it
  // look like an explicit property access, instead of an access
  // through the context chain.
  void LoadTypeofExpression(Expression* x);

  // Translate the value on top of the frame into control flow to the
  // control destination.
  void ToBoolean(ControlDestination* destination);

  // Generate code that computes a shortcutting logical operation.
  void GenerateLogicalBooleanOperation(BinaryOperation* node);

  void GenericBinaryOperation(BinaryOperation* expr,
                              OverwriteMode overwrite_mode);

  // If possible, combine two constant smi values using op to produce
  // a smi result, and push it on the virtual frame, all at compile time.
  // Returns true if it succeeds. Otherwise it has no effect.
  bool FoldConstantSmis(Token::Value op, int left, int right);

  // Emit code to perform a binary operation on a constant
  // smi and a likely smi. Consumes the Result *operand.
  Result ConstantSmiBinaryOperation(BinaryOperation* expr,
                                    Result* operand,
                                    Handle<Object> constant_operand,
                                    bool reversed,
                                    OverwriteMode overwrite_mode);

  // Emit code to perform a binary operation on two likely smis.
  // The code to handle smi arguments is produced inline.
  // Consumes the Results *left and *right.
  Result LikelySmiBinaryOperation(BinaryOperation* expr,
                                  Result* left,
                                  Result* right,
                                  OverwriteMode overwrite_mode);

  void Comparison(AstNode* node,
                  Condition cc,
                  bool strict,
                  ControlDestination* destination);
  void GenerateInlineNumberComparison(Result* left_side,
                                      Result* right_side,
                                      Condition cc,
                                      ControlDestination* dest);

  // To prevent long attacker-controlled byte sequences, integer constants
  // from the JavaScript source are loaded in two parts if they are larger
  // than 16 bits.
  static const int kMaxSmiInlinedBits = 16;
  bool IsUnsafeSmi(Handle<Object> value);
  // Load an integer constant x into a register target using
  // at most 16 bits of user-controlled data per assembly operation.
  void LoadUnsafeSmi(Register target, Handle<Object> value);
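  // For example, a hypothetical 32-bit constant such as 0x12345678 would be
  // materialized from two separate 16-bit halves combined with shifts rather
  // than appearing verbatim in the instruction stream.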

  void CallWithArguments(ZoneList<Expression*>* arguments,
                         CallFunctionFlags flags,
                         int position);

  // An optimized implementation of expressions of the form
  // x.apply(y, arguments). We call x the applicand and y the receiver.
  // The optimization avoids allocating an arguments object if possible.
  void CallApplyLazy(Expression* applicand,
                     Expression* receiver,
                     VariableProxy* arguments,
                     int position);

  void CheckStack();

  struct InlineRuntimeLUT {
    void (CodeGenerator::*method)(ZoneList<Expression*>*);
    const char* name;
    int nargs;
  };
  static InlineRuntimeLUT* FindInlineRuntimeLUT(Handle<String> name);
  bool CheckForInlineRuntimeCall(CallRuntime* node);
  static bool PatchInlineRuntimeEntry(Handle<String> name,
                                      const InlineRuntimeLUT& new_entry,
                                      InlineRuntimeLUT* old_entry);
  void ProcessDeclarations(ZoneList<Declaration*>* declarations);

  static Handle<Code> ComputeCallInitialize(int argc, InLoopFlag in_loop);

  // Declare global variables and functions in the given array of
  // name/value pairs.
  void DeclareGlobals(Handle<FixedArray> pairs);

  // Instantiate the function based on the shared function info.
  void InstantiateFunction(Handle<SharedFunctionInfo> function_info);

  // Support for type checks.
  void GenerateIsSmi(ZoneList<Expression*>* args);
  void GenerateIsNonNegativeSmi(ZoneList<Expression*>* args);
  void GenerateIsArray(ZoneList<Expression*>* args);
  void GenerateIsRegExp(ZoneList<Expression*>* args);
  void GenerateIsObject(ZoneList<Expression*>* args);
  void GenerateIsFunction(ZoneList<Expression*>* args);
  void GenerateIsUndetectableObject(ZoneList<Expression*>* args);

  // Support for construct call checks.
  void GenerateIsConstructCall(ZoneList<Expression*>* args);

  // Support for arguments.length and arguments[?].
  void GenerateArgumentsLength(ZoneList<Expression*>* args);
  void GenerateArguments(ZoneList<Expression*>* args);

  // Support for accessing the class and value fields of an object.
  void GenerateClassOf(ZoneList<Expression*>* args);
  void GenerateValueOf(ZoneList<Expression*>* args);
  void GenerateSetValueOf(ZoneList<Expression*>* args);

  // Fast support for charCodeAt(n).
  void GenerateFastCharCodeAt(ZoneList<Expression*>* args);

  // Fast support for string.charAt(n) and string[n].
  void GenerateCharFromCode(ZoneList<Expression*>* args);

  // Fast support for object equality testing.
  void GenerateObjectEquals(ZoneList<Expression*>* args);

  void GenerateLog(ZoneList<Expression*>* args);

  void GenerateGetFramePointer(ZoneList<Expression*>* args);

  // Fast support for Math.random().
  void GenerateRandomHeapNumber(ZoneList<Expression*>* args);

  // Fast support for StringAdd.
  void GenerateStringAdd(ZoneList<Expression*>* args);

  // Fast support for SubString.
  void GenerateSubString(ZoneList<Expression*>* args);

  // Fast support for StringCompare.
  void GenerateStringCompare(ZoneList<Expression*>* args);

  // Support for direct calls from JavaScript to native RegExp code.
  void GenerateRegExpExec(ZoneList<Expression*>* args);

  void GenerateRegExpConstructResult(ZoneList<Expression*>* args);

  // Support for fast native caches.
  void GenerateGetFromCache(ZoneList<Expression*>* args);

  // Fast support for number to string.
  void GenerateNumberToString(ZoneList<Expression*>* args);

  // Fast swapping of elements. Takes three expressions, the object and two
  // indices. This should only be used if the indices are known to be
  // non-negative and within bounds of the elements array at the call site.
  void GenerateSwapElements(ZoneList<Expression*>* args);

  // Fast call for custom callbacks.
  void GenerateCallFunction(ZoneList<Expression*>* args);

  // Fast call to math functions.
  void GenerateMathPow(ZoneList<Expression*>* args);
  void GenerateMathSin(ZoneList<Expression*>* args);
  void GenerateMathCos(ZoneList<Expression*>* args);
  void GenerateMathSqrt(ZoneList<Expression*>* args);

  // Simple condition analysis.
  enum ConditionAnalysis {
    ALWAYS_TRUE,
    ALWAYS_FALSE,
    DONT_KNOW
  };
  ConditionAnalysis AnalyzeCondition(Expression* cond);

  // Methods used to indicate which source code is generated for. Source
  // positions are collected by the assembler and emitted with the relocation
  // information.
  void CodeForFunctionPosition(FunctionLiteral* fun);
  void CodeForReturnPosition(FunctionLiteral* fun);
  void CodeForStatementPosition(Statement* node);
  void CodeForDoWhileConditionPosition(DoWhileStatement* stmt);
  void CodeForSourcePosition(int pos);

  void SetTypeForStackSlot(Slot* slot, TypeInfo info);

#ifdef DEBUG
  // True if the registers are valid for entry to a block. There should
  // be no frame-external references to (non-reserved) registers.
  bool HasValidEntryRegisters();
#endif

  ZoneList<DeferredCode*> deferred_;

  // Assembler
  MacroAssembler* masm_;  // to generate code

  CompilationInfo* info_;

  // Code generation state
  VirtualFrame* frame_;
  RegisterAllocator* allocator_;
  CodeGenState* state_;
  int loop_nesting_;

  // Jump targets.
  // The target of the return from the function.
  BreakTarget function_return_;

  // True if the function return is shadowed (ie, jumping to the target
  // function_return_ does not jump to the true function return, but rather
  // to some unlinking code).
  bool function_return_is_shadowed_;

  // True when we are in code that expects the virtual frame to be fully
  // spilled. Some virtual frame functions are disabled in DEBUG builds when
  // called from spilled code, because they do not leave the virtual frame
  // in a spilled state.
  bool in_spilled_code_;

  static InlineRuntimeLUT kInlineRuntimeLUT[];

  friend class VirtualFrame;
  friend class JumpTarget;
  friend class Reference;
  friend class Result;
  friend class FastCodeGenerator;
  friend class FullCodeGenerator;
  friend class FullCodeGenSyntaxChecker;

  friend class CodeGeneratorPatcher;  // Used in test-log-stack-tracer.cc

  DISALLOW_COPY_AND_ASSIGN(CodeGenerator);
};


// Compute a transcendental math function natively, or call the
// TranscendentalCache runtime function.
class TranscendentalCacheStub: public CodeStub {
 public:
  explicit TranscendentalCacheStub(TranscendentalCache::Type type)
      : type_(type) {}
  void Generate(MacroAssembler* masm);
 private:
  TranscendentalCache::Type type_;
  Major MajorKey() { return TranscendentalCache; }
  int MinorKey() { return type_; }
  Runtime::FunctionId RuntimeFunction();
  void GenerateOperation(MacroAssembler* masm, Label* on_nan_result);
};
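
// A brief usage sketch (illustrative; the surrounding virtual-frame plumbing
// is assumed): the code generator can compute, e.g., Math.sin via
//
//   TranscendentalCacheStub stub(TranscendentalCache::SIN);
//   Result answer = frame_->CallStub(&stub, 1);  // One argument on the frame.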


// Flag that indicates how to generate code for the stub GenericBinaryOpStub.
enum GenericBinaryFlags {
  NO_GENERIC_BINARY_FLAGS = 0,
  NO_SMI_CODE_IN_STUB = 1 << 0  // Omit smi code in stub.
};


class GenericBinaryOpStub: public CodeStub {
 public:
  GenericBinaryOpStub(Token::Value op,
                      OverwriteMode mode,
                      GenericBinaryFlags flags,
                      TypeInfo operands_type = TypeInfo::Unknown())
      : op_(op),
        mode_(mode),
        flags_(flags),
        args_in_registers_(false),
        args_reversed_(false),
        static_operands_type_(operands_type),
        runtime_operands_type_(BinaryOpIC::DEFAULT),
        name_(NULL) {
    use_sse3_ = CpuFeatures::IsSupported(SSE3);
    ASSERT(OpBits::is_valid(Token::NUM_TOKENS));
  }

  GenericBinaryOpStub(int key, BinaryOpIC::TypeInfo type_info)
      : op_(OpBits::decode(key)),
        mode_(ModeBits::decode(key)),
        flags_(FlagBits::decode(key)),
        args_in_registers_(ArgsInRegistersBits::decode(key)),
        args_reversed_(ArgsReversedBits::decode(key)),
        use_sse3_(SSE3Bits::decode(key)),
        static_operands_type_(TypeInfo::ExpandedRepresentation(
            StaticTypeInfoBits::decode(key))),
        runtime_operands_type_(type_info),
        name_(NULL) {
  }

  // Generate code to call the stub with the supplied arguments. This will add
  // code at the call site to prepare arguments either in registers or on the
  // stack together with the actual call.
  void GenerateCall(MacroAssembler* masm, Register left, Register right);
  void GenerateCall(MacroAssembler* masm, Register left, Smi* right);
  void GenerateCall(MacroAssembler* masm, Smi* left, Register right);

  Result GenerateCall(MacroAssembler* masm,
                      VirtualFrame* frame,
                      Result* left,
                      Result* right);

 private:
  Token::Value op_;
  OverwriteMode mode_;
  GenericBinaryFlags flags_;
  bool args_in_registers_;  // Arguments passed in registers not on the stack.
  bool args_reversed_;  // Left and right argument are swapped.
  bool use_sse3_;

  // Number type information of operands, determined by code generator.
  TypeInfo static_operands_type_;

  // Operand type information determined at runtime.
  BinaryOpIC::TypeInfo runtime_operands_type_;

  char* name_;

  const char* GetName();

#ifdef DEBUG
  void Print() {
    PrintF("GenericBinaryOpStub %d (op %s), "
           "(mode %d, flags %d, registers %d, reversed %d, only_numbers %s)\n",
           MinorKey(),
           Token::String(op_),
           static_cast<int>(mode_),
           static_cast<int>(flags_),
           static_cast<int>(args_in_registers_),
           static_cast<int>(args_reversed_),
           static_operands_type_.ToString());
  }
#endif

  // Minor key encoding in 18 bits TTNNNFRASOOOOOOOMM.
  class ModeBits: public BitField<OverwriteMode, 0, 2> {};
  class OpBits: public BitField<Token::Value, 2, 7> {};
  class SSE3Bits: public BitField<bool, 9, 1> {};
  class ArgsInRegistersBits: public BitField<bool, 10, 1> {};
  class ArgsReversedBits: public BitField<bool, 11, 1> {};
  class FlagBits: public BitField<GenericBinaryFlags, 12, 1> {};
  class StaticTypeInfoBits: public BitField<int, 13, 3> {};
  class RuntimeTypeInfoBits: public BitField<BinaryOpIC::TypeInfo, 16, 2> {};

  Major MajorKey() { return GenericBinaryOp; }
  int MinorKey() {
    // Encode the parameters in a unique 18 bit value.
    return OpBits::encode(op_)
           | ModeBits::encode(mode_)
           | FlagBits::encode(flags_)
           | SSE3Bits::encode(use_sse3_)
           | ArgsInRegistersBits::encode(args_in_registers_)
           | ArgsReversedBits::encode(args_reversed_)
           | StaticTypeInfoBits::encode(
                 static_operands_type_.ThreeBitRepresentation())
           | RuntimeTypeInfoBits::encode(runtime_operands_type_);
  }

  void Generate(MacroAssembler* masm);
  void GenerateSmiCode(MacroAssembler* masm, Label* slow);
  void GenerateLoadArguments(MacroAssembler* masm);
  void GenerateReturn(MacroAssembler* masm);
  void GenerateRegisterArgsPush(MacroAssembler* masm);
  void GenerateTypeTransition(MacroAssembler* masm);

  bool ArgsInRegistersSupported() {
    return (op_ == Token::ADD) || (op_ == Token::SUB)
        || (op_ == Token::MUL) || (op_ == Token::DIV);
  }
  bool IsOperationCommutative() {
    return (op_ == Token::ADD) || (op_ == Token::MUL);
  }

  void SetArgsInRegisters() { args_in_registers_ = true; }
  void SetArgsReversed() { args_reversed_ = true; }
  bool HasSmiCodeInStub() { return (flags_ & NO_SMI_CODE_IN_STUB) == 0; }
  bool HasArgsInRegisters() { return args_in_registers_; }
  bool HasArgsReversed() { return args_reversed_; }

  bool ShouldGenerateSmiCode() {
    return HasSmiCodeInStub() &&
           runtime_operands_type_ != BinaryOpIC::HEAP_NUMBERS &&
           runtime_operands_type_ != BinaryOpIC::STRINGS;
  }

  bool ShouldGenerateFPCode() {
    return runtime_operands_type_ != BinaryOpIC::STRINGS;
  }

  virtual int GetCodeKind() { return Code::BINARY_OP_IC; }

  virtual InlineCacheState GetICState() {
    return BinaryOpIC::ToState(runtime_operands_type_);
  }
};


class StringHelper : public AllStatic {
 public:
  // Generates fast code for getting a char code out of a string
  // object at the given index. May bail out for four reasons (in the
  // listed order):
  //  * Receiver is not a string (receiver_not_string label).
  //  * Index is not a smi (index_not_smi label).
  //  * Index is out of range (index_out_of_range).
  //  * Some other reason (slow_case label). In this case it's
  //    guaranteed that the above conditions are not violated,
  //    e.g. it's safe to assume the receiver is a string and the
  //    index is a non-negative smi < length.
  // When successful, object, index, and scratch are clobbered.
  // Otherwise, scratch and result are clobbered.
  static void GenerateFastCharCodeAt(MacroAssembler* masm,
                                     Register object,
                                     Register index,
                                     Register scratch,
                                     Register result,
                                     Label* receiver_not_string,
                                     Label* index_not_smi,
                                     Label* index_out_of_range,
                                     Label* slow_case);

  // Generates code for creating a one-char string from the given char
  // code. May do a runtime call, so any register can be clobbered
  // and, if the given invoke flag specifies a call, an internal frame
  // is required. In tail call mode the result must be rax register.
  static void GenerateCharFromCode(MacroAssembler* masm,
                                   Register code,
                                   Register result,
                                   Register scratch,
                                   InvokeFlag flag);

  // Generate code for copying characters using a simple loop. This should only
  // be used in places where the number of characters is small and the
  // additional setup and checking in GenerateCopyCharactersREP adds too much
  // overhead. Copying of overlapping regions is not supported.
  static void GenerateCopyCharacters(MacroAssembler* masm,
                                     Register dest,
                                     Register src,
                                     Register count,
                                     bool ascii);

  // Generate code for copying characters using the rep movs instruction.
  // Copies rcx characters from rsi to rdi. Copying of overlapping regions is
  // not supported.
  static void GenerateCopyCharactersREP(MacroAssembler* masm,
                                        Register dest,   // Must be rdi.
                                        Register src,    // Must be rsi.
                                        Register count,  // Must be rcx.
                                        bool ascii);


  // Probe the symbol table for a two character string. If the string is
  // not found by probing, a jump to the label not_found is performed. This jump
  // does not guarantee that the string is not in the symbol table. If the
  // string is found the code falls through with the string in register rax.
  static void GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
                                                   Register c1,
                                                   Register c2,
                                                   Register scratch1,
                                                   Register scratch2,
                                                   Register scratch3,
                                                   Register scratch4,
                                                   Label* not_found);

  // Generate string hash.
  static void GenerateHashInit(MacroAssembler* masm,
                               Register hash,
                               Register character,
                               Register scratch);
  static void GenerateHashAddCharacter(MacroAssembler* masm,
                                       Register hash,
                                       Register character,
                                       Register scratch);
  static void GenerateHashGetHash(MacroAssembler* masm,
                                  Register hash,
                                  Register scratch);

 private:
  DISALLOW_IMPLICIT_CONSTRUCTORS(StringHelper);
};
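
// The hash helpers above are meant to be emitted in sequence (an illustrative
// sketch; the register assignments are assumptions):
//
//   StringHelper::GenerateHashInit(masm, hash, character, scratch);          // First char.
//   StringHelper::GenerateHashAddCharacter(masm, hash, character, scratch);  // Later chars.
//   StringHelper::GenerateHashGetHash(masm, hash, scratch);                  // Finalize.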


// Flag that indicates how to generate code for the stub StringAddStub.
enum StringAddFlags {
  NO_STRING_ADD_FLAGS = 0,
  NO_STRING_CHECK_IN_STUB = 1 << 0  // Omit string check in stub.
};


class StringAddStub: public CodeStub {
 public:
  explicit StringAddStub(StringAddFlags flags) {
    string_check_ = ((flags & NO_STRING_CHECK_IN_STUB) == 0);
  }

 private:
  Major MajorKey() { return StringAdd; }
  int MinorKey() { return string_check_ ? 0 : 1; }

  void Generate(MacroAssembler* masm);

  // Should the stub check whether arguments are strings?
  bool string_check_;
};


class SubStringStub: public CodeStub {
 public:
  SubStringStub() {}

 private:
  Major MajorKey() { return SubString; }
  int MinorKey() { return 0; }

  void Generate(MacroAssembler* masm);
};


class StringCompareStub: public CodeStub {
 public:
  explicit StringCompareStub() {}

  // Compare two flat ascii strings and return the result in rax after popping
  // two arguments from the stack.
  static void GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
                                              Register left,
                                              Register right,
                                              Register scratch1,
                                              Register scratch2,
                                              Register scratch3,
                                              Register scratch4);

 private:
  Major MajorKey() { return StringCompare; }
  int MinorKey() { return 0; }

  void Generate(MacroAssembler* masm);
};


class NumberToStringStub: public CodeStub {
 public:
  NumberToStringStub() { }

  // Generate code to do a lookup in the number string cache. If the number in
  // the register object is found in the cache the generated code falls through
  // with the result in the result register. The object and the result register
  // can be the same. If the number is not found in the cache the code jumps to
  // the label not_found with only the content of register object unchanged.
  static void GenerateLookupNumberStringCache(MacroAssembler* masm,
                                              Register object,
                                              Register result,
                                              Register scratch1,
                                              Register scratch2,
                                              bool object_is_smi,
                                              Label* not_found);

 private:
  static void GenerateConvertHashCodeToIndex(MacroAssembler* masm,
                                             Register hash,
                                             Register mask);

  Major MajorKey() { return NumberToString; }
  int MinorKey() { return 0; }

  void Generate(MacroAssembler* masm);

  const char* GetName() { return "NumberToStringStub"; }

#ifdef DEBUG
  void Print() {
    PrintF("NumberToStringStub\n");
  }
#endif
};


class RecordWriteStub : public CodeStub {
 public:
  RecordWriteStub(Register object, Register addr, Register scratch)
      : object_(object), addr_(addr), scratch_(scratch) { }

  void Generate(MacroAssembler* masm);

 private:
  Register object_;
  Register addr_;
  Register scratch_;

#ifdef DEBUG
  void Print() {
    PrintF("RecordWriteStub (object reg %d), (addr reg %d), (scratch reg %d)\n",
           object_.code(), addr_.code(), scratch_.code());
  }
#endif

  // Minor key encoding in 12 bits. 4 bits for each of the three
  // registers (object, address and scratch) OOOOAAAASSSS.
  class ScratchBits : public BitField<uint32_t, 0, 4> {};
  class AddressBits : public BitField<uint32_t, 4, 4> {};
  class ObjectBits : public BitField<uint32_t, 8, 4> {};

  Major MajorKey() { return RecordWrite; }

  int MinorKey() {
    // Encode the registers.
    return ObjectBits::encode(object_.code()) |
           AddressBits::encode(addr_.code()) |
           ScratchBits::encode(scratch_.code());
  }
};


} }  // namespace v8::internal

#endif  // V8_X64_CODEGEN_X64_H_