// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_X64_CODEGEN_X64_H_
#define V8_X64_CODEGEN_X64_H_

namespace v8 {
namespace internal {

// Forward declarations
class DeferredCode;
class RegisterAllocator;
class RegisterFile;

enum InitState { CONST_INIT, NOT_CONST_INIT };
enum TypeofState { INSIDE_TYPEOF, NOT_INSIDE_TYPEOF };


// -------------------------------------------------------------------------
// Reference support

// A reference is a C++ stack-allocated object that keeps an ECMA
// reference on the execution stack while in scope. For variables
// the reference is empty, indicating that no state needs to be kept
// on the stack to track them. For properties, we keep either one
// (named) or two (indexed) values on the execution stack to
// represent the reference.

class Reference BASE_EMBEDDED {
 public:
  // The values of the types are important; see size().
  enum Type { ILLEGAL = -1, SLOT = 0, NAMED = 1, KEYED = 2 };
  Reference(CodeGenerator* cgen, Expression* expression);
  ~Reference();

  Expression* expression() const { return expression_; }
  Type type() const { return type_; }
  void set_type(Type value) {
    ASSERT(type_ == ILLEGAL);
    type_ = value;
  }

  // The size (in stack elements) the reference takes up on the expression stack.
  int size() const { return (type_ == ILLEGAL) ? 0 : type_; }

  bool is_illegal() const { return type_ == ILLEGAL; }
  bool is_slot() const { return type_ == SLOT; }
  bool is_property() const { return type_ == NAMED || type_ == KEYED; }

  // Return the name. Only valid for named property references.
  Handle<String> GetName();

  // Generate code to push the value of the reference on top of the
  // expression stack. The reference is expected to be already on top of
  // the expression stack, and it is left in place with its value above it.
  void GetValue(TypeofState typeof_state);

  // Like GetValue except that the slot is expected to be written to before
  // being read from again. The value of the reference may be invalidated,
  // causing subsequent attempts to read it to fail.
  void TakeValue(TypeofState typeof_state);

  // Generate code to store the value on top of the expression stack in the
  // reference. The reference is expected to be immediately below the value
  // on the expression stack. The stored value is left in place (with the
  // reference intact below it) to support chained assignments.
  void SetValue(InitState init_state);

 private:
  CodeGenerator* cgen_;
  Expression* expression_;
  Type type_;
};
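
// The following is an illustrative sketch, not code from this file: an
// assignment in the code generator is expected to use a Reference roughly
// like this (the AST accessor names are hypothetical):
//
//   {
//     Reference target(this, node->target());   // sets up the reference
//     Load(node->value());                       // value now on top of frame
//     target.SetValue(NOT_CONST_INIT);           // store; value stays on top
//   }                                            // scope exit unloads the ref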


// -------------------------------------------------------------------------
// Control destinations.

// A control destination encapsulates a pair of jump targets and a
// flag indicating which one is the preferred fall-through. The
// preferred fall-through must be unbound, the other may be already
// bound (ie, a backward target).
//
// The true and false targets may be jumped to unconditionally or
// control may split conditionally. Unconditional jumping and
// splitting should be emitted in tail position (as the last thing
// when compiling an expression) because they can cause either label
// to be bound or the non-fall-through target to be jumped to,
// leaving an invalid virtual frame.
//
// The labels in the control destination can be extracted and
// manipulated normally without affecting the state of the
// destination.

class ControlDestination BASE_EMBEDDED {
 public:
  ControlDestination(JumpTarget* true_target,
                     JumpTarget* false_target,
                     bool true_is_fall_through)
      : true_target_(true_target),
        false_target_(false_target),
        true_is_fall_through_(true_is_fall_through),
        is_used_(false) {
    ASSERT(true_is_fall_through ? !true_target->is_bound()
                                : !false_target->is_bound());
  }

  // Accessors for the jump targets. Directly jumping or branching to
  // or binding the targets will not update the destination's state.
  JumpTarget* true_target() const { return true_target_; }
  JumpTarget* false_target() const { return false_target_; }

  // True if the destination has been jumped to unconditionally or
  // control has been split to both targets. This predicate does not
  // test whether the targets have been extracted and manipulated as
  // raw jump targets.
  bool is_used() const { return is_used_; }

  // True if the destination is used and the true target (respectively
  // false target) was the fall through. If the target is backward,
  // "fall through" includes jumping unconditionally to it.
  bool true_was_fall_through() const {
    return is_used_ && true_is_fall_through_;
  }

  bool false_was_fall_through() const {
    return is_used_ && !true_is_fall_through_;
  }

  // Emit a branch to one of the true or false targets, and bind the
  // other target. Because this binds the fall-through target, it
  // should be emitted in tail position (as the last thing when
  // compiling an expression).
  void Split(Condition cc) {
    ASSERT(!is_used_);
    if (true_is_fall_through_) {
      false_target_->Branch(NegateCondition(cc));
      true_target_->Bind();
    } else {
      true_target_->Branch(cc);
      false_target_->Bind();
    }
    is_used_ = true;
  }

  // Emit an unconditional jump in tail position, to the true target
  // (if the argument is true) or the false target. The "jump" will
  // actually bind the jump target if it is forward, jump to it if it
  // is backward.
  void Goto(bool where) {
    ASSERT(!is_used_);
    JumpTarget* target = where ? true_target_ : false_target_;
    if (target->is_bound()) {
      target->Jump();
    } else {
      target->Bind();
    }
    is_used_ = true;
    true_is_fall_through_ = where;
  }

  // Mark this jump target as used as if Goto had been called, but
  // without generating a jump or binding a label (the control effect
  // should have already happened). This is used when the left
  // subexpression of a short-circuit boolean operator is compiled.
  void Use(bool where) {
    ASSERT(!is_used_);
    ASSERT((where ? true_target_ : false_target_)->is_bound());
    is_used_ = true;
    true_is_fall_through_ = where;
  }

  // Swap the true and false targets but keep the same actual label as
  // the fall through. This is used when compiling negated
  // expressions, where we want to swap the targets but preserve the
  // state.
  void Invert() {
    JumpTarget* temp_target = true_target_;
    true_target_ = false_target_;
    false_target_ = temp_target;

    true_is_fall_through_ = !true_is_fall_through_;
  }

 private:
  // True and false jump targets.
  JumpTarget* true_target_;
  JumpTarget* false_target_;

  // Before using the destination: true if the true target is the
  // preferred fall through, false if the false target is. After
  // using the destination: true if the true target was actually used
  // as the fall through, false if the false target was.
  bool true_is_fall_through_;

  // True if the Split or Goto functions have been called.
  bool is_used_;
};
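
// An illustrative sketch (not code from this file) of how a destination is
// typically consumed when compiling an if-statement condition; the JumpTarget
// names below are hypothetical:
//
//   JumpTarget then;
//   JumpTarget else_;
//   ControlDestination dest(&then, &else_, true);  // prefer falling into then
//   LoadCondition(node->condition(), NOT_INSIDE_TYPEOF, &dest, true);
//   // Somewhere inside LoadCondition a comparison ends with dest.Split(cc):
//   // it branches to else_ when cc fails and binds then as the fall-through.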


// -------------------------------------------------------------------------
// Code generation state

// The state is passed down the AST by the code generator (and back up, in
// the form of the state of the jump target pair). It is threaded through
// the call stack. Constructing a state implicitly pushes it on the owning
// code generator's stack of states, and destroying one implicitly pops it.
//
// The code generator state is only used for expressions, so statements have
// the initial state.

class CodeGenState BASE_EMBEDDED {
 public:
  // Create an initial code generator state. Destroying the initial state
  // leaves the code generator with a NULL state.
  explicit CodeGenState(CodeGenerator* owner);

  // Create a code generator state based on a code generator's current
  // state. The new state may or may not be inside a typeof, and has its
  // own control destination.
  CodeGenState(CodeGenerator* owner,
               TypeofState typeof_state,
               ControlDestination* destination);

  // Destroy a code generator state and restore the owning code generator's
  // previous state.
  ~CodeGenState();

  // Accessors for the state.
  TypeofState typeof_state() const { return typeof_state_; }
  ControlDestination* destination() const { return destination_; }

 private:
  // The owning code generator.
  CodeGenerator* owner_;

  // A flag indicating whether we are compiling the immediate subexpression
  // of a typeof expression.
  TypeofState typeof_state_;

  // A control destination in case the expression has a control-flow
  // effect.
  ControlDestination* destination_;

  // The previous state of the owning code generator, restored when
  // this state is destroyed.
  CodeGenState* previous_;
};
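
// A rough usage sketch (illustrative only): because construction pushes and
// destruction pops, a visitor can install a state for exactly the duration
// of one subexpression by allocating it on the C++ stack:
//
//   {
//     CodeGenState new_state(this, typeof_state, destination);
//     Visit(expr);   // expr is compiled with new_state as state()
//   }                // destructor restores the previous state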


// -------------------------------------------------------------------------
// Arguments allocation mode

enum ArgumentsAllocationMode {
  NO_ARGUMENTS_ALLOCATION,
  EAGER_ARGUMENTS_ALLOCATION,
  LAZY_ARGUMENTS_ALLOCATION
};


// -------------------------------------------------------------------------
// CodeGenerator

class CodeGenerator: public AstVisitor {
 public:
  // Takes a function literal and generates code for it. This function
  // should only be called by compiler.cc.
  static Handle<Code> MakeCode(FunctionLiteral* fun,
                               Handle<Script> script,
                               bool is_eval);

  // During implementation of CodeGenerator, this call creates a
  // CodeGenerator instance, and calls GenCode on it with a null
  // function literal. CodeGenerator will then construct and return
  // a simple dummy function. Call this during bootstrapping before
  // trying to compile any real functions, to get CodeGenerator up
  // and running.
  // TODO(X64): Remove once we can get through the bootstrapping process.
  static void TestCodeGenerator();

#ifdef ENABLE_LOGGING_AND_PROFILING
  static bool ShouldGenerateLog(Expression* type);
#endif

  static void SetFunctionInfo(Handle<JSFunction> fun,
                              int length,
                              int function_token_position,
                              int start_position,
                              int end_position,
                              bool is_expression,
                              bool is_toplevel,
                              Handle<Script> script,
                              Handle<String> inferred_name);

  // Accessors
  MacroAssembler* masm() { return masm_; }

  VirtualFrame* frame() const { return frame_; }

  bool has_valid_frame() const { return frame_ != NULL; }

  // Set the virtual frame to be new_frame, with non-frame register
  // reference counts given by non_frame_registers. The non-frame
  // register reference counts of the old frame are returned in
  // non_frame_registers.
  void SetFrame(VirtualFrame* new_frame, RegisterFile* non_frame_registers);

  void DeleteFrame();

  RegisterAllocator* allocator() const { return allocator_; }

  CodeGenState* state() { return state_; }
  void set_state(CodeGenState* state) { state_ = state; }

  void AddDeferred(DeferredCode* code) { deferred_.Add(code); }

  bool in_spilled_code() const { return in_spilled_code_; }
  void set_in_spilled_code(bool flag) { in_spilled_code_ = flag; }

 private:
  // Construction/Destruction
  CodeGenerator(int buffer_size, Handle<Script> script, bool is_eval);
  virtual ~CodeGenerator() { delete masm_; }

  // Accessors
  Scope* scope() const { return scope_; }

  // Generating deferred code.
  void ProcessDeferred();

  bool is_eval() { return is_eval_; }

  // State
  TypeofState typeof_state() const { return state_->typeof_state(); }
  ControlDestination* destination() const { return state_->destination(); }

  // Track loop nesting level.
  int loop_nesting() const { return loop_nesting_; }
  void IncrementLoopNesting() { loop_nesting_++; }
  void DecrementLoopNesting() { loop_nesting_--; }


  // Node visitors.
  void VisitStatements(ZoneList<Statement*>* statements);

#define DEF_VISIT(type) \
  void Visit##type(type* node);
  NODE_LIST(DEF_VISIT)
#undef DEF_VISIT

  // Visit a statement and then spill the virtual frame if control flow can
  // reach the end of the statement (ie, it does not exit via break,
  // continue, return, or throw). This function is used temporarily while
  // the code generator is being transformed.
  void VisitAndSpill(Statement* statement);

  // Visit a list of statements and then spill the virtual frame if control
  // flow can reach the end of the list.
  void VisitStatementsAndSpill(ZoneList<Statement*>* statements);

  // Main code generation function
  void GenCode(FunctionLiteral* fun);

  // Generate the return sequence code. Should be called no more than
  // once per compiled function, immediately after binding the return
  // target (which cannot be done more than once).
  void GenerateReturnSequence(Result* return_value);

  // Returns the arguments allocation mode.
  ArgumentsAllocationMode ArgumentsMode() const;

  // Store the arguments object and allocate it if necessary.
  Result StoreArgumentsObject(bool initial);

  // The following are used by class Reference.
  void LoadReference(Reference* ref);
  void UnloadReference(Reference* ref);

  Operand ContextOperand(Register context, int index) const {
    return Operand(context, Context::SlotOffset(index));
  }

  Operand SlotOperand(Slot* slot, Register tmp);

  Operand ContextSlotOperandCheckExtensions(Slot* slot,
                                            Result tmp,
                                            JumpTarget* slow);

  // Expressions
  Operand GlobalObject() const {
    return ContextOperand(rsi, Context::GLOBAL_INDEX);
  }

  void LoadCondition(Expression* x,
                     TypeofState typeof_state,
                     ControlDestination* destination,
                     bool force_control);
  void Load(Expression* x, TypeofState typeof_state = NOT_INSIDE_TYPEOF);
  void LoadGlobal();
  void LoadGlobalReceiver();

  // Generate code to push the value of an expression on top of the frame
  // and then spill the frame fully to memory. This function is used
  // temporarily while the code generator is being transformed.
  void LoadAndSpill(Expression* expression,
                    TypeofState typeof_state = NOT_INSIDE_TYPEOF);

  // Read a value from a slot and leave it on top of the expression stack.
  void LoadFromSlot(Slot* slot, TypeofState typeof_state);
  Result LoadFromGlobalSlotCheckExtensions(Slot* slot,
                                           TypeofState typeof_state,
                                           JumpTarget* slow);

  // Store the value on top of the expression stack into a slot, leaving the
  // value in place.
  void StoreToSlot(Slot* slot, InitState init_state);

  // Special code for typeof expressions: we must be careful when
  // loading the operand of a 'typeof' expression. We are not allowed
  // to throw reference errors for non-existing properties of the
  // global object, so we must make the load look like an explicit
  // property access rather than an access through the context chain.
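  // For example, 'typeof some_undeclared_name' must evaluate to "undefined"
  // rather than throw a ReferenceError, which is why the load cannot go
  // through the normal context-chain lookup.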
  void LoadTypeofExpression(Expression* x);

  // Translate the value on top of the frame into control flow to the
  // control destination.
  void ToBoolean(ControlDestination* destination);

  void GenericBinaryOperation(
      Token::Value op,
      SmiAnalysis* type,
      OverwriteMode overwrite_mode);

  // If possible, combine two constant smi values using op to produce
  // a smi result, and push it on the virtual frame, all at compile time.
  // Returns true if it succeeds. Otherwise it has no effect.
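  // For example, the source expression (3 + 4) can be folded to the smi
  // constant 7 here instead of emitting an add at run time.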
  bool FoldConstantSmis(Token::Value op, int left, int right);

  // Emit code to perform a binary operation on a constant
  // smi and a likely smi. Consumes the Result *operand.
  void ConstantSmiBinaryOperation(Token::Value op,
                                  Result* operand,
                                  Handle<Object> constant_operand,
                                  SmiAnalysis* type,
                                  bool reversed,
                                  OverwriteMode overwrite_mode);

  // Emit code to perform a binary operation on two likely smis.
  // The code to handle smi arguments is produced inline.
  // Consumes the Results *left and *right.
  void LikelySmiBinaryOperation(Token::Value op,
                                Result* left,
                                Result* right,
                                OverwriteMode overwrite_mode);

  void Comparison(Condition cc,
                  bool strict,
                  ControlDestination* destination);

  // To prevent long attacker-controlled byte sequences, integer constants
  // from the JavaScript source are loaded in two parts if they are larger
  // than 16 bits.
  static const int kMaxSmiInlinedBits = 16;
  bool IsUnsafeSmi(Handle<Object> value);
  // Load an integer constant x into a register target using
  // at most 16 bits of user-controlled data per assembly operation.
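  // As an illustrative sketch (the exact instruction sequence is defined in
  // the .cc file): a 32-bit constant such as 0x12345678 might be emitted as
  // a load of 0x1234, a shift left by 16, and an or with 0x5678, so that no
  // single instruction carries more than 16 attacker-controlled bits.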
  void LoadUnsafeSmi(Register target, Handle<Object> value);

  void CallWithArguments(ZoneList<Expression*>* arguments, int position);

  void CheckStack();

  struct InlineRuntimeLUT {
    void (CodeGenerator::*method)(ZoneList<Expression*>*);
    const char* name;
  };
  static InlineRuntimeLUT* FindInlineRuntimeLUT(Handle<String> name);
  bool CheckForInlineRuntimeCall(CallRuntime* node);
  static bool PatchInlineRuntimeEntry(Handle<String> name,
                                      const InlineRuntimeLUT& new_entry,
                                      InlineRuntimeLUT* old_entry);
  Handle<JSFunction> BuildBoilerplate(FunctionLiteral* node);
  void ProcessDeclarations(ZoneList<Declaration*>* declarations);

  Handle<Code> ComputeCallInitialize(int argc, InLoopFlag in_loop);

  // Declare global variables and functions in the given array of
  // name/value pairs.
  void DeclareGlobals(Handle<FixedArray> pairs);

  // Instantiate the function boilerplate.
  void InstantiateBoilerplate(Handle<JSFunction> boilerplate);

  // Support for type checks.
  void GenerateIsSmi(ZoneList<Expression*>* args);
  void GenerateIsNonNegativeSmi(ZoneList<Expression*>* args);
  void GenerateIsArray(ZoneList<Expression*>* args);

  // Support for arguments.length and arguments[?].
  void GenerateArgumentsLength(ZoneList<Expression*>* args);
  void GenerateArgumentsAccess(ZoneList<Expression*>* args);

  // Support for accessing the value field of an object (used by Date).
  void GenerateValueOf(ZoneList<Expression*>* args);
  void GenerateSetValueOf(ZoneList<Expression*>* args);

  // Fast support for charCodeAt(n).
  void GenerateFastCharCodeAt(ZoneList<Expression*>* args);

  // Fast support for object equality testing.
  void GenerateObjectEquals(ZoneList<Expression*>* args);

  void GenerateLog(ZoneList<Expression*>* args);

  // Fast support for Math.random().
  void GenerateRandomPositiveSmi(ZoneList<Expression*>* args);

  // Fast support for Math.sin and Math.cos.
  enum MathOp { SIN, COS };
  void GenerateFastMathOp(MathOp op, ZoneList<Expression*>* args);
  inline void GenerateMathSin(ZoneList<Expression*>* args);
  inline void GenerateMathCos(ZoneList<Expression*>* args);

  // Methods and constants for fast case switch statement support.
  //
  // Only allow fast-case switch if the range of labels is at most
  // this factor times the number of case labels.
  // Value is derived from comparing the size of code generated by the normal
  // switch code for Smi-labels to the size of a single pointer. If code
  // quality increases, this number should be decreased to match.
  static const int kFastSwitchMaxOverheadFactor = 5;

  // Minimal number of switch cases required before we allow jump-table
  // optimization.
  static const int kFastSwitchMinCaseCount = 5;
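
  // Illustrative example (values invented here, not taken from the
  // implementation): a switch whose smi case labels are {3, 4, 7, 9, 11}
  // has 5 cases spanning a label range of 9, so it qualifies:
  // 5 >= kFastSwitchMinCaseCount and 9 <= kFastSwitchMaxOverheadFactor * 5.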

  // The limit of the range of a fast-case switch, as a factor of the number
  // of cases of the switch. Each platform should return a value that
  // is optimal compared to the default code generated for a switch statement
  // on that platform.
  int FastCaseSwitchMaxOverheadFactor();

  // The minimal number of cases in a switch before the fast-case switch
  // optimization is enabled. Each platform should return a value that
  // is optimal compared to the default code generated for a switch statement
  // on that platform.
  int FastCaseSwitchMinCaseCount();

  // Allocate a jump table and create code to jump through it.
  // Should call GenerateFastCaseSwitchCases to generate the code for
  // all the cases at the appropriate point.
  void GenerateFastCaseSwitchJumpTable(SwitchStatement* node,
                                       int min_index,
                                       int range,
                                       Label* fail_label,
                                       Vector<Label*> case_targets,
                                       Vector<Label> case_labels);

  // Generate the code for cases for the fast case switch.
  // Called by GenerateFastCaseSwitchJumpTable.
  void GenerateFastCaseSwitchCases(SwitchStatement* node,
                                   Vector<Label> case_labels,
                                   VirtualFrame* start_frame);

  // Fast support for constant-Smi switches.
  void GenerateFastCaseSwitchStatement(SwitchStatement* node,
                                       int min_index,
                                       int range,
                                       int default_index);

  // Fast support for constant-Smi switches. Tests whether switch statement
  // permits optimization and calls GenerateFastCaseSwitch if it does.
  // Returns true if the fast-case switch was generated, and false if not.
  bool TryGenerateFastCaseSwitchStatement(SwitchStatement* node);

  // Methods used to indicate which source code the generated code
  // corresponds to. Source positions are collected by the assembler
  // and emitted with the relocation information.
  void CodeForFunctionPosition(FunctionLiteral* fun);
  void CodeForReturnPosition(FunctionLiteral* fun);
  void CodeForStatementPosition(Node* node);
  void CodeForSourcePosition(int pos);

#ifdef DEBUG
  // True if the registers are valid for entry to a block. There should
  // be no frame-external references to (non-reserved) registers.
  bool HasValidEntryRegisters();
#endif

  bool is_eval_;  // Tells whether code is generated for eval.
  Handle<Script> script_;
  ZoneList<DeferredCode*> deferred_;

  // Assembler
  MacroAssembler* masm_;  // to generate code

  // Code generation state
  Scope* scope_;
  VirtualFrame* frame_;
  RegisterAllocator* allocator_;
  CodeGenState* state_;
  int loop_nesting_;

  // Jump targets.
  // The target of the return from the function.
  BreakTarget function_return_;

  // True if the function return is shadowed (ie, jumping to the target
  // function_return_ does not jump to the true function return, but rather
  // to some unlinking code).
  bool function_return_is_shadowed_;

  // True when we are in code that expects the virtual frame to be fully
  // spilled. Some virtual frame functions are disabled in DEBUG builds when
  // called from spilled code, because they do not leave the virtual frame
  // in a spilled state.
  bool in_spilled_code_;

  static InlineRuntimeLUT kInlineRuntimeLUT[];

  friend class VirtualFrame;
  friend class JumpTarget;
  friend class Reference;
  friend class Result;

  DISALLOW_COPY_AND_ASSIGN(CodeGenerator);
};


} }  // namespace v8::internal

#endif  // V8_X64_CODEGEN_X64_H_