// Copyright 2012 the V8 project authors. All rights reserved.
| 2 | // Use of this source code is governed by a BSD-style license that can be |
| 3 | // found in the LICENSE file. |
| 4 | |
| 5 | #ifndef V8_FULL_CODEGEN_FULL_CODEGEN_H_ |
| 6 | #define V8_FULL_CODEGEN_FULL_CODEGEN_H_ |
| 7 | |
| 8 | #include "src/allocation.h" |
| 9 | #include "src/assert-scope.h" |
| 10 | #include "src/ast/ast.h" |
| 11 | #include "src/ast/scopes.h" |
| 12 | #include "src/bit-vector.h" |
| 13 | #include "src/code-factory.h" |
| 14 | #include "src/code-stubs.h" |
| 15 | #include "src/codegen.h" |
| 16 | #include "src/compiler.h" |
| 17 | #include "src/globals.h" |
| 18 | #include "src/objects.h" |
| 19 | |
| 20 | namespace v8 { |
| 21 | namespace internal { |
| 22 | |
| 23 | // Forward declarations. |
| 24 | class JumpPatchSite; |
| 25 | |
| 26 | // ----------------------------------------------------------------------------- |
| 27 | // Full code generator. |
| 28 | |
| 29 | class FullCodeGenerator: public AstVisitor { |
| 30 | public: |
  // Accumulator state recorded at bailout points: NO_REGISTERS when no
  // register holds a live value, TOS_REG when the top-of-stack value lives
  // in the accumulator register (paired with PrepareForBailout below).
  enum State {
    NO_REGISTERS,
    TOS_REG
  };
| 35 | |
  // Sets up code-generation state from |info|. The bailout-entry list is
  // pre-sized to the AST node count only when deoptimization support is
  // enabled; otherwise it starts empty.
  FullCodeGenerator(MacroAssembler* masm, CompilationInfo* info)
      : masm_(masm),
        info_(info),
        isolate_(info->isolate()),
        zone_(info->zone()),
        scope_(info->scope()),
        nesting_stack_(NULL),
        loop_depth_(0),
        try_catch_depth_(0),
        globals_(NULL),
        context_(NULL),
        bailout_entries_(info->HasDeoptimizationSupport()
                             ? info->literal()->ast_node_count()
                             : 0,
                         info->zone()),
        back_edges_(2, info->zone()),
        handler_table_(info->zone()),
        ic_total_count_(0) {
    DCHECK(!info->IsStub());  // Full codegen never compiles code stubs.
    Initialize();
  }

  void Initialize();

  // Compiles |info|'s function with the full (non-optimizing) code
  // generator. Returns false on failure.
  static bool MakeCode(CompilationInfo* info);
| 61 | |
  // Encode state and pc-offset as a BitField<type, start, size>.
  // Only use 30 bits because we encode the result as a smi.
  class StateField : public BitField<State, 0, 1> { };     // Accumulator state.
  class PcField : public BitField<unsigned, 1, 30-1> { };  // pc offset.
| 66 | |
| 67 | static const char* State2String(State state) { |
| 68 | switch (state) { |
| 69 | case NO_REGISTERS: return "NO_REGISTERS"; |
| 70 | case TOS_REG: return "TOS_REG"; |
| 71 | } |
| 72 | UNREACHABLE(); |
| 73 | return NULL; |
| 74 | } |
| 75 | |
| 76 | static const int kMaxBackEdgeWeight = 127; |
| 77 | |
| 78 | // Platform-specific code size multiplier. |
| 79 | #if V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X87 |
| 80 | static const int kCodeSizeMultiplier = 105; |
| 81 | #elif V8_TARGET_ARCH_X64 |
| 82 | static const int kCodeSizeMultiplier = 165; |
| 83 | #elif V8_TARGET_ARCH_ARM |
| 84 | static const int kCodeSizeMultiplier = 149; |
| 85 | #elif V8_TARGET_ARCH_ARM64 |
| 86 | static const int kCodeSizeMultiplier = 220; |
| 87 | #elif V8_TARGET_ARCH_PPC64 |
| 88 | static const int kCodeSizeMultiplier = 200; |
| 89 | #elif V8_TARGET_ARCH_PPC |
| 90 | static const int kCodeSizeMultiplier = 200; |
| 91 | #elif V8_TARGET_ARCH_MIPS |
| 92 | static const int kCodeSizeMultiplier = 149; |
| 93 | #elif V8_TARGET_ARCH_MIPS64 |
| 94 | static const int kCodeSizeMultiplier = 149; |
| 95 | #else |
| 96 | #error Unsupported target architecture. |
| 97 | #endif |
| 98 | |
 private:
  // Forward declarations for the nesting-statement hierarchy and the test
  // expression context defined further below.
  class Breakable;
  class Iteration;

  class TestContext;

  // Base class for the stack of syntactic constructs that need cleanup on a
  // non-local exit (break, continue, return). Each instance links itself
  // onto codegen->nesting_stack_ in its constructor and unlinks in its
  // destructor, so lexical scoping of the C++ objects mirrors the nesting
  // of the generated statements.
  class NestedStatement BASE_EMBEDDED {
   public:
    explicit NestedStatement(FullCodeGenerator* codegen) : codegen_(codegen) {
      // Link into codegen's nesting stack.
      previous_ = codegen->nesting_stack_;
      codegen->nesting_stack_ = this;
    }
    virtual ~NestedStatement() {
      // Unlink from codegen's nesting stack. Destruction must be strictly
      // LIFO, hence the check against the current stack top.
      DCHECK_EQ(this, codegen_->nesting_stack_);
      codegen_->nesting_stack_ = previous_;
    }

    // Checked-downcast helpers, overridden by the matching subclasses.
    virtual Breakable* AsBreakable() { return NULL; }
    virtual Iteration* AsIteration() { return NULL; }

    // Whether this statement is the target of a continue/break to |target|.
    virtual bool IsContinueTarget(Statement* target) { return false; }
    virtual bool IsBreakTarget(Statement* target) { return false; }

    // Notify the statement that we are exiting it via break, continue, or
    // return and give it a chance to generate cleanup code. Return the
    // next outer statement in the nesting stack. We accumulate in
    // *stack_depth the amount to drop the stack and in *context_length the
    // number of context chain links to unwind as we traverse the nesting
    // stack from an exit to its target.
    virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
      return previous_;
    }

    // Like the Exit() method above, but limited to accumulating stack depth.
    virtual NestedStatement* AccumulateDepth(int* stack_depth) {
      return previous_;
    }

   protected:
    MacroAssembler* masm() { return codegen_->masm(); }

    FullCodeGenerator* codegen_;
    NestedStatement* previous_;  // Next outer statement on the stack.

   private:
    DISALLOW_COPY_AND_ASSIGN(NestedStatement);
  };
| 148 | |
  // A breakable statement such as a block.
  class Breakable : public NestedStatement {
   public:
    Breakable(FullCodeGenerator* codegen, BreakableStatement* statement)
        : NestedStatement(codegen), statement_(statement) {
    }

    Breakable* AsBreakable() override { return this; }
    bool IsBreakTarget(Statement* target) override {
      return statement() == target;
    }

    BreakableStatement* statement() { return statement_; }
    // Label to bind at the statement's break target.
    Label* break_label() { return &break_label_; }

   private:
    BreakableStatement* statement_;
    Label break_label_;
  };
| 168 | |
  // An iteration statement such as a while, for, or do loop. Adds a
  // continue target on top of Breakable's break target.
  class Iteration : public Breakable {
   public:
    Iteration(FullCodeGenerator* codegen, IterationStatement* statement)
        : Breakable(codegen, statement) {
    }

    Iteration* AsIteration() override { return this; }
    bool IsContinueTarget(Statement* target) override {
      return statement() == target;
    }

    // Label to bind at the statement's continue target.
    Label* continue_label() { return &continue_label_; }

   private:
    Label continue_label_;
  };
| 186 | |
| 187 | // A nested block statement. |
| 188 | class NestedBlock : public Breakable { |
| 189 | public: |
| 190 | NestedBlock(FullCodeGenerator* codegen, Block* block) |
| 191 | : Breakable(codegen, block) { |
| 192 | } |
| 193 | |
| 194 | NestedStatement* Exit(int* stack_depth, int* context_length) override { |
| 195 | auto block_scope = statement()->AsBlock()->scope(); |
| 196 | if (block_scope != nullptr) { |
| 197 | if (block_scope->ContextLocalCount() > 0) ++(*context_length); |
| 198 | } |
| 199 | return previous_; |
| 200 | } |
| 201 | }; |
| 202 | |
  // The try block of a try/catch statement.
  class TryCatch : public NestedStatement {
   public:
    // Stack slots reserved for the try block (TryBlockConstant).
    static const int kElementCount = TryBlockConstant::kElementCount;

    explicit TryCatch(FullCodeGenerator* codegen) : NestedStatement(codegen) {}

    // Exiting drops the try block's reserved slots; no context unwinding.
    NestedStatement* Exit(int* stack_depth, int* context_length) override {
      *stack_depth += kElementCount;
      return previous_;
    }
    NestedStatement* AccumulateDepth(int* stack_depth) override {
      *stack_depth += kElementCount;
      return previous_;
    }
  };
| 219 | |
  // The try block of a try/finally statement.
  class TryFinally : public NestedStatement {
   public:
    // Stack slots reserved for the try block (TryBlockConstant).
    static const int kElementCount = TryBlockConstant::kElementCount;

    TryFinally(FullCodeGenerator* codegen, Label* finally_entry)
        : NestedStatement(codegen), finally_entry_(finally_entry) {
    }

    // Exit() is defined out of line: leaving a try/finally is not a simple
    // stack adjustment (the finally clause must be entered).
    NestedStatement* Exit(int* stack_depth, int* context_length) override;
    NestedStatement* AccumulateDepth(int* stack_depth) override {
      *stack_depth += kElementCount;
      return previous_;
    }

   private:
    Label* finally_entry_;  // Entry label of the finally clause's code.
  };
| 238 | |
  // The finally block of a try/finally statement.
  class Finally : public NestedStatement {
   public:
    // Stack slots live while inside the finally block (3 — presumably the
    // state saved by EnterFinallyBlock; confirm against the .cc).
    static const int kElementCount = 3;

    explicit Finally(FullCodeGenerator* codegen) : NestedStatement(codegen) {}

    NestedStatement* Exit(int* stack_depth, int* context_length) override {
      *stack_depth += kElementCount;
      return previous_;
    }
    NestedStatement* AccumulateDepth(int* stack_depth) override {
      *stack_depth += kElementCount;
      return previous_;
    }
  };
| 255 | |
  // The body of a for/in loop.
  class ForIn : public Iteration {
   public:
    // Stack slots occupied by the for-in iteration state (5 — see the
    // platform ForInStatement visitors for the exact layout).
    static const int kElementCount = 5;

    ForIn(FullCodeGenerator* codegen, ForInStatement* statement)
        : Iteration(codegen, statement) {
    }

    NestedStatement* Exit(int* stack_depth, int* context_length) override {
      *stack_depth += kElementCount;
      return previous_;
    }
    NestedStatement* AccumulateDepth(int* stack_depth) override {
      *stack_depth += kElementCount;
      return previous_;
    }
  };
| 274 | |
| 275 | |
  // The body of a with or catch.
  class WithOrCatch : public NestedStatement {
   public:
    explicit WithOrCatch(FullCodeGenerator* codegen)
        : NestedStatement(codegen) {
    }

    // With/catch bodies run in their own context: exiting unwinds one
    // context chain link but pops no stack values.
    NestedStatement* Exit(int* stack_depth, int* context_length) override {
      ++(*context_length);
      return previous_;
    }
  };
| 288 | |
  // A platform-specific utility to overwrite the accumulator register
  // with a GC-safe value.
  void ClearAccumulator();

  // Determine whether or not to inline the smi case for the given
  // operation.
  bool ShouldInlineSmiCase(Token::Value op);

  // Helper function to convert a pure value into a test context. The value
  // is expected on the stack or the accumulator, depending on the platform.
  // See the platform-specific implementation for details.
  void DoTest(Expression* condition,
              Label* if_true,
              Label* if_false,
              Label* fall_through);
  // Overload taking the condition and branch labels from a test context.
  void DoTest(const TestContext* context);
| 306 | // Helper function to split control flow and avoid a branch to the |
| 307 | // fall-through label if it is set up. |
| 308 | #if V8_TARGET_ARCH_MIPS |
| 309 | void Split(Condition cc, |
| 310 | Register lhs, |
| 311 | const Operand& rhs, |
| 312 | Label* if_true, |
| 313 | Label* if_false, |
| 314 | Label* fall_through); |
| 315 | #elif V8_TARGET_ARCH_MIPS64 |
| 316 | void Split(Condition cc, |
| 317 | Register lhs, |
| 318 | const Operand& rhs, |
| 319 | Label* if_true, |
| 320 | Label* if_false, |
| 321 | Label* fall_through); |
| 322 | #elif V8_TARGET_ARCH_PPC |
| 323 | void Split(Condition cc, Label* if_true, Label* if_false, Label* fall_through, |
| 324 | CRegister cr = cr7); |
| 325 | #else // All other arch. |
| 326 | void Split(Condition cc, |
| 327 | Label* if_true, |
| 328 | Label* if_false, |
| 329 | Label* fall_through); |
| 330 | #endif |
| 331 | |
  // Load the value of a known (PARAMETER, LOCAL, or CONTEXT) variable into
  // a register. Emits a context chain walk if necessary (so does
  // SetVar) so avoid calling both on the same variable.
  void GetVar(Register destination, Variable* var);

  // Assign to a known (PARAMETER, LOCAL, or CONTEXT) variable. If it's in
  // the context, the write barrier will be emitted and source, scratch0,
  // scratch1 will be clobbered. Emits a context chain walk if necessary
  // (so does GetVar) so avoid calling both on the same variable.
  void SetVar(Variable* var,
              Register source,
              Register scratch0,
              Register scratch1);

  // An operand used to read/write a stack-allocated (PARAMETER or LOCAL)
  // variable. Writing does not need the write barrier.
  MemOperand StackOperand(Variable* var);

  // An operand used to read/write a known (PARAMETER, LOCAL, or CONTEXT)
  // variable. May emit code to traverse the context chain, loading the
  // found context into the scratch register. Writing to this operand will
  // need the write barrier if location is CONTEXT.
  MemOperand VarOperand(Variable* var, Register scratch);
| 355 | |
  // Visit |expr| purely for its side effects; any produced value is
  // discarded. Records a bailout point with no live registers.
  void VisitForEffect(Expression* expr) {
    EffectContext context(this);
    Visit(expr);
    PrepareForBailout(expr, NO_REGISTERS);
  }

  // Visit |expr| leaving its value in the accumulator register
  // (bailout state TOS_REG).
  void VisitForAccumulatorValue(Expression* expr) {
    AccumulatorValueContext context(this);
    Visit(expr);
    PrepareForBailout(expr, TOS_REG);
  }

  // Visit |expr| leaving its value pushed on the operand stack.
  void VisitForStackValue(Expression* expr) {
    StackValueContext context(this);
    Visit(expr);
    PrepareForBailout(expr, NO_REGISTERS);
  }

  // Visit |expr| as a test, branching directly to the given labels
  // instead of materializing a boolean value.
  void VisitForControl(Expression* expr,
                       Label* if_true,
                       Label* if_false,
                       Label* fall_through) {
    TestContext context(this, expr, if_true, if_false, fall_through);
    Visit(expr);
    // For test contexts, we prepare for bailout before branching, not at
    // the end of the entire expression. This happens as part of visiting
    // the expression.
  }
| 384 | |
  void VisitInDuplicateContext(Expression* expr);

  // Declaration processing.
  void VisitDeclarations(ZoneList<Declaration*>* declarations) override;
  void DeclareModules(Handle<FixedArray> descriptions);
  void DeclareGlobals(Handle<FixedArray> pairs);
  int DeclareGlobalsFlags();

  // Generate code to create an iterator result object. The "value" property is
  // set to a value popped from the stack, and "done" is set according to the
  // argument. The result object is left in the result register.
  void EmitCreateIteratorResult(bool done);

  // Try to perform a comparison as a fast inlined literal compare if
  // the operands allow it. Returns true if the compare operation
  // has been matched and all code generated; false otherwise.
  bool TryLiteralCompare(CompareOperation* compare);

  // Platform-specific code for comparing the type of a value with
  // a given literal string.
  void EmitLiteralCompareTypeof(Expression* expr,
                                Expression* sub_expr,
                                Handle<String> check);

  // Platform-specific code for equality comparison with a nil-like value.
  void EmitLiteralCompareNil(CompareOperation* expr,
                             Expression* sub_expr,
                             NilValue nil);

  // Bailout support.
  void PrepareForBailout(Expression* node, State state);
  void PrepareForBailoutForId(BailoutId id, State state);

  // Returns a smi for the index into the FixedArray that backs the feedback
  // vector.
  Smi* SmiFromSlot(FeedbackVectorSlot slot) const {
    return Smi::FromInt(TypeFeedbackVector::GetIndexFromSpec(
        literal()->feedback_vector_spec(), slot));
  }
| 423 | |
  // Record a call's return site offset, used to rebuild the frame if the
  // called function was inlined at the site.
  void RecordJSReturnSite(Call* call);

  // Prepare for bailout before a test (or compare) and branch. If
  // should_normalize, then the following comparison will not handle the
  // canonical JS true value so we will insert a (dead) test against true at
  // the actual bailout target from the optimized code. If not
  // should_normalize, the true and false labels are ignored.
  void PrepareForBailoutBeforeSplit(Expression* expr,
                                    bool should_normalize,
                                    Label* if_true,
                                    Label* if_false);

  // If enabled, emit debug code for checking that the current context is
  // neither a with nor a catch context.
  void EmitDebugCheckDeclarationContext(Variable* variable);

  // This is meant to be called at loop back edges, |back_edge_target| is
  // the jump target of the back edge and is used to approximate the amount
  // of code inside the loop.
  void EmitBackEdgeBookkeeping(IterationStatement* stmt,
                               Label* back_edge_target);
  // Record the OSR AST id corresponding to a back edge in the code.
  void RecordBackEdge(BailoutId osr_ast_id);
  // Emit a table of back edge ids, pcs and loop depths into the code stream.
  // Return the offset of the start of the table.
  unsigned EmitBackEdgeTable();

  // Profiling counter maintenance (used by the back-edge bookkeeping).
  void EmitProfilingCounterDecrement(int delta);
  void EmitProfilingCounterReset();

  // Emit code to pop values from the stack associated with nested statements
  // like try/catch, try/finally, etc, running the finallies and unwinding the
  // handlers as needed.
  void EmitUnwindBeforeReturn();

  // Platform-specific return sequence.
  void EmitReturnSequence();

  // Platform-specific code sequences for calls.
  void EmitCall(Call* expr, ConvertReceiverMode = ConvertReceiverMode::kAny);
  void EmitSuperConstructorCall(Call* expr);
  void EmitCallWithLoadIC(Call* expr);
  void EmitSuperCallWithLoadIC(Call* expr);
  void EmitKeyedCallWithLoadIC(Call* expr, Expression* key);
  void EmitKeyedSuperCallWithLoadIC(Call* expr);
  void EmitPossiblyEvalCall(Call* expr);
| 472 | |
// The CallRuntime intrinsics that full codegen lowers inline; each entry
// expands to an Emit<Name>(CallRuntime*) handler declared just below.
#define FOR_EACH_FULL_CODE_INTRINSIC(F) \
  F(IsSmi)                              \
  F(IsArray)                            \
  F(IsTypedArray)                       \
  F(IsRegExp)                           \
  F(IsJSProxy)                          \
  F(Call)                               \
  F(ArgumentsLength)                    \
  F(Arguments)                          \
  F(ValueOf)                            \
  F(SetValueOf)                         \
  F(IsDate)                             \
  F(StringCharFromCode)                 \
  F(StringCharAt)                       \
  F(OneByteSeqStringSetChar)            \
  F(TwoByteSeqStringSetChar)            \
  F(ObjectEquals)                       \
  F(IsFunction)                         \
  F(IsJSReceiver)                       \
  F(IsSimdValue)                        \
  F(MathPow)                            \
  F(IsMinusZero)                        \
  F(HasCachedArrayIndex)                \
  F(GetCachedArrayIndex)                \
  F(GetSuperConstructor)                \
  F(FastOneByteArrayJoin)               \
  F(GeneratorNext)                      \
  F(GeneratorThrow)                     \
  F(DebugBreakInOptimizedCode)          \
  F(ClassOf)                            \
  F(StringCharCodeAt)                   \
  F(SubString)                          \
  F(RegExpExec)                         \
  F(RegExpConstructResult)              \
  F(ToInteger)                          \
  F(NumberToString)                     \
  F(ToString)                           \
  F(ToLength)                           \
  F(ToNumber)                           \
  F(ToName)                             \
  F(ToObject)                           \
  F(DebugIsActive)                      \
  F(CreateIterResultObject)

// Declare one Emit<Name> handler per intrinsic above.
#define GENERATOR_DECLARATION(Name) void Emit##Name(CallRuntime* call);
  FOR_EACH_FULL_CODE_INTRINSIC(GENERATOR_DECLARATION)
#undef GENERATOR_DECLARATION
| 520 | |
  // Lower an intrinsic call to a call of the given stub.
  void EmitIntrinsicAsStubCall(CallRuntime* expr, const Callable& callable);

  // Platform-specific code for resuming generators.
  void EmitGeneratorResume(Expression *generator,
                           Expression *value,
                           JSGeneratorObject::ResumeMode resume_mode);

  // Platform-specific code for loading variables.
  void EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                     TypeofMode typeof_mode, Label* slow);
  MemOperand ContextSlotOperandCheckExtensions(Variable* var, Label* slow);
  void EmitDynamicLookupFastCase(VariableProxy* proxy, TypeofMode typeof_mode,
                                 Label* slow, Label* done);
  void EmitGlobalVariableLoad(VariableProxy* proxy, TypeofMode typeof_mode);
  void EmitVariableLoad(VariableProxy* proxy,
                        TypeofMode typeof_mode = NOT_INSIDE_TYPEOF);

  void EmitAccessor(ObjectLiteralProperty* property);

  bool NeedsHoleCheckForLoad(VariableProxy* proxy);

  // Expects the arguments and the function already pushed.
  void EmitResolvePossiblyDirectEval(int arg_count);

  // Platform-specific support for allocating a new closure based on
  // the given function info.
  void EmitNewClosure(Handle<SharedFunctionInfo> info, bool pretenure);

  // Re-usable portions of CallRuntime.
  void EmitLoadJSRuntimeFunction(CallRuntime* expr);
  void EmitCallJSRuntimeFunction(CallRuntime* expr);

  // Load a value from a named property.
  // The receiver is left on the stack by the IC.
  void EmitNamedPropertyLoad(Property* expr);

  // Load a value from super.named property.
  // Expect receiver ('this' value) and home_object on the stack.
  void EmitNamedSuperPropertyLoad(Property* expr);

  // Load a value from super[keyed] property.
  // Expect receiver ('this' value), home_object and key on the stack.
  void EmitKeyedSuperPropertyLoad(Property* expr);

  // Load a value from a keyed property.
  // The receiver and the key are left on the stack by the IC.
  void EmitKeyedPropertyLoad(Property* expr);

  // Adds the properties to the class (function) object and to its prototype.
  // Expects the class (function) in the accumulator. The class (function) is
  // in the accumulator after installing all the properties.
  void EmitClassDefineProperties(ClassLiteral* lit);

  // Pushes the property key as a Name on the stack.
  void EmitPropertyKey(ObjectLiteralProperty* property, BailoutId bailout_id);
| 576 | |
  // Apply the compound assignment operator. Expects the left operand on top
  // of the stack and the right one in the accumulator.
  void EmitBinaryOp(BinaryOperation* expr, Token::Value op);

  // Helper functions for generating inlined smi code for certain
  // binary operations.
  void EmitInlineSmiBinaryOp(BinaryOperation* expr,
                             Token::Value op,
                             Expression* left,
                             Expression* right);

  // Assign to the given expression as if via '='. The right-hand-side value
  // is expected in the accumulator. slot is only used if FLAG_vector_stores
  // is true.
  void EmitAssignment(Expression* expr, FeedbackVectorSlot slot);

  // Complete a variable assignment. The right-hand-side value is expected
  // in the accumulator.
  void EmitVariableAssignment(Variable* var, Token::Value op,
                              FeedbackVectorSlot slot);

  // Helper functions to EmitVariableAssignment.
  void EmitStoreToStackLocalOrContextSlot(Variable* var,
                                          MemOperand location);

  // Complete a named property assignment. The receiver is expected on top
  // of the stack and the right-hand-side value in the accumulator.
  void EmitNamedPropertyAssignment(Assignment* expr);

  // Complete a super named property assignment. The right-hand-side value
  // is expected in accumulator.
  void EmitNamedSuperPropertyStore(Property* prop);

  // Complete a super keyed property assignment. The right-hand-side value
  // is expected in accumulator.
  void EmitKeyedSuperPropertyStore(Property* prop);

  // Complete a keyed property assignment. The receiver and key are
  // expected on top of the stack and the right-hand-side value in the
  // accumulator.
  void EmitKeyedPropertyAssignment(Assignment* expr);

  // Delegates to FunctionLiteral::NeedsHomeObject.
  static bool NeedsHomeObject(Expression* expr) {
    return FunctionLiteral::NeedsHomeObject(expr);
  }

  // Adds the [[HomeObject]] to |initializer| if it is a FunctionLiteral.
  // The value of the initializer is expected to be at the top of the stack.
  // |offset| is the offset in the stack where the home object can be found.
  void EmitSetHomeObject(Expression* initializer, int offset,
                         FeedbackVectorSlot slot);

  // Variant of EmitSetHomeObject with the initializer in the accumulator.
  void EmitSetHomeObjectAccumulator(Expression* initializer, int offset,
                                    FeedbackVectorSlot slot);

  void CallIC(Handle<Code> code,
              TypeFeedbackId id = TypeFeedbackId::None());
| 634 | |
  // Inside typeof reference errors are never thrown.
  void CallLoadIC(TypeofMode typeof_mode, LanguageMode language_mode = SLOPPY,
                  TypeFeedbackId id = TypeFeedbackId::None());
  void CallStoreIC(TypeFeedbackId id = TypeFeedbackId::None());

  void SetFunctionPosition(FunctionLiteral* fun);
  void SetReturnPosition(FunctionLiteral* fun);

  // Whether a recorded source position is also a debugger break location.
  enum InsertBreak { INSERT_BREAK, SKIP_BREAK };

  // During stepping we want to be able to break at each statement, but not at
  // every (sub-)expression. That is why by default we insert breaks at every
  // statement position, but not at every expression position, unless stated
  // otherwise.
  void SetStatementPosition(Statement* stmt,
                            InsertBreak insert_break = INSERT_BREAK);
  void SetExpressionPosition(Expression* expr,
                             InsertBreak insert_break = SKIP_BREAK);

  // Consider an expression a statement. As such, we also insert a break.
  // This is used in loop headers where we want to break for each iteration.
  void SetExpressionAsStatementPosition(Expression* expr);

  void SetCallPosition(Expression* expr);

  void SetConstructCallPosition(Expression* expr) {
    // Currently call and construct calls are treated the same wrt debugging.
    SetCallPosition(expr);
  }
| 664 | |
  // Non-local control flow support.
  void EnterTryBlock(int handler_index, Label* handler);
  void ExitTryBlock(int handler_index);
  void EnterFinallyBlock();
  void ExitFinallyBlock();
  void ClearPendingMessage();

  // Loop nesting counter.
  int loop_depth() { return loop_depth_; }
  void increment_loop_depth() { loop_depth_++; }
  void decrement_loop_depth() {
    DCHECK(loop_depth_ > 0);  // Must pair with a prior increment.
    loop_depth_--;
  }

  MacroAssembler* masm() const { return masm_; }

  // The innermost expression context (see ExpressionContext below).
  class ExpressionContext;
  const ExpressionContext* context() { return context_; }
  void set_new_context(const ExpressionContext* context) { context_ = context; }

  // Simple accessors, mostly forwarding to the CompilationInfo.
  Isolate* isolate() const { return isolate_; }
  Zone* zone() const { return zone_; }
  Handle<Script> script() { return info_->script(); }
  bool is_eval() { return info_->is_eval(); }
  bool is_native() { return info_->is_native(); }
  LanguageMode language_mode() { return literal()->language_mode(); }
  bool has_simple_parameters() { return info_->has_simple_parameters(); }
  FunctionLiteral* literal() const { return info_->literal(); }
  Scope* scope() { return scope_; }

  // Platform-specific register assignments.
  static Register result_register();
  static Register context_register();
| 698 | |
  // Set fields in the stack frame. Offsets are the frame pointer relative
  // offsets defined in, e.g., StandardFrameConstants.
  void StoreToFrameField(int frame_offset, Register value);

  // Load a value from the current context. Indices are defined as an enum
  // in v8::internal::Context.
  void LoadContextField(Register dst, int context_index);

  // Push the function argument for the runtime functions PushWithContext
  // and PushCatchContext.
  void PushFunctionArgumentForContextAllocation();

  void PushCalleeAndWithBaseObject(Call* expr);

  // AST node visit functions.
#define DECLARE_VISIT(type) void Visit##type(type* node) override;
  AST_NODE_LIST(DECLARE_VISIT)
#undef DECLARE_VISIT

  // Specialized visitors for the forms of BinaryOperation.
  void VisitComma(BinaryOperation* expr);
  void VisitLogicalExpression(BinaryOperation* expr);
  void VisitArithmeticExpression(BinaryOperation* expr);

  void VisitForTypeofValue(Expression* expr);

  void Generate();
  void PopulateDeoptimizationData(Handle<Code> code);
  void PopulateTypeFeedbackInfo(Handle<Code> code);
  void PopulateHandlerTable(Handle<Code> code);

  bool MustCreateObjectLiteralWithRuntime(ObjectLiteral* expr) const;
  bool MustCreateArrayLiteralWithRuntime(ArrayLiteral* expr) const;

  void EmitLoadStoreICSlot(FeedbackVectorSlot slot);

  int NewHandlerTableEntry();

  // Maps an AST node id to the pc offset and accumulator state at the
  // corresponding bailout point (encoded via StateField/PcField above).
  struct BailoutEntry {
    BailoutId id;
    unsigned pc_and_state;
  };

  // One loop back edge: its OSR AST id, code offset, and loop nesting depth.
  struct BackEdgeEntry {
    BailoutId id;
    unsigned pc;
    uint32_t loop_depth;
  };

  // One entry of the exception handler table: a code range, its handler
  // offset, and the stack/try-catch depths at that point.
  struct HandlerTableEntry {
    unsigned range_start;
    unsigned range_end;
    unsigned handler_offset;
    int stack_depth;
    int try_catch_depth;
  };
| 754 | |
  // Abstract base for the expression-visit contexts (effect, accumulator,
  // stack, test). Each instance installs itself as the codegen's current
  // context on construction and restores the previous one on destruction,
  // so contexts nest with C++ scope.
  class ExpressionContext BASE_EMBEDDED {
   public:
    explicit ExpressionContext(FullCodeGenerator* codegen)
        : masm_(codegen->masm()), old_(codegen->context()), codegen_(codegen) {
      codegen->set_new_context(this);
    }

    virtual ~ExpressionContext() {
      codegen_->set_new_context(old_);
    }

    Isolate* isolate() const { return codegen_->isolate(); }

    // Convert constant control flow (true or false) to the result expected for
    // this expression context.
    virtual void Plug(bool flag) const = 0;

    // Emit code to convert a pure value (in a register, known variable
    // location, as a literal, or on top of the stack) into the result
    // expected according to this expression context.
    virtual void Plug(Register reg) const = 0;
    virtual void Plug(Variable* var) const = 0;
    virtual void Plug(Handle<Object> lit) const = 0;
    virtual void Plug(Heap::RootListIndex index) const = 0;
    virtual void PlugTOS() const = 0;

    // Emit code to convert pure control flow to a pair of unbound labels into
    // the result expected according to this expression context. The
    // implementation will bind both labels unless it's a TestContext, which
    // won't bind them at this point.
    virtual void Plug(Label* materialize_true,
                      Label* materialize_false) const = 0;

    // Emit code to discard count elements from the top of stack, then convert
    // a pure value into the result expected according to this expression
    // context.
    virtual void DropAndPlug(int count, Register reg) const = 0;

    // Set up branch labels for a test expression. The three Label** parameters
    // are output parameters.
    virtual void PrepareTest(Label* materialize_true,
                             Label* materialize_false,
                             Label** if_true,
                             Label** if_false,
                             Label** fall_through) const = 0;

    // Returns true if we are evaluating only for side effects (i.e. if the
    // result will be discarded).
    virtual bool IsEffect() const { return false; }

    // Returns true if we are evaluating for the value (in accu/on stack).
    virtual bool IsAccumulatorValue() const { return false; }
    virtual bool IsStackValue() const { return false; }

    // Returns true if we are branching on the value rather than materializing
    // it. Only used for asserts.
    virtual bool IsTest() const { return false; }

   protected:
    FullCodeGenerator* codegen() const { return codegen_; }
    MacroAssembler* masm() const { return masm_; }
    MacroAssembler* masm_;

   private:
    const ExpressionContext* old_;  // Context to restore on destruction.
    FullCodeGenerator* codegen_;
  };
| 822 | |
  // Context for expressions whose value is needed in the accumulator
  // register (IsAccumulatorValue() == true).
  class AccumulatorValueContext : public ExpressionContext {
   public:
    explicit AccumulatorValueContext(FullCodeGenerator* codegen)
        : ExpressionContext(codegen) { }

    void Plug(bool flag) const override;
    void Plug(Register reg) const override;
    void Plug(Label* materialize_true, Label* materialize_false) const override;
    void Plug(Variable* var) const override;
    void Plug(Handle<Object> lit) const override;
    void Plug(Heap::RootListIndex) const override;
    void PlugTOS() const override;
    void DropAndPlug(int count, Register reg) const override;
    void PrepareTest(Label* materialize_true, Label* materialize_false,
                     Label** if_true, Label** if_false,
                     Label** fall_through) const override;
    bool IsAccumulatorValue() const override { return true; }
  };
| 841 | |
  // Context for expressions whose value is needed on top of the operand
  // stack (IsStackValue() == true).
  class StackValueContext : public ExpressionContext {
   public:
    explicit StackValueContext(FullCodeGenerator* codegen)
        : ExpressionContext(codegen) { }

    void Plug(bool flag) const override;
    void Plug(Register reg) const override;
    void Plug(Label* materialize_true, Label* materialize_false) const override;
    void Plug(Variable* var) const override;
    void Plug(Handle<Object> lit) const override;
    void Plug(Heap::RootListIndex) const override;
    void PlugTOS() const override;
    void DropAndPlug(int count, Register reg) const override;
    void PrepareTest(Label* materialize_true, Label* materialize_false,
                     Label** if_true, Label** if_false,
                     Label** fall_through) const override;
    bool IsStackValue() const override { return true; }
  };
| 860 | |
| 861 | class TestContext : public ExpressionContext { |
| 862 | public: |
| 863 | TestContext(FullCodeGenerator* codegen, |
| 864 | Expression* condition, |
| 865 | Label* true_label, |
| 866 | Label* false_label, |
| 867 | Label* fall_through) |
| 868 | : ExpressionContext(codegen), |
| 869 | condition_(condition), |
| 870 | true_label_(true_label), |
| 871 | false_label_(false_label), |
| 872 | fall_through_(fall_through) { } |
| 873 | |
| 874 | static const TestContext* cast(const ExpressionContext* context) { |
| 875 | DCHECK(context->IsTest()); |
| 876 | return reinterpret_cast<const TestContext*>(context); |
| 877 | } |
| 878 | |
| 879 | Expression* condition() const { return condition_; } |
| 880 | Label* true_label() const { return true_label_; } |
| 881 | Label* false_label() const { return false_label_; } |
| 882 | Label* fall_through() const { return fall_through_; } |
| 883 | |
| 884 | void Plug(bool flag) const override; |
| 885 | void Plug(Register reg) const override; |
| 886 | void Plug(Label* materialize_true, Label* materialize_false) const override; |
| 887 | void Plug(Variable* var) const override; |
| 888 | void Plug(Handle<Object> lit) const override; |
| 889 | void Plug(Heap::RootListIndex) const override; |
| 890 | void PlugTOS() const override; |
| 891 | void DropAndPlug(int count, Register reg) const override; |
| 892 | void PrepareTest(Label* materialize_true, Label* materialize_false, |
| 893 | Label** if_true, Label** if_false, |
| 894 | Label** fall_through) const override; |
| 895 | bool IsTest() const override { return true; } |
| 896 | |
| 897 | private: |
| 898 | Expression* condition_; |
| 899 | Label* true_label_; |
| 900 | Label* false_label_; |
| 901 | Label* fall_through_; |
| 902 | }; |
| 903 | |
  // Context for expressions evaluated only for their side effects; the
  // resulting value is discarded (IsEffect() == true).
  class EffectContext : public ExpressionContext {
   public:
    explicit EffectContext(FullCodeGenerator* codegen)
        : ExpressionContext(codegen) { }

    void Plug(bool flag) const override;
    void Plug(Register reg) const override;
    void Plug(Label* materialize_true, Label* materialize_false) const override;
    void Plug(Variable* var) const override;
    void Plug(Handle<Object> lit) const override;
    void Plug(Heap::RootListIndex) const override;
    void PlugTOS() const override;
    void DropAndPlug(int count, Register reg) const override;
    void PrepareTest(Label* materialize_true, Label* materialize_false,
                     Label** if_true, Label** if_false,
                     Label** fall_through) const override;
    bool IsEffect() const override { return true; }
  };
| 922 | |
  // RAII helper around visiting a block: the constructor switches the
  // codegen to the given scope (presumably allocating a block context when
  // needs_block_context_ is set — definitions live in the .cc file), and the
  // destructor restores saved_scope_, keyed by the recorded bailout ids.
  class EnterBlockScopeIfNeeded {
   public:
    EnterBlockScopeIfNeeded(FullCodeGenerator* codegen, Scope* scope,
                            BailoutId entry_id, BailoutId declarations_id,
                            BailoutId exit_id);
    ~EnterBlockScopeIfNeeded();

   private:
    MacroAssembler* masm() const { return codegen_->masm(); }

    FullCodeGenerator* codegen_;
    Scope* saved_scope_;       // Scope to restore on destruction.
    BailoutId exit_id_;        // Bailout id used when leaving the block.
    bool needs_block_context_; // Whether a block context was entered.
  };
| 938 | |
  MacroAssembler* masm_;               // Assembler emitting the code.
  CompilationInfo* info_;              // Compilation being processed.
  Isolate* isolate_;
  Zone* zone_;
  Scope* scope_;                       // Current scope during codegen.
  Label return_label_;                 // Shared epilogue entry point.
  NestedStatement* nesting_stack_;     // Innermost nested statement.
  int loop_depth_;
  int try_catch_depth_;
  ZoneList<Handle<Object> >* globals_;
  Handle<FixedArray> modules_;
  int module_index_;
  const ExpressionContext* context_;   // Current expression context.
  ZoneList<BailoutEntry> bailout_entries_;   // Deopt bailout table rows.
  ZoneList<BackEdgeEntry> back_edges_;       // Back edge table rows.
  ZoneVector<HandlerTableEntry> handler_table_;  // Exception handler rows.
  int ic_total_count_;                 // Number of ICs emitted so far.
  Handle<Cell> profiling_counter_;
  bool generate_debug_code_;

  friend class NestedStatement;

  DEFINE_AST_VISITOR_SUBCLASS_MEMBERS();
  DISALLOW_COPY_AND_ASSIGN(FullCodeGenerator);
| 963 | }; |
| 964 | |
| 965 | |
| 966 | class BackEdgeTable { |
| 967 | public: |
| 968 | BackEdgeTable(Code* code, DisallowHeapAllocation* required) { |
| 969 | DCHECK(code->kind() == Code::FUNCTION); |
| 970 | instruction_start_ = code->instruction_start(); |
| 971 | Address table_address = instruction_start_ + code->back_edge_table_offset(); |
| 972 | length_ = Memory::uint32_at(table_address); |
| 973 | start_ = table_address + kTableLengthSize; |
| 974 | } |
| 975 | |
| 976 | uint32_t length() { return length_; } |
| 977 | |
| 978 | BailoutId ast_id(uint32_t index) { |
| 979 | return BailoutId(static_cast<int>( |
| 980 | Memory::uint32_at(entry_at(index) + kAstIdOffset))); |
| 981 | } |
| 982 | |
| 983 | uint32_t loop_depth(uint32_t index) { |
| 984 | return Memory::uint32_at(entry_at(index) + kLoopDepthOffset); |
| 985 | } |
| 986 | |
| 987 | uint32_t pc_offset(uint32_t index) { |
| 988 | return Memory::uint32_at(entry_at(index) + kPcOffsetOffset); |
| 989 | } |
| 990 | |
| 991 | Address pc(uint32_t index) { |
| 992 | return instruction_start_ + pc_offset(index); |
| 993 | } |
| 994 | |
| 995 | enum BackEdgeState { |
| 996 | INTERRUPT, |
| 997 | ON_STACK_REPLACEMENT, |
| 998 | OSR_AFTER_STACK_CHECK |
| 999 | }; |
| 1000 | |
| 1001 | // Increase allowed loop nesting level by one and patch those matching loops. |
| 1002 | static void Patch(Isolate* isolate, Code* unoptimized_code); |
| 1003 | |
| 1004 | // Patch the back edge to the target state, provided the correct callee. |
| 1005 | static void PatchAt(Code* unoptimized_code, |
| 1006 | Address pc, |
| 1007 | BackEdgeState target_state, |
| 1008 | Code* replacement_code); |
| 1009 | |
| 1010 | // Change all patched back edges back to normal interrupts. |
| 1011 | static void Revert(Isolate* isolate, |
| 1012 | Code* unoptimized_code); |
| 1013 | |
| 1014 | // Change a back edge patched for on-stack replacement to perform a |
| 1015 | // stack check first. |
| 1016 | static void AddStackCheck(Handle<Code> code, uint32_t pc_offset); |
| 1017 | |
| 1018 | // Revert the patch by AddStackCheck. |
| 1019 | static void RemoveStackCheck(Handle<Code> code, uint32_t pc_offset); |
| 1020 | |
| 1021 | // Return the current patch state of the back edge. |
| 1022 | static BackEdgeState GetBackEdgeState(Isolate* isolate, |
| 1023 | Code* unoptimized_code, |
| 1024 | Address pc_after); |
| 1025 | |
| 1026 | #ifdef DEBUG |
| 1027 | // Verify that all back edges of a certain loop depth are patched. |
| 1028 | static bool Verify(Isolate* isolate, Code* unoptimized_code); |
| 1029 | #endif // DEBUG |
| 1030 | |
| 1031 | private: |
| 1032 | Address entry_at(uint32_t index) { |
| 1033 | DCHECK(index < length_); |
| 1034 | return start_ + index * kEntrySize; |
| 1035 | } |
| 1036 | |
| 1037 | static const int kTableLengthSize = kIntSize; |
| 1038 | static const int kAstIdOffset = 0 * kIntSize; |
| 1039 | static const int kPcOffsetOffset = 1 * kIntSize; |
| 1040 | static const int kLoopDepthOffset = 2 * kIntSize; |
| 1041 | static const int kEntrySize = 3 * kIntSize; |
| 1042 | |
| 1043 | Address start_; |
| 1044 | Address instruction_start_; |
| 1045 | uint32_t length_; |
| 1046 | }; |
| 1047 | |
| 1048 | |
| 1049 | } // namespace internal |
| 1050 | } // namespace v8 |
| 1051 | |
| 1052 | #endif // V8_FULL_CODEGEN_FULL_CODEGEN_H_ |