// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_H_
#define V8_COMPILER_H_

#include "src/allocation.h"
#include "src/ast/ast.h"
#include "src/bailout-reason.h"
#include "src/compilation-dependencies.h"
#include "src/signature.h"
#include "src/zone.h"

namespace v8 {
namespace internal {

class AstValueFactory;
class HydrogenCodeStub;
class JavaScriptFrame;
class ParseInfo;
class ScriptData;

// This class encapsulates the encoding and decoding of the source positions
// from which hydrogen values originated.
// When FLAG_hydrogen_track_positions is set, this object encodes the
// identifier of the inlining and the absolute offset from the start of the
// inlined function.
// When the flag is not set, we simply track the absolute offset from the
// script start.
class SourcePosition {
 public:
  static SourcePosition Unknown() {
    return SourcePosition::FromRaw(kNoPosition);
  }

  bool IsUnknown() const { return value_ == kNoPosition; }

  uint32_t position() const { return PositionField::decode(value_); }
  void set_position(uint32_t position) {
    if (FLAG_hydrogen_track_positions) {
      value_ = static_cast<uint32_t>(PositionField::update(value_, position));
    } else {
      value_ = position;
    }
  }

  uint32_t inlining_id() const { return InliningIdField::decode(value_); }
  void set_inlining_id(uint32_t inlining_id) {
    if (FLAG_hydrogen_track_positions) {
      value_ =
          static_cast<uint32_t>(InliningIdField::update(value_, inlining_id));
    }
  }

  uint32_t raw() const { return value_; }

 private:
  static const uint32_t kNoPosition =
      static_cast<uint32_t>(RelocInfo::kNoPosition);
  typedef BitField<uint32_t, 0, 9> InliningIdField;

  // Offset from the start of the inlined function.
  typedef BitField<uint32_t, 9, 23> PositionField;
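  // Example, derived from the two bitfields above: when
  // FLAG_hydrogen_track_positions is enabled, an offset of 100 inside
  // inlining 2 is stored as (100 << 9) | 2 == 51202.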

  friend class HPositionInfo;
  friend class Deoptimizer;

  static SourcePosition FromRaw(uint32_t raw_position) {
    SourcePosition position;
    position.value_ = raw_position;
    return position;
  }

76 // If FLAG_hydrogen_track_positions is set contains bitfields InliningIdField
77 // and PositionField.
78 // Otherwise contains absolute offset from the script start.
79 uint32_t value_;
Ben Murdochb8a8cc12014-11-26 15:28:44 +000080};


std::ostream& operator<<(std::ostream& os, const SourcePosition& p);


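// Bookkeeping about one inlined function: the inlining it was inlined into
// (parent_id), the source position of the inlining, the script and start
// position of the inlined function, and the deopt pc offsets recorded for it.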
struct InlinedFunctionInfo {
  InlinedFunctionInfo(int parent_id, SourcePosition inline_position,
                      int script_id, int start_position)
      : parent_id(parent_id),
        inline_position(inline_position),
        script_id(script_id),
        start_position(start_position) {}
  int parent_id;
  SourcePosition inline_position;
  int script_id;
  int start_position;
  std::vector<size_t> deopt_pc_offsets;

  static const int kNoParentId = -1;
};


// CompilationInfo encapsulates the information known at compile time. It
// is constructed based on the resources available at compile time.
class CompilationInfo {
 public:
  // Various configuration flags for a compilation, as well as some properties
  // of the compiled code produced by a compilation.
  enum Flag {
    kDeferredCalling = 1 << 0,
    kNonDeferredCalling = 1 << 1,
    kSavesCallerDoubles = 1 << 2,
    kRequiresFrame = 1 << 3,
    kMustNotHaveEagerFrame = 1 << 4,
    kDeoptimizationSupport = 1 << 5,
    kDebug = 1 << 6,
    kSerializing = 1 << 7,
    kFunctionContextSpecializing = 1 << 8,
    kFrameSpecializing = 1 << 9,
    kNativeContextSpecializing = 1 << 10,
    kInliningEnabled = 1 << 11,
    kTypingEnabled = 1 << 12,
    kDisableFutureOptimization = 1 << 13,
    kSplittingEnabled = 1 << 14,
    kDeoptimizationEnabled = 1 << 16,
    kSourcePositionsEnabled = 1 << 17,
    kFirstCompile = 1 << 18,
  };

  explicit CompilationInfo(ParseInfo* parse_info);
  CompilationInfo(CodeStub* stub, Isolate* isolate, Zone* zone);
  CompilationInfo(const char* debug_name, Isolate* isolate, Zone* zone);
  virtual ~CompilationInfo();

  ParseInfo* parse_info() const { return parse_info_; }

  // -----------------------------------------------------------
  // TODO(titzer): inline and delete accessors of ParseInfo
  // -----------------------------------------------------------
  Handle<Script> script() const;
  bool is_eval() const;
  bool is_native() const;
  bool is_module() const;
  LanguageMode language_mode() const;
  Handle<JSFunction> closure() const;
  FunctionLiteral* literal() const;
  Scope* scope() const;
  Handle<Context> context() const;
  Handle<SharedFunctionInfo> shared_info() const;
  bool has_shared_info() const;
  bool has_context() const;
  bool has_literal() const;
  bool has_scope() const;
  // -----------------------------------------------------------

  Isolate* isolate() const { return isolate_; }
  Zone* zone() { return zone_; }
  bool is_osr() const { return !osr_ast_id_.IsNone(); }
  Handle<Code> code() const { return code_; }
  CodeStub* code_stub() const { return code_stub_; }
  BailoutId osr_ast_id() const { return osr_ast_id_; }
  Handle<Code> unoptimized_code() const { return unoptimized_code_; }
  int opt_count() const { return opt_count_; }
  int num_parameters() const;
  int num_parameters_including_this() const;
  bool is_this_defined() const;
  int num_heap_slots() const;

  void set_parameter_count(int parameter_count) {
    DCHECK(IsStub());
    parameter_count_ = parameter_count;
  }

  bool has_bytecode_array() const { return !bytecode_array_.is_null(); }
  Handle<BytecodeArray> bytecode_array() const { return bytecode_array_; }

  bool is_tracking_positions() const { return track_positions_; }

  bool is_calling() const {
    return GetFlag(kDeferredCalling) || GetFlag(kNonDeferredCalling);
  }

  void MarkAsDeferredCalling() { SetFlag(kDeferredCalling); }

  bool is_deferred_calling() const { return GetFlag(kDeferredCalling); }

  void MarkAsNonDeferredCalling() { SetFlag(kNonDeferredCalling); }

  bool is_non_deferred_calling() const { return GetFlag(kNonDeferredCalling); }

  void MarkAsSavesCallerDoubles() { SetFlag(kSavesCallerDoubles); }

  bool saves_caller_doubles() const { return GetFlag(kSavesCallerDoubles); }

  void MarkAsRequiresFrame() { SetFlag(kRequiresFrame); }

  bool requires_frame() const { return GetFlag(kRequiresFrame); }

  void MarkMustNotHaveEagerFrame() { SetFlag(kMustNotHaveEagerFrame); }

  bool GetMustNotHaveEagerFrame() const {
    return GetFlag(kMustNotHaveEagerFrame);
  }

  // Compilations marked as debug produce unoptimized code with debug break
  // slots. Inner functions that cannot be compiled without a context are
  // compiled eagerly. Deoptimization support is always included so that the
  // code does not have to be recompiled later.
  void MarkAsDebug() {
    SetFlag(kDebug);
    SetFlag(kDeoptimizationSupport);
  }

  bool is_debug() const { return GetFlag(kDebug); }

  void PrepareForSerializing() { SetFlag(kSerializing); }

  bool will_serialize() const { return GetFlag(kSerializing); }

  void MarkAsFunctionContextSpecializing() {
    SetFlag(kFunctionContextSpecializing);
  }

  bool is_function_context_specializing() const {
    return GetFlag(kFunctionContextSpecializing);
  }

  void MarkAsFrameSpecializing() { SetFlag(kFrameSpecializing); }

  bool is_frame_specializing() const { return GetFlag(kFrameSpecializing); }

  void MarkAsNativeContextSpecializing() {
    SetFlag(kNativeContextSpecializing);
  }

  bool is_native_context_specializing() const {
    return GetFlag(kNativeContextSpecializing);
  }

  void MarkAsDeoptimizationEnabled() { SetFlag(kDeoptimizationEnabled); }

  bool is_deoptimization_enabled() const {
    return GetFlag(kDeoptimizationEnabled);
  }

  void MarkAsSourcePositionsEnabled() { SetFlag(kSourcePositionsEnabled); }

  bool is_source_positions_enabled() const {
    return GetFlag(kSourcePositionsEnabled);
  }

  void MarkAsInliningEnabled() { SetFlag(kInliningEnabled); }

  bool is_inlining_enabled() const { return GetFlag(kInliningEnabled); }

  void MarkAsTypingEnabled() { SetFlag(kTypingEnabled); }

  bool is_typing_enabled() const { return GetFlag(kTypingEnabled); }

  void MarkAsSplittingEnabled() { SetFlag(kSplittingEnabled); }

  bool is_splitting_enabled() const { return GetFlag(kSplittingEnabled); }

  void MarkAsFirstCompile() { SetFlag(kFirstCompile); }

  void MarkAsCompiled() { SetFlag(kFirstCompile, false); }

  bool is_first_compile() const { return GetFlag(kFirstCompile); }

  bool GeneratePreagedPrologue() const {
    // Generate a pre-aged prologue if we are optimizing for size, which
    // will make code flushing more aggressive. Only apply to Code::FUNCTION,
    // since StaticMarkingVisitor::IsFlushable only flushes proper functions.
    return FLAG_optimize_for_size && FLAG_age_code && !will_serialize() &&
           !is_debug() && output_code_kind_ == Code::FUNCTION;
  }

  void EnsureFeedbackVector();
  Handle<TypeFeedbackVector> feedback_vector() const {
    return feedback_vector_;
  }
  void SetCode(Handle<Code> code) { code_ = code; }

  void SetBytecodeArray(Handle<BytecodeArray> bytecode_array) {
    bytecode_array_ = bytecode_array;
  }

  bool ShouldTrapOnDeopt() const {
    return (FLAG_trap_on_deopt && IsOptimizing()) ||
           (FLAG_trap_on_stub_deopt && IsStub());
  }

  bool has_native_context() const {
    return !closure().is_null() && (closure()->native_context() != nullptr);
  }

  Context* native_context() const {
    return has_native_context() ? closure()->native_context() : nullptr;
  }

  bool has_global_object() const { return has_native_context(); }

  JSGlobalObject* global_object() const {
    return has_global_object() ? native_context()->global_object() : nullptr;
  }

  // Accessors for the different compilation modes.
  bool IsOptimizing() const { return mode_ == OPTIMIZE; }
  bool IsStub() const { return mode_ == STUB; }
  void SetOptimizing(BailoutId osr_ast_id, Handle<Code> unoptimized) {
    DCHECK(has_shared_info());
    SetMode(OPTIMIZE);
    osr_ast_id_ = osr_ast_id;
    unoptimized_code_ = unoptimized;
    optimization_id_ = isolate()->NextOptimizationId();
    set_output_code_kind(Code::OPTIMIZED_FUNCTION);
  }

  // Deoptimization support.
  bool HasDeoptimizationSupport() const {
    return GetFlag(kDeoptimizationSupport);
  }
  void EnableDeoptimizationSupport() {
    DCHECK_EQ(BASE, mode_);
    SetFlag(kDeoptimizationSupport);
  }
  bool ShouldEnsureSpaceForLazyDeopt() { return !IsStub(); }

  bool ExpectsJSReceiverAsReceiver();

  // Determines whether or not to insert a self-optimization header.
  bool ShouldSelfOptimize();

  void set_deferred_handles(DeferredHandles* deferred_handles) {
    DCHECK(deferred_handles_ == NULL);
    deferred_handles_ = deferred_handles;
  }

  void ReopenHandlesInNewHandleScope() {
    unoptimized_code_ = Handle<Code>(*unoptimized_code_);
  }

  void AbortOptimization(BailoutReason reason) {
    DCHECK(reason != kNoReason);
    if (bailout_reason_ == kNoReason) bailout_reason_ = reason;
    SetFlag(kDisableFutureOptimization);
  }

  void RetryOptimization(BailoutReason reason) {
    DCHECK(reason != kNoReason);
    if (GetFlag(kDisableFutureOptimization)) return;
    bailout_reason_ = reason;
  }

  BailoutReason bailout_reason() const { return bailout_reason_; }

  int prologue_offset() const {
    DCHECK_NE(Code::kPrologueOffsetNotSet, prologue_offset_);
    return prologue_offset_;
  }

  void set_prologue_offset(int prologue_offset) {
    DCHECK_EQ(Code::kPrologueOffsetNotSet, prologue_offset_);
    prologue_offset_ = prologue_offset;
  }

  int start_position_for(uint32_t inlining_id) {
    return inlined_function_infos_.at(inlining_id).start_position;
  }
  const std::vector<InlinedFunctionInfo>& inlined_function_infos() {
    return inlined_function_infos_;
  }

  void LogDeoptCallPosition(int pc_offset, int inlining_id);
  int TraceInlinedFunction(Handle<SharedFunctionInfo> shared,
                           SourcePosition position, int parent_id);

  CompilationDependencies* dependencies() { return &dependencies_; }

  bool HasSameOsrEntry(Handle<JSFunction> function, BailoutId osr_ast_id) {
    return osr_ast_id_ == osr_ast_id && function.is_identical_to(closure());
  }

  int optimization_id() const { return optimization_id_; }

  int osr_expr_stack_height() { return osr_expr_stack_height_; }
  void set_osr_expr_stack_height(int height) {
    DCHECK(height >= 0);
    osr_expr_stack_height_ = height;
  }
  JavaScriptFrame* osr_frame() const { return osr_frame_; }
  void set_osr_frame(JavaScriptFrame* osr_frame) { osr_frame_ = osr_frame; }

#if DEBUG
  void PrintAstForTesting();
#endif

  bool has_simple_parameters();

  struct InlinedFunctionHolder {
    Handle<SharedFunctionInfo> shared_info;

    // Root that holds the unoptimized code of the inlined function alive
    // (and out of reach of code flushing) until we finish compilation.
    // Do not remove.
    Handle<Code> inlined_code_object_root;

    explicit InlinedFunctionHolder(
        Handle<SharedFunctionInfo> inlined_shared_info)
        : shared_info(inlined_shared_info),
          inlined_code_object_root(inlined_shared_info->code()) {}
  };

  typedef std::vector<InlinedFunctionHolder> InlinedFunctionList;
  InlinedFunctionList const& inlined_functions() const {
    return inlined_functions_;
  }

  void AddInlinedFunction(Handle<SharedFunctionInfo> inlined_function) {
    inlined_functions_.push_back(InlinedFunctionHolder(inlined_function));
  }

  base::SmartArrayPointer<char> GetDebugName() const;

  Code::Kind output_code_kind() const { return output_code_kind_; }

  void set_output_code_kind(Code::Kind kind) { output_code_kind_ = kind; }

 protected:
  ParseInfo* parse_info_;

  void DisableFutureOptimization() {
    if (GetFlag(kDisableFutureOptimization) && has_shared_info()) {
      shared_info()->DisableOptimization(bailout_reason());
    }
  }

 private:
  // Compilation mode.
  // BASE is generated by the full codegen, optionally prepared for bailouts.
  // OPTIMIZE is optimized code generated by the Hydrogen-based backend.
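  // STUB is code compiled for a code stub (see code_stub_ below).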
  enum Mode {
    BASE,
    OPTIMIZE,
    STUB
  };

  CompilationInfo(ParseInfo* parse_info, CodeStub* code_stub,
                  const char* debug_name, Mode mode, Isolate* isolate,
                  Zone* zone);

  Isolate* isolate_;

  void SetMode(Mode mode) {
    mode_ = mode;
  }

  void SetFlag(Flag flag) { flags_ |= flag; }

  void SetFlag(Flag flag, bool value) {
    flags_ = value ? flags_ | flag : flags_ & ~flag;
  }

  bool GetFlag(Flag flag) const { return (flags_ & flag) != 0; }

  unsigned flags_;

  Code::Kind output_code_kind_;

  // For compiled stubs, the stub object.
  CodeStub* code_stub_;
  // The compiled code.
  Handle<Code> code_;

  // Used by codegen, ultimately kept rooted by the SharedFunctionInfo.
  Handle<TypeFeedbackVector> feedback_vector_;

  // Compilation mode flag and whether deoptimization is allowed.
  Mode mode_;
  BailoutId osr_ast_id_;
  // The unoptimized code we patched for OSR may not be the shared code
  // afterwards, since we may need to compile it again to include
  // deoptimization data. Keep track of which code we patched.
  Handle<Code> unoptimized_code_;

  // Holds the bytecode array generated by the interpreter.
  // TODO(rmcilroy/mstarzinger): Temporary work-around until compiler.cc is
  // refactored to avoid us needing to carry the BytecodeArray around.
  Handle<BytecodeArray> bytecode_array_;

  // The zone from which the compilation pipeline working on this
  // CompilationInfo allocates.
  Zone* zone_;

  DeferredHandles* deferred_handles_;

  // Dependencies for this compilation, e.g. stable maps.
  CompilationDependencies dependencies_;

  BailoutReason bailout_reason_;

  int prologue_offset_;

  std::vector<InlinedFunctionInfo> inlined_function_infos_;
  bool track_positions_;

  InlinedFunctionList inlined_functions_;

  // A copy of shared_info()->opt_count() to avoid handle deref
  // during graph optimization.
  int opt_count_;

  // Number of parameters used for compilation of stubs that require arguments.
  int parameter_count_;

  int optimization_id_;

  int osr_expr_stack_height_;

  // The current OSR frame for specialization or {nullptr}.
  JavaScriptFrame* osr_frame_ = nullptr;

  const char* debug_name_;

  DISALLOW_COPY_AND_ASSIGN(CompilationInfo);
};


// A wrapper around a CompilationInfo that detaches the Handles from
// the underlying DeferredHandleScope and stores them in info_ on
// destruction.
class CompilationHandleScope BASE_EMBEDDED {
 public:
  explicit CompilationHandleScope(CompilationInfo* info)
      : deferred_(info->isolate()), info_(info) {}
  ~CompilationHandleScope() {
    info_->set_deferred_handles(deferred_.Detach());
  }

 private:
  DeferredHandleScope deferred_;
  CompilationInfo* info_;
};


class HGraph;
class HOptimizedGraphBuilder;
class LChunk;

// A helper class that calls the three compilation phases in
// Crankshaft and keeps track of its state. The three phases
// CreateGraph, OptimizeGraph and GenerateCode can either
// fail, bail out to the full code generator or succeed. Apart from
// their return value, the status of the phase last run can be checked
// using last_status().
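// Typical usage (sketch): call CreateGraph(), OptimizeGraph() and
// GenerateCode() in that order, continuing only while each returns SUCCEEDED;
// a BAILED_OUT status means falling back to the full code generator.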
class OptimizedCompileJob: public ZoneObject {
 public:
  explicit OptimizedCompileJob(CompilationInfo* info)
      : info_(info),
        graph_builder_(NULL),
        graph_(NULL),
        chunk_(NULL),
        last_status_(FAILED),
        awaiting_install_(false) {}

  enum Status {
    FAILED, BAILED_OUT, SUCCEEDED
  };

  MUST_USE_RESULT Status CreateGraph();
  MUST_USE_RESULT Status OptimizeGraph();
  MUST_USE_RESULT Status GenerateCode();

  Status last_status() const { return last_status_; }
  CompilationInfo* info() const { return info_; }
  Isolate* isolate() const { return info()->isolate(); }

  Status RetryOptimization(BailoutReason reason) {
    info_->RetryOptimization(reason);
    return SetLastStatus(BAILED_OUT);
  }

  Status AbortOptimization(BailoutReason reason) {
    info_->AbortOptimization(reason);
    return SetLastStatus(BAILED_OUT);
  }

  void WaitForInstall() {
    DCHECK(info_->is_osr());
    awaiting_install_ = true;
  }

  bool IsWaitingForInstall() { return awaiting_install_; }

 private:
  CompilationInfo* info_;
  HOptimizedGraphBuilder* graph_builder_;
  HGraph* graph_;
  LChunk* chunk_;
  base::TimeDelta time_taken_to_create_graph_;
  base::TimeDelta time_taken_to_optimize_;
  base::TimeDelta time_taken_to_codegen_;
  Status last_status_;
  bool awaiting_install_;

  MUST_USE_RESULT Status SetLastStatus(Status status) {
    last_status_ = status;
    return last_status_;
  }
  void RecordOptimizationStats();

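  // Scoped timer for one compilation phase: starts on construction and adds
  // the elapsed time to |location| on destruction.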
  struct Timer {
    Timer(OptimizedCompileJob* job, base::TimeDelta* location)
        : job_(job), location_(location) {
      DCHECK(location_ != NULL);
      timer_.Start();
    }

    ~Timer() {
      *location_ += timer_.Elapsed();
    }

    OptimizedCompileJob* job_;
    base::ElapsedTimer timer_;
    base::TimeDelta* location_;
  };
};


// The V8 compiler
//
// General strategy: Source code is translated into an anonymous function
// without parameters which can then be executed. If the source code contains
// other functions, they will be compiled and allocated as part of the
// compilation of the source code.

// Please note this interface returns shared function infos. This means you
// need to call Factory::NewFunctionFromSharedFunctionInfo before you have a
// real function with a context.

class Compiler : public AllStatic {
 public:
  MUST_USE_RESULT static MaybeHandle<Code> GetUnoptimizedCode(
      Handle<JSFunction> function);
  MUST_USE_RESULT static MaybeHandle<Code> GetLazyCode(
      Handle<JSFunction> function);

  static bool Compile(Handle<JSFunction> function, ClearExceptionFlag flag);
  static bool CompileDebugCode(Handle<JSFunction> function);
  static bool CompileDebugCode(Handle<SharedFunctionInfo> shared);
  static void CompileForLiveEdit(Handle<Script> script);

  // Parser::Parse, then Compiler::Analyze.
  static bool ParseAndAnalyze(ParseInfo* info);
  // Rewrite, analyze scopes, and renumber.
  static bool Analyze(ParseInfo* info);
  // Adds deoptimization support, requires ParseAndAnalyze.
  static bool EnsureDeoptimizationSupport(CompilationInfo* info);

  // Compile a String source within a context for eval.
  MUST_USE_RESULT static MaybeHandle<JSFunction> GetFunctionFromEval(
      Handle<String> source, Handle<SharedFunctionInfo> outer_info,
      Handle<Context> context, LanguageMode language_mode,
      ParseRestriction restriction, int line_offset, int column_offset = 0,
      Handle<Object> script_name = Handle<Object>(),
      ScriptOriginOptions options = ScriptOriginOptions());

  // Compile a String source within a context.
  static Handle<SharedFunctionInfo> CompileScript(
      Handle<String> source, Handle<Object> script_name, int line_offset,
      int column_offset, ScriptOriginOptions resource_options,
      Handle<Object> source_map_url, Handle<Context> context,
      v8::Extension* extension, ScriptData** cached_data,
      ScriptCompiler::CompileOptions compile_options,
      NativesFlag is_natives_code, bool is_module);

  static Handle<SharedFunctionInfo> CompileStreamedScript(Handle<Script> script,
                                                          ParseInfo* info,
                                                          int source_length);

  // Create a shared function info object (the code may be lazily compiled).
  static Handle<SharedFunctionInfo> GetSharedFunctionInfo(
      FunctionLiteral* node, Handle<Script> script, CompilationInfo* outer);

  enum ConcurrencyMode { NOT_CONCURRENT, CONCURRENT };

  // Generate and return optimized code or start a concurrent optimization job.
  // In the latter case, return the InOptimizationQueue builtin. On failure,
  // return the empty handle.
  MUST_USE_RESULT static MaybeHandle<Code> GetOptimizedCode(
      Handle<JSFunction> function, Handle<Code> current_code,
      ConcurrencyMode mode, BailoutId osr_ast_id = BailoutId::None(),
      JavaScriptFrame* osr_frame = nullptr);

  // Generate and return code from a previously queued optimization job.
  // On failure, return the empty handle.
  static Handle<Code> GetConcurrentlyOptimizedCode(OptimizedCompileJob* job);
};


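// Scopes a single named compilation phase, recording how long it takes and
// whether trace output should be produced for it.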
class CompilationPhase BASE_EMBEDDED {
 public:
  CompilationPhase(const char* name, CompilationInfo* info);
  ~CompilationPhase();

 protected:
  bool ShouldProduceTraceOutput() const;

  const char* name() const { return name_; }
  CompilationInfo* info() const { return info_; }
  Isolate* isolate() const { return info()->isolate(); }
  Zone* zone() { return &zone_; }

 private:
  const char* name_;
  CompilationInfo* info_;
  Zone zone_;
  size_t info_zone_start_allocation_size_;
  base::ElapsedTimer timer_;

  DISALLOW_COPY_AND_ASSIGN(CompilationPhase);
};

}  // namespace internal
}  // namespace v8

#endif  // V8_COMPILER_H_