// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/runtime-profiler.h"

#include "src/assembler.h"
#include "src/ast/scopeinfo.h"
#include "src/base/platform/platform.h"
#include "src/bootstrapper.h"
#include "src/code-stubs.h"
#include "src/compilation-cache.h"
#include "src/execution.h"
#include "src/frames-inl.h"
#include "src/full-codegen/full-codegen.h"
#include "src/global-handles.h"

namespace v8 {
namespace internal {


// Number of times a function has to be seen on the stack before it is
// compiled for baseline.
static const int kProfilerTicksBeforeBaseline = 2;
// Number of times a function has to be seen on the stack before it is
// optimized.
static const int kProfilerTicksBeforeOptimization = 2;
// If the function optimization was disabled due to high deoptimization count,
// but the function is hot and has been seen on the stack this number of times,
// then we try to reenable optimization for this function.
static const int kProfilerTicksBeforeReenablingOptimization = 250;
// If a function does not have enough type info (according to
// FLAG_type_info_threshold), but has seen a huge number of ticks,
// optimize it as it is.
static const int kTicksWhenNotEnoughTypeInfo = 100;
// We only have one byte to store the number of ticks.
STATIC_ASSERT(kProfilerTicksBeforeOptimization < 256);
STATIC_ASSERT(kProfilerTicksBeforeReenablingOptimization < 256);
STATIC_ASSERT(kTicksWhenNotEnoughTypeInfo < 256);

// Maximum size in bytes of generated code for a function to allow OSR.
static const int kOSRCodeSizeAllowanceBase =
    100 * FullCodeGenerator::kCodeSizeMultiplier;

static const int kOSRCodeSizeAllowancePerTick =
    4 * FullCodeGenerator::kCodeSizeMultiplier;
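// Illustrative arithmetic only: kCodeSizeMultiplier is architecture-specific,
// so assuming a hypothetical value of 100, a function would start with an OSR
// code-size allowance of 10,000 bytes, growing by 400 bytes per profiler tick
// it accumulates (see MaybeOptimizeFullCodegen below).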

// Maximum size in bytes of generated code for a function to be optimized
// the very first time it is seen on the stack.
static const int kMaxSizeEarlyOpt =
    5 * FullCodeGenerator::kCodeSizeMultiplier;


RuntimeProfiler::RuntimeProfiler(Isolate* isolate)
    : isolate_(isolate),
      any_ic_changed_(false) {
}


static void GetICCounts(SharedFunctionInfo* shared,
                        int* ic_with_type_info_count, int* ic_generic_count,
                        int* ic_total_count, int* type_info_percentage,
                        int* generic_percentage) {
  *ic_total_count = 0;
  *ic_generic_count = 0;
  *ic_with_type_info_count = 0;
  if (shared->code()->kind() == Code::FUNCTION) {
    Code* shared_code = shared->code();
    Object* raw_info = shared_code->type_feedback_info();
    if (raw_info->IsTypeFeedbackInfo()) {
      TypeFeedbackInfo* info = TypeFeedbackInfo::cast(raw_info);
      *ic_with_type_info_count = info->ic_with_type_info_count();
      *ic_generic_count = info->ic_generic_count();
      *ic_total_count = info->ic_total_count();
    }
  }

  // Harvest vector ICs as well.
  TypeFeedbackVector* vector = shared->feedback_vector();
  int with = 0, gen = 0;
  vector->ComputeCounts(&with, &gen);
  *ic_with_type_info_count += with;
  *ic_generic_count += gen;

  if (*ic_total_count > 0) {
    *type_info_percentage = 100 * *ic_with_type_info_count / *ic_total_count;
    *generic_percentage = 100 * *ic_generic_count / *ic_total_count;
  } else {
    *type_info_percentage = 100;  // Compared against lower bound.
    *generic_percentage = 0;      // Compared against upper bound.
  }
}
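// Worked example for the percentages above (illustrative numbers only): 42
// type-info ICs and 3 generic ICs out of 50 total give type_info_percentage
// == 84 and generic_percentage == 6 (integer division). With no ICs at all,
// the defaults of 100% and 0% let the caller's threshold checks pass.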

static void TraceRecompile(JSFunction* function, const char* reason,
                           const char* type) {
  if (FLAG_trace_opt &&
      function->shared()->PassesFilter(FLAG_hydrogen_filter)) {
    PrintF("[marking ");
    function->ShortPrint();
    PrintF(" for %s recompilation, reason: %s", type, reason);
    if (FLAG_type_info_threshold > 0) {
      int typeinfo, generic, total, type_percentage, generic_percentage;
      GetICCounts(function->shared(), &typeinfo, &generic, &total,
                  &type_percentage, &generic_percentage);
      PrintF(", ICs with typeinfo: %d/%d (%d%%)", typeinfo, total,
             type_percentage);
      PrintF(", generic ICs: %d/%d (%d%%)", generic, total, generic_percentage);
    }
    PrintF("]\n");
  }
}

void RuntimeProfiler::Optimize(JSFunction* function, const char* reason) {
  TraceRecompile(function, reason, "optimized");

  // TODO(4280): Fix this to check the function is compiled to baseline once
  // we have a standard way to check that. For now, baseline code is
  // distinguished by the fact that it has no bytecode array.
  DCHECK(!function->shared()->HasBytecodeArray());
  function->AttemptConcurrentOptimization();
}

void RuntimeProfiler::Baseline(JSFunction* function, const char* reason) {
  TraceRecompile(function, reason, "baseline");

  // TODO(4280): Fix this to check the function is compiled for the
  // interpreter once we have a standard way to check that. For now, a
  // function will only have a bytecode array if compiled for the interpreter.
  DCHECK(function->shared()->HasBytecodeArray());
  function->MarkForBaseline();
}
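// Note the division of labor above: Optimize() queues a full-codegen function
// for (attempted) concurrent optimizing compilation, while Baseline() merely
// marks an interpreted (Ignition) function to be tiered up to full-codegen
// baseline code, from which it may later be optimized on a subsequent tick.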

void RuntimeProfiler::AttemptOnStackReplacement(JSFunction* function,
                                                int loop_nesting_levels) {
  SharedFunctionInfo* shared = function->shared();
  if (!FLAG_use_osr || function->shared()->IsBuiltin()) {
    return;
  }

  // If the code is not optimizable, don't try OSR.
  if (shared->optimization_disabled()) return;

  // We are not prepared to do OSR for a function that already has an
  // allocated arguments object. The optimized code would bypass it for
  // arguments accesses, which is unsound. Don't try OSR.
  if (shared->uses_arguments()) return;

  // We're using on-stack replacement: patch the unoptimized code so that
  // any back edge in any unoptimized frame will trigger on-stack
  // replacement for that frame.
  if (FLAG_trace_osr) {
    PrintF("[OSR - patching back edges in ");
    function->PrintName();
    PrintF("]\n");
  }

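  // Our reading (the mechanics live in BackEdgeTable, not in this file): each
  // Patch call raises by one the loop nesting depth at which back edges
  // trigger OSR, so this loop arms loop_nesting_levels nesting levels.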
  for (int i = 0; i < loop_nesting_levels; i++) {
    BackEdgeTable::Patch(isolate_, shared->code());
  }
}

void RuntimeProfiler::MaybeOptimizeFullCodegen(JSFunction* function,
                                               int frame_count,
                                               bool frame_optimized) {
  SharedFunctionInfo* shared = function->shared();
  Code* shared_code = shared->code();
  if (shared_code->kind() != Code::FUNCTION) return;
  if (function->IsInOptimizationQueue()) return;

  if (FLAG_always_osr) {
    AttemptOnStackReplacement(function, Code::kMaxLoopNestingMarker);
    // Fall through and do a normal optimized compile as well.
  } else if (!frame_optimized &&
             (function->IsMarkedForOptimization() ||
              function->IsMarkedForConcurrentOptimization() ||
              function->IsOptimized())) {
    // Attempt OSR if we are still running unoptimized code even though
    // the function has long been marked or even already been optimized.
    int ticks = shared_code->profiler_ticks();
    int64_t allowance =
        kOSRCodeSizeAllowanceBase +
        static_cast<int64_t>(ticks) * kOSRCodeSizeAllowancePerTick;
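    // The allowance grows with every tick, so even a large function
    // eventually becomes eligible for OSR; until then we only bump the tick
    // count, and once the ticks saturate ProfilerTicksField we attempt OSR
    // regardless of code size.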
    if (shared_code->CodeSize() > allowance &&
        ticks < Code::ProfilerTicksField::kMax) {
      shared_code->set_profiler_ticks(ticks + 1);
    } else {
      AttemptOnStackReplacement(function);
    }
    return;
  }

  // Only record top-level code on top of the execution stack and
  // avoid optimizing excessively large scripts since top-level code
  // will be executed only once.
  const int kMaxToplevelSourceSize = 10 * 1024;
  if (shared->is_toplevel() &&
      (frame_count > 1 || shared->SourceSize() > kMaxToplevelSourceSize)) {
    return;
  }

  // Do not record non-optimizable functions.
  if (shared->optimization_disabled()) {
    if (shared->deopt_count() >= FLAG_max_opt_count) {
      // If optimization was disabled due to many deoptimizations,
      // then check if the function is hot and try to reenable optimization.
      int ticks = shared_code->profiler_ticks();
      if (ticks >= kProfilerTicksBeforeReenablingOptimization) {
        shared_code->set_profiler_ticks(0);
        shared->TryReenableOptimization();
      } else {
        shared_code->set_profiler_ticks(ticks + 1);
      }
    }
    return;
  }
  if (function->IsOptimized()) return;

  int ticks = shared_code->profiler_ticks();

  if (ticks >= kProfilerTicksBeforeOptimization) {
    int typeinfo, generic, total, type_percentage, generic_percentage;
    GetICCounts(shared, &typeinfo, &generic, &total, &type_percentage,
                &generic_percentage);
    if (type_percentage >= FLAG_type_info_threshold &&
        generic_percentage <= FLAG_generic_ic_threshold) {
      // If this particular function hasn't had any ICs patched for enough
      // ticks, optimize it now.
      Optimize(function, "hot and stable");
    } else if (ticks >= kTicksWhenNotEnoughTypeInfo) {
      Optimize(function, "not much type info but very hot");
    } else {
      shared_code->set_profiler_ticks(ticks + 1);
      if (FLAG_trace_opt_verbose) {
        PrintF("[not yet optimizing ");
        function->PrintName();
        PrintF(", not enough type info: %d/%d (%d%%)]\n", typeinfo, total,
               type_percentage);
      }
    }
  } else if (!any_ic_changed_ &&
             shared_code->instruction_size() < kMaxSizeEarlyOpt) {
    // If no IC was patched since the last tick and this function is very
    // small, optimistically optimize it now.
    int typeinfo, generic, total, type_percentage, generic_percentage;
    GetICCounts(shared, &typeinfo, &generic, &total, &type_percentage,
                &generic_percentage);
    if (type_percentage >= FLAG_type_info_threshold &&
        generic_percentage <= FLAG_generic_ic_threshold) {
      Optimize(function, "small function");
    } else {
      shared_code->set_profiler_ticks(ticks + 1);
    }
  } else {
    shared_code->set_profiler_ticks(ticks + 1);
  }
}

void RuntimeProfiler::MaybeOptimizeIgnition(JSFunction* function) {
  if (function->IsInOptimizationQueue()) return;

  SharedFunctionInfo* shared = function->shared();
  int ticks = shared->profiler_ticks();

  // TODO(rmcilroy): Also ensure we only OSR top-level code if it is smaller
  // than kMaxToplevelSourceSize.
  // TODO(rmcilroy): Consider whether we should optimize small functions when
  // they are first seen on the stack (e.g., kMaxSizeEarlyOpt).

  if (function->IsMarkedForBaseline() || function->IsMarkedForOptimization() ||
      function->IsMarkedForConcurrentOptimization() ||
      function->IsOptimized()) {
    // TODO(rmcilroy): Support OSR in these cases.
    return;
  }

  if (shared->optimization_disabled() &&
      shared->disable_optimization_reason() == kOptimizationDisabledForTest) {
    // Don't baseline functions which have been marked by NeverOptimizeFunction
    // in a test.
    return;
  }

  if (ticks >= kProfilerTicksBeforeBaseline) {
    Baseline(function, "hot enough for baseline");
  }
}

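// Entry point for a profiler tick. In this revision the call site is, to the
// best of our knowledge, the stack-guard interrupt handler in
// src/execution.cc (StackGuard::HandleInterrupts).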
void RuntimeProfiler::MarkCandidatesForOptimization() {
  HandleScope scope(isolate_);

  if (!isolate_->use_crankshaft()) return;

  DisallowHeapAllocation no_gc;

  // Run through the JavaScript frames, bump each function's profiler tick
  // count, and mark hot functions for optimization (eagerly or lazily).
  int frame_count = 0;
  int frame_count_limit = FLAG_frame_count;
  for (JavaScriptFrameIterator it(isolate_);
       frame_count++ < frame_count_limit && !it.done();
       it.Advance()) {
    JavaScriptFrame* frame = it.frame();
    JSFunction* function = frame->function();

    List<JSFunction*> functions(4);
    frame->GetFunctions(&functions);
    for (int i = functions.length(); --i >= 0;) {
      SharedFunctionInfo* shared_function_info = functions[i]->shared();
      int ticks = shared_function_info->profiler_ticks();
      if (ticks < Smi::kMaxValue) {
        shared_function_info->set_profiler_ticks(ticks + 1);
      }
    }

    if (frame->is_interpreted()) {
      DCHECK(!frame->is_optimized());
      MaybeOptimizeIgnition(function);
    } else {
      MaybeOptimizeFullCodegen(function, frame_count, frame->is_optimized());
    }
  }
  any_ic_changed_ = false;
}


}  // namespace internal
}  // namespace v8