blob: c3ec835ff9a04d58f0787d22566274ecbaabbd0b [file] [log] [blame]
// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/optimizing-compile-dispatcher.h"

#include "src/base/atomicops.h"
#include "src/full-codegen/full-codegen.h"
#include "src/isolate.h"
#include "src/tracing/trace-event.h"
#include "src/v8.h"

namespace v8 {
namespace internal {

16namespace {
17
Ben Murdochc5610432016-08-08 18:44:38 +010018void DisposeCompilationJob(CompilationJob* job, bool restore_function_code) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000019 if (restore_function_code) {
Ben Murdochc5610432016-08-08 18:44:38 +010020 Handle<JSFunction> function = job->info()->closure();
Ben Murdochda12d292016-06-02 14:46:10 +010021 function->ReplaceCode(function->shared()->code());
Ben Murdoch61f157c2016-09-16 13:49:30 +010022 // TODO(mvstanton): We can't call ensureliterals here due to allocation,
23 // but we probably shouldn't call ReplaceCode either, as this
24 // sometimes runs on the worker thread!
25 // JSFunction::EnsureLiterals(function);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000026 }
Ben Murdochc5610432016-08-08 18:44:38 +010027 delete job;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000028}
29
30} // namespace
31
32
33class OptimizingCompileDispatcher::CompileTask : public v8::Task {
34 public:
35 explicit CompileTask(Isolate* isolate) : isolate_(isolate) {
36 OptimizingCompileDispatcher* dispatcher =
37 isolate_->optimizing_compile_dispatcher();
38 base::LockGuard<base::Mutex> lock_guard(&dispatcher->ref_count_mutex_);
39 ++dispatcher->ref_count_;
40 }
41
42 virtual ~CompileTask() {}
43
44 private:
45 // v8::Task overrides.
46 void Run() override {
47 DisallowHeapAllocation no_allocation;
48 DisallowHandleAllocation no_handles;
49 DisallowHandleDereference no_deref;
50
51 OptimizingCompileDispatcher* dispatcher =
52 isolate_->optimizing_compile_dispatcher();
53 {
54 TimerEventScope<TimerEventRecompileConcurrent> timer(isolate_);
Ben Murdoch097c5b22016-05-18 11:27:45 +010055 TRACE_EVENT0("v8", "V8.RecompileConcurrent");
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000056
57 if (dispatcher->recompilation_delay_ != 0) {
58 base::OS::Sleep(base::TimeDelta::FromMilliseconds(
59 dispatcher->recompilation_delay_));
60 }
61
62 dispatcher->CompileNext(dispatcher->NextInput(true));
63 }
64 {
65 base::LockGuard<base::Mutex> lock_guard(&dispatcher->ref_count_mutex_);
66 if (--dispatcher->ref_count_ == 0) {
67 dispatcher->ref_count_zero_.NotifyOne();
68 }
69 }
70 }
71
72 Isolate* isolate_;
73
74 DISALLOW_COPY_AND_ASSIGN(CompileTask);
75};
76
77
78OptimizingCompileDispatcher::~OptimizingCompileDispatcher() {
79#ifdef DEBUG
80 {
81 base::LockGuard<base::Mutex> lock_guard(&ref_count_mutex_);
82 DCHECK_EQ(0, ref_count_);
83 }
84#endif
85 DCHECK_EQ(0, input_queue_length_);
86 DeleteArray(input_queue_);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000087}
88
Ben Murdochc5610432016-08-08 18:44:38 +010089CompilationJob* OptimizingCompileDispatcher::NextInput(bool check_if_flushing) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000090 base::LockGuard<base::Mutex> access_input_queue_(&input_queue_mutex_);
91 if (input_queue_length_ == 0) return NULL;
Ben Murdochc5610432016-08-08 18:44:38 +010092 CompilationJob* job = input_queue_[InputQueueIndex(0)];
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000093 DCHECK_NOT_NULL(job);
94 input_queue_shift_ = InputQueueIndex(1);
95 input_queue_length_--;
96 if (check_if_flushing) {
97 if (static_cast<ModeFlag>(base::Acquire_Load(&mode_)) == FLUSH) {
Ben Murdochc5610432016-08-08 18:44:38 +010098 AllowHandleDereference allow_handle_dereference;
99 DisposeCompilationJob(job, true);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000100 return NULL;
101 }
102 }
103 return job;
104}
105
Ben Murdochc5610432016-08-08 18:44:38 +0100106void OptimizingCompileDispatcher::CompileNext(CompilationJob* job) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000107 if (!job) return;
108
109 // The function may have already been optimized by OSR. Simply continue.
Ben Murdochc5610432016-08-08 18:44:38 +0100110 CompilationJob::Status status = job->OptimizeGraph();
111 USE(status); // Prevent an unused-variable error.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000112
113 // The function may have already been optimized by OSR. Simply continue.
114 // Use a mutex to make sure that functions marked for install
115 // are always also queued.
116 base::LockGuard<base::Mutex> access_output_queue_(&output_queue_mutex_);
117 output_queue_.push(job);
118 isolate_->stack_guard()->RequestInstallCode();
119}
120
121
122void OptimizingCompileDispatcher::FlushOutputQueue(bool restore_function_code) {
123 for (;;) {
Ben Murdochc5610432016-08-08 18:44:38 +0100124 CompilationJob* job = NULL;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000125 {
126 base::LockGuard<base::Mutex> access_output_queue_(&output_queue_mutex_);
127 if (output_queue_.empty()) return;
128 job = output_queue_.front();
129 output_queue_.pop();
130 }
131
Ben Murdochc5610432016-08-08 18:44:38 +0100132 DisposeCompilationJob(job, restore_function_code);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000133 }
134}
135
136
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000137void OptimizingCompileDispatcher::Flush() {
138 base::Release_Store(&mode_, static_cast<base::AtomicWord>(FLUSH));
139 if (FLAG_block_concurrent_recompilation) Unblock();
140 {
141 base::LockGuard<base::Mutex> lock_guard(&ref_count_mutex_);
142 while (ref_count_ > 0) ref_count_zero_.Wait(&ref_count_mutex_);
143 base::Release_Store(&mode_, static_cast<base::AtomicWord>(COMPILE));
144 }
145 FlushOutputQueue(true);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000146 if (FLAG_trace_concurrent_recompilation) {
147 PrintF(" ** Flushed concurrent recompilation queues.\n");
148 }
149}
150
151
152void OptimizingCompileDispatcher::Stop() {
153 base::Release_Store(&mode_, static_cast<base::AtomicWord>(FLUSH));
154 if (FLAG_block_concurrent_recompilation) Unblock();
155 {
156 base::LockGuard<base::Mutex> lock_guard(&ref_count_mutex_);
157 while (ref_count_ > 0) ref_count_zero_.Wait(&ref_count_mutex_);
158 base::Release_Store(&mode_, static_cast<base::AtomicWord>(COMPILE));
159 }
160
161 if (recompilation_delay_ != 0) {
162 // At this point the optimizing compiler thread's event loop has stopped.
163 // There is no need for a mutex when reading input_queue_length_.
164 while (input_queue_length_ > 0) CompileNext(NextInput());
165 InstallOptimizedFunctions();
166 } else {
167 FlushOutputQueue(false);
168 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000169}
170
171
172void OptimizingCompileDispatcher::InstallOptimizedFunctions() {
173 HandleScope handle_scope(isolate_);
174
175 for (;;) {
Ben Murdochc5610432016-08-08 18:44:38 +0100176 CompilationJob* job = NULL;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000177 {
178 base::LockGuard<base::Mutex> access_output_queue_(&output_queue_mutex_);
179 if (output_queue_.empty()) return;
180 job = output_queue_.front();
181 output_queue_.pop();
182 }
183 CompilationInfo* info = job->info();
184 Handle<JSFunction> function(*info->closure());
Ben Murdochda12d292016-06-02 14:46:10 +0100185 if (function->IsOptimized()) {
186 if (FLAG_trace_concurrent_recompilation) {
187 PrintF(" ** Aborting compilation for ");
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000188 function->ShortPrint();
Ben Murdochda12d292016-06-02 14:46:10 +0100189 PrintF(" as it has already been optimized.\n");
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000190 }
Ben Murdochc5610432016-08-08 18:44:38 +0100191 DisposeCompilationJob(job, false);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000192 } else {
Ben Murdochc5610432016-08-08 18:44:38 +0100193 Compiler::FinalizeCompilationJob(job);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000194 }
195 }
196}
197
Ben Murdochc5610432016-08-08 18:44:38 +0100198void OptimizingCompileDispatcher::QueueForOptimization(CompilationJob* job) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000199 DCHECK(IsQueueAvailable());
Ben Murdochda12d292016-06-02 14:46:10 +0100200 {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000201 // Add job to the back of the input queue.
202 base::LockGuard<base::Mutex> access_input_queue(&input_queue_mutex_);
203 DCHECK_LT(input_queue_length_, input_queue_capacity_);
204 input_queue_[InputQueueIndex(input_queue_length_)] = job;
205 input_queue_length_++;
206 }
207 if (FLAG_block_concurrent_recompilation) {
208 blocked_jobs_++;
209 } else {
210 V8::GetCurrentPlatform()->CallOnBackgroundThread(
211 new CompileTask(isolate_), v8::Platform::kShortRunningTask);
212 }
213}
214
215
216void OptimizingCompileDispatcher::Unblock() {
217 while (blocked_jobs_ > 0) {
218 V8::GetCurrentPlatform()->CallOnBackgroundThread(
219 new CompileTask(isolate_), v8::Platform::kShortRunningTask);
220 blocked_jobs_--;
221 }
222}
223
224
}  // namespace internal
}  // namespace v8