// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005#include "src/runtime/runtime-utils.h"
Emily Bernierd0a1eb72015-03-24 16:35:39 -04006
7#include "src/arguments.h"
8#include "src/compiler.h"
9#include "src/deoptimizer.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000010#include "src/frames-inl.h"
11#include "src/full-codegen/full-codegen.h"
Emily Bernierd0a1eb72015-03-24 16:35:39 -040012#include "src/isolate-inl.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000013#include "src/messages.h"
Emily Bernierd0a1eb72015-03-24 16:35:39 -040014#include "src/v8threads.h"
15#include "src/vm-state-inl.h"
16
17namespace v8 {
18namespace internal {
19
20RUNTIME_FUNCTION(Runtime_CompileLazy) {
21 HandleScope scope(isolate);
22 DCHECK(args.length() == 1);
23 CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
24#ifdef DEBUG
25 if (FLAG_trace_lazy && !function->shared()->is_compiled()) {
26 PrintF("[unoptimized: ");
27 function->PrintName();
28 PrintF("]\n");
29 }
30#endif
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000031 StackLimitCheck check(isolate);
32 if (check.JsHasOverflowed(1 * KB)) return isolate->StackOverflow();
Emily Bernierd0a1eb72015-03-24 16:35:39 -040033
34 // Compile the target function.
35 DCHECK(function->shared()->allows_lazy_compilation());
36
37 Handle<Code> code;
38 ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, code,
39 Compiler::GetLazyCode(function));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000040 DCHECK(code->IsJavaScriptCode());
41
Emily Bernierd0a1eb72015-03-24 16:35:39 -040042 function->ReplaceCode(*code);
43 return *code;
44}
45
46
namespace {

// Attempts to compile an optimized version of |function| (concurrently or
// not, per |mode|) and install it. On failure, falls back to the existing
// or freshly compiled unoptimized code. Returns the code now installed on
// the function, or the exception sentinel on stack overflow / pending
// exception.
Object* CompileOptimized(Isolate* isolate, Handle<JSFunction> function,
                         Compiler::ConcurrencyMode mode) {
  // Report a real stack overflow if we are too close to the limit.
  StackLimitCheck check(isolate);
  if (check.JsHasOverflowed(1 * KB)) return isolate->StackOverflow();

  Handle<Code> code;
  Handle<Code> unoptimized(function->shared()->code());
  if (Compiler::GetOptimizedCode(function, unoptimized, mode).ToHandle(&code)) {
    // Optimization succeeded, return optimized code.
    function->ReplaceCode(*code);
  } else {
    // Optimization failed, get unoptimized code.
    if (isolate->has_pending_exception()) {  // Possible stack overflow.
      return isolate->heap()->exception();
    }
    // Re-read the shared code: the failed attempt may have changed it.
    code = Handle<Code>(function->shared()->code(), isolate);
    if (code->kind() != Code::FUNCTION &&
        code->kind() != Code::OPTIMIZED_FUNCTION) {
      // The shared code is not usable (e.g. a builtin/stub) — compile
      // unoptimized code now.
      ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
          isolate, code, Compiler::GetUnoptimizedCode(function));
    }
    function->ReplaceCode(*code);
  }

  // With concurrent compilation the function may merely be queued; all other
  // paths must have installed real JavaScript code.
  DCHECK(function->code()->kind() == Code::FUNCTION ||
         function->code()->kind() == Code::OPTIMIZED_FUNCTION ||
         function->IsInOptimizationQueue());
  return function->code();
}

}  // namespace
80
81
82RUNTIME_FUNCTION(Runtime_CompileOptimized_Concurrent) {
83 HandleScope scope(isolate);
84 DCHECK(args.length() == 1);
85 CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
86 return CompileOptimized(isolate, function, Compiler::CONCURRENT);
87}
88
89
90RUNTIME_FUNCTION(Runtime_CompileOptimized_NotConcurrent) {
91 HandleScope scope(isolate);
92 DCHECK(args.length() == 1);
93 CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
94 return CompileOptimized(isolate, function, Compiler::NOT_CONCURRENT);
95}
96
Emily Bernierd0a1eb72015-03-24 16:35:39 -040097
98RUNTIME_FUNCTION(Runtime_NotifyStubFailure) {
99 HandleScope scope(isolate);
100 DCHECK(args.length() == 0);
101 Deoptimizer* deoptimizer = Deoptimizer::Grab(isolate);
102 DCHECK(AllowHeapAllocation::IsAllowed());
103 delete deoptimizer;
104 return isolate->heap()->undefined_value();
105}
106
107
108class ActivationsFinder : public ThreadVisitor {
109 public:
110 Code* code_;
111 bool has_code_activations_;
112
113 explicit ActivationsFinder(Code* code)
114 : code_(code), has_code_activations_(false) {}
115
116 void VisitThread(Isolate* isolate, ThreadLocalTop* top) {
117 JavaScriptFrameIterator it(isolate, top);
118 VisitFrames(&it);
119 }
120
121 void VisitFrames(JavaScriptFrameIterator* it) {
122 for (; !it->done(); it->Advance()) {
123 JavaScriptFrame* frame = it->frame();
124 if (code_->contains(frame->pc())) has_code_activations_ = true;
125 }
126 }
127};
128
129
// Runtime entry invoked after a deoptimization has been performed.
// Materializes heap objects for the deoptimized frame, restores the context
// register, and — for non-lazy bailout types — discards the optimized code
// when no activation of it remains on any thread's stack.
RUNTIME_FUNCTION(Runtime_NotifyDeoptimized) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 1);
  CONVERT_SMI_ARG_CHECKED(type_arg, 0);
  Deoptimizer::BailoutType type =
      static_cast<Deoptimizer::BailoutType>(type_arg);
  // Take ownership of the deoptimizer object the deopt entry left behind.
  Deoptimizer* deoptimizer = Deoptimizer::Grab(isolate);
  DCHECK(AllowHeapAllocation::IsAllowed());

  Handle<JSFunction> function = deoptimizer->function();
  Handle<Code> optimized_code = deoptimizer->compiled_code();

  DCHECK(optimized_code->kind() == Code::OPTIMIZED_FUNCTION);
  DCHECK(type == deoptimizer->bailout_type());

  // Make sure to materialize objects before causing any allocation.
  JavaScriptFrameIterator it(isolate);
  deoptimizer->MaterializeHeapObjects(&it);
  delete deoptimizer;

  // |it| now points at the deoptimized function's frame.
  JavaScriptFrame* frame = it.frame();
  RUNTIME_ASSERT(frame->function()->IsJSFunction());
  DCHECK(frame->function() == *function);

  // Ensure the context register is updated for materialized objects.
  JavaScriptFrameIterator top_it(isolate);
  JavaScriptFrame* top_frame = top_it.frame();
  isolate->set_context(Context::cast(top_frame->context()));

  if (type == Deoptimizer::LAZY) {
    // Lazy deopt: the code is already marked; nothing more to do here.
    return isolate->heap()->undefined_value();
  }

  // Search for other activations of the same function and code, on this
  // stack (continuing from |it|) and on all archived threads.
  ActivationsFinder activations_finder(*optimized_code);
  activations_finder.VisitFrames(&it);
  isolate->thread_manager()->IterateArchivedThreads(&activations_finder);

  if (!activations_finder.has_code_activations_) {
    // No live activations: safe to drop the optimized code entirely.
    if (function->code() == *optimized_code) {
      if (FLAG_trace_deopt) {
        PrintF("[removing optimized code for: ");
        function->PrintName();
        PrintF("]\n");
      }
      function->ReplaceCode(function->shared()->code());
    }
    // Evict optimized code for this function from the cache so that it
    // doesn't get used for new closures.
    function->shared()->EvictFromOptimizedCodeMap(*optimized_code,
                                                  "notify deoptimized");
  } else {
    // TODO(titzer): we should probably do DeoptimizeCodeList(code)
    // unconditionally if the code is not already marked for deoptimization.
    // If there is an index by shared function info, all the better.
    Deoptimizer::DeoptimizeFunction(*function);
  }

  return isolate->heap()->undefined_value();
}
190
191
192static bool IsSuitableForOnStackReplacement(Isolate* isolate,
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000193 Handle<JSFunction> function) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400194 // Keep track of whether we've succeeded in optimizing.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000195 if (function->shared()->optimization_disabled()) return false;
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400196 // If we are trying to do OSR when there are already optimized
197 // activations of the function, it means (a) the function is directly or
198 // indirectly recursive and (b) an optimized invocation has been
199 // deoptimized so that we are currently in an unoptimized activation.
200 // Check for optimized activations of this function.
201 for (JavaScriptFrameIterator it(isolate); !it.done(); it.Advance()) {
202 JavaScriptFrame* frame = it.frame();
203 if (frame->is_optimized() && frame->function() == *function) return false;
204 }
205
206 return true;
207}
208
209
// Runtime entry triggered from a hot back edge: attempts on-stack
// replacement of the caller's unoptimized code. May poll or queue a
// concurrent OSR job, or compile synchronously. Returns the optimized code
// object on success, or NULL (Smi zero via the runtime calling convention is
// not used here — callers test for a code object) when OSR is not ready,
// queued, or failed.
RUNTIME_FUNCTION(Runtime_CompileForOnStackReplacement) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 1);
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
  Handle<Code> caller_code(function->shared()->code());

  // We're not prepared to handle a function with arguments object.
  DCHECK(!function->shared()->uses_arguments());

  RUNTIME_ASSERT(FLAG_use_osr);

  // Passing the PC in the javascript frame from the caller directly is
  // not GC safe, so we walk the stack to get it.
  JavaScriptFrameIterator it(isolate);
  JavaScriptFrame* frame = it.frame();
  if (!caller_code->contains(frame->pc())) {
    // Code on the stack may not be the code object referenced by the shared
    // function info. It may have been replaced to include deoptimization
    // data.
    caller_code = Handle<Code>(frame->LookupCode());
  }

  // Offset of the back edge within the caller's code; identifies the loop.
  uint32_t pc_offset =
      static_cast<uint32_t>(frame->pc() - caller_code->instruction_start());

#ifdef DEBUG
  DCHECK_EQ(frame->function(), *function);
  DCHECK_EQ(frame->LookupCode(), *caller_code);
  DCHECK(caller_code->contains(frame->pc()));
#endif  // DEBUG

  BailoutId ast_id = caller_code->TranslatePcOffsetToAstId(pc_offset);
  DCHECK(!ast_id.IsNone());

  // Disable concurrent OSR for asm.js, to enable frame specialization.
  // Small functions (<= 512 AST nodes) are also compiled synchronously.
  Compiler::ConcurrencyMode mode = (isolate->concurrent_osr_enabled() &&
                                    !function->shared()->asm_function() &&
                                    function->shared()->ast_node_count() > 512)
                                       ? Compiler::CONCURRENT
                                       : Compiler::NOT_CONCURRENT;
  Handle<Code> result = Handle<Code>::null();

  OptimizedCompileJob* job = NULL;
  if (mode == Compiler::CONCURRENT) {
    // Gate the OSR entry with a stack check.
    BackEdgeTable::AddStackCheck(caller_code, pc_offset);
    // Poll already queued compilation jobs.
    OptimizingCompileDispatcher* dispatcher =
        isolate->optimizing_compile_dispatcher();
    if (dispatcher->IsQueuedForOSR(function, ast_id)) {
      // A job for this loop is still compiling in the background.
      if (FLAG_trace_osr) {
        PrintF("[OSR - Still waiting for queued: ");
        function->PrintName();
        PrintF(" at AST id %d]\n", ast_id.ToInt());
      }
      return NULL;
    }

    job = dispatcher->FindReadyOSRCandidate(function, ast_id);
  }

  if (job != NULL) {
    // A background job finished; install its result.
    if (FLAG_trace_osr) {
      PrintF("[OSR - Found ready: ");
      function->PrintName();
      PrintF(" at AST id %d]\n", ast_id.ToInt());
    }
    result = Compiler::GetConcurrentlyOptimizedCode(job);
  } else if (IsSuitableForOnStackReplacement(isolate, function)) {
    if (FLAG_trace_osr) {
      PrintF("[OSR - Compiling: ");
      function->PrintName();
      PrintF(" at AST id %d]\n", ast_id.ToInt());
    }
    // Only pass the frame for specialization when compiling synchronously;
    // a concurrent job must not hold a pointer into the stack.
    MaybeHandle<Code> maybe_result = Compiler::GetOptimizedCode(
        function, caller_code, mode, ast_id,
        (mode == Compiler::NOT_CONCURRENT) ? frame : nullptr);
    if (maybe_result.ToHandle(&result) &&
        result.is_identical_to(isolate->builtins()->InOptimizationQueue())) {
      // Optimization is queued. Return to check later.
      return NULL;
    }
  }

  // Revert the patched back edge table, regardless of whether OSR succeeds.
  BackEdgeTable::Revert(isolate, *caller_code);

  // Check whether we ended up with usable optimized code.
  if (!result.is_null() && result->kind() == Code::OPTIMIZED_FUNCTION) {
    DeoptimizationInputData* data =
        DeoptimizationInputData::cast(result->deoptimization_data());

    // A non-negative OSR pc offset means the code has an OSR entry point.
    if (data->OsrPcOffset()->value() >= 0) {
      DCHECK(BailoutId(data->OsrAstId()->value()) == ast_id);
      if (FLAG_trace_osr) {
        PrintF("[OSR - Entry at AST id %d, offset %d in optimized code]\n",
               ast_id.ToInt(), data->OsrPcOffset()->value());
      }
      // TODO(titzer): this is a massive hack to make the deopt counts
      // match. Fix heuristics for reenabling optimizations!
      function->shared()->increment_deopt_count();

      if (result->is_turbofanned()) {
        // TurboFanned OSR code cannot be installed into the function.
        // But the function is obviously hot, so optimize it next time.
        function->ReplaceCode(
            isolate->builtins()->builtin(Builtins::kCompileOptimized));
      } else {
        // Crankshafted OSR code can be installed into the function.
        function->ReplaceCode(*result);
      }
      return *result;
    }
  }

  // Failed.
  if (FLAG_trace_osr) {
    PrintF("[OSR - Failed: ");
    function->PrintName();
    PrintF(" at AST id %d]\n", ast_id.ToInt());
  }

  if (!function->IsOptimized()) {
    function->ReplaceCode(function->shared()->code());
  }
  return NULL;
}
337
338
339RUNTIME_FUNCTION(Runtime_TryInstallOptimizedCode) {
340 HandleScope scope(isolate);
341 DCHECK(args.length() == 1);
342 CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
343
344 // First check if this is a real stack overflow.
345 StackLimitCheck check(isolate);
346 if (check.JsHasOverflowed()) {
347 SealHandleScope shs(isolate);
348 return isolate->StackOverflow();
349 }
350
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000351 isolate->optimizing_compile_dispatcher()->InstallOptimizedFunctions();
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400352 return (function->IsOptimized()) ? function->code()
353 : function->shared()->code();
354}
355
356
357bool CodeGenerationFromStringsAllowed(Isolate* isolate,
358 Handle<Context> context) {
359 DCHECK(context->allow_code_gen_from_strings()->IsFalse());
360 // Check with callback if set.
361 AllowCodeGenerationFromStringsCallback callback =
362 isolate->allow_code_gen_callback();
363 if (callback == NULL) {
364 // No callback set and code generation disallowed.
365 return false;
366 } else {
367 // Callback set. Let it decide if code generation is allowed.
368 VMState<EXTERNAL> state(isolate);
369 return callback(v8::Utils::ToLocal(context));
370 }
371}
372
373
// Compiles |source| for a direct eval call in the current context.
// Throws an EvalError (and returns the exception sentinel) when the native
// context — and the embedder callback — disallow code generation from
// strings; returns the exception sentinel on compilation failure.
static Object* CompileGlobalEval(Isolate* isolate, Handle<String> source,
                                 Handle<SharedFunctionInfo> outer_info,
                                 LanguageMode language_mode,
                                 int scope_position) {
  Handle<Context> context = Handle<Context>(isolate->context());
  Handle<Context> native_context = Handle<Context>(context->native_context());

  // Check if native context allows code generation from
  // strings. Throw an exception if it doesn't.
  if (native_context->allow_code_gen_from_strings()->IsFalse() &&
      !CodeGenerationFromStringsAllowed(isolate, native_context)) {
    Handle<Object> error_message =
        native_context->ErrorMessageForCodeGenerationFromStrings();
    Handle<Object> error;
    MaybeHandle<Object> maybe_error = isolate->factory()->NewEvalError(
        MessageTemplate::kCodeGenFromStrings, error_message);
    // Creating the error itself can fail; only throw if it was created.
    if (maybe_error.ToHandle(&error)) isolate->Throw(*error);
    return isolate->heap()->exception();
  }

  // Deal with a normal eval call with a string argument. Compile it
  // and return the compiled function bound in the local context.
  static const ParseRestriction restriction = NO_PARSE_RESTRICTION;
  Handle<JSFunction> compiled;
  ASSIGN_RETURN_ON_EXCEPTION_VALUE(
      isolate, compiled,
      Compiler::GetFunctionFromEval(source, outer_info, context, language_mode,
                                    restriction, scope_position),
      isolate->heap()->exception());
  return *compiled;
}
405
406
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000407RUNTIME_FUNCTION(Runtime_ResolvePossiblyDirectEval) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400408 HandleScope scope(isolate);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000409 DCHECK(args.length() == 5);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400410
411 Handle<Object> callee = args.at<Object>(0);
412
413 // If "eval" didn't refer to the original GlobalEval, it's not a
414 // direct call to eval.
415 // (And even if it is, but the first argument isn't a string, just let
416 // execution default to an indirect call to eval, which will also return
417 // the first argument without doing anything).
418 if (*callee != isolate->native_context()->global_eval_fun() ||
419 !args[1]->IsString()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000420 return *callee;
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400421 }
422
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000423 DCHECK(args[3]->IsSmi());
424 DCHECK(is_valid_language_mode(args.smi_at(3)));
425 LanguageMode language_mode = static_cast<LanguageMode>(args.smi_at(3));
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400426 DCHECK(args[4]->IsSmi());
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400427 Handle<SharedFunctionInfo> outer_info(args.at<JSFunction>(2)->shared(),
428 isolate);
429 return CompileGlobalEval(isolate, args.at<String>(1), outer_info,
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000430 language_mode, args.smi_at(4));
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400431}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000432} // namespace internal
433} // namespace v8