blob: ebd0c13f0f9b1ef2e2bb44d5bfcb4d017d5e041c [file] [log] [blame]
// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
4
5#include "src/v8.h"
6
7#include "src/arguments.h"
8#include "src/compiler.h"
9#include "src/deoptimizer.h"
10#include "src/frames.h"
11#include "src/full-codegen.h"
12#include "src/isolate-inl.h"
13#include "src/runtime/runtime-utils.h"
14#include "src/v8threads.h"
15#include "src/vm-state-inl.h"
16
17namespace v8 {
18namespace internal {
19
20RUNTIME_FUNCTION(Runtime_CompileLazy) {
21 HandleScope scope(isolate);
22 DCHECK(args.length() == 1);
23 CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
24#ifdef DEBUG
25 if (FLAG_trace_lazy && !function->shared()->is_compiled()) {
26 PrintF("[unoptimized: ");
27 function->PrintName();
28 PrintF("]\n");
29 }
30#endif
31
32 // Compile the target function.
33 DCHECK(function->shared()->allows_lazy_compilation());
34
35 Handle<Code> code;
36 ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, code,
37 Compiler::GetLazyCode(function));
38 DCHECK(code->kind() == Code::FUNCTION ||
39 code->kind() == Code::OPTIMIZED_FUNCTION);
40 function->ReplaceCode(*code);
41 return *code;
42}
43
44
45RUNTIME_FUNCTION(Runtime_CompileOptimized) {
46 HandleScope scope(isolate);
47 DCHECK(args.length() == 2);
48 CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
49 CONVERT_BOOLEAN_ARG_CHECKED(concurrent, 1);
50 DCHECK(isolate->use_crankshaft());
51
52 Handle<Code> unoptimized(function->shared()->code());
53 if (function->shared()->optimization_disabled() ||
54 isolate->DebuggerHasBreakPoints()) {
55 // If the function is not optimizable or debugger is active continue
56 // using the code from the full compiler.
57 if (FLAG_trace_opt) {
58 PrintF("[failed to optimize ");
59 function->PrintName();
60 PrintF(": is code optimizable: %s, is debugger enabled: %s]\n",
61 function->shared()->optimization_disabled() ? "F" : "T",
62 isolate->DebuggerHasBreakPoints() ? "T" : "F");
63 }
64 function->ReplaceCode(*unoptimized);
65 return function->code();
66 }
67
68 Compiler::ConcurrencyMode mode =
69 concurrent ? Compiler::CONCURRENT : Compiler::NOT_CONCURRENT;
70 Handle<Code> code;
71 if (Compiler::GetOptimizedCode(function, unoptimized, mode).ToHandle(&code)) {
72 function->ReplaceCode(*code);
73 } else {
74 function->ReplaceCode(function->shared()->code());
75 }
76
77 DCHECK(function->code()->kind() == Code::FUNCTION ||
78 function->code()->kind() == Code::OPTIMIZED_FUNCTION ||
79 function->IsInOptimizationQueue());
80 return function->code();
81}
82
83
84RUNTIME_FUNCTION(Runtime_NotifyStubFailure) {
85 HandleScope scope(isolate);
86 DCHECK(args.length() == 0);
87 Deoptimizer* deoptimizer = Deoptimizer::Grab(isolate);
88 DCHECK(AllowHeapAllocation::IsAllowed());
89 delete deoptimizer;
90 return isolate->heap()->undefined_value();
91}
92
93
94class ActivationsFinder : public ThreadVisitor {
95 public:
96 Code* code_;
97 bool has_code_activations_;
98
99 explicit ActivationsFinder(Code* code)
100 : code_(code), has_code_activations_(false) {}
101
102 void VisitThread(Isolate* isolate, ThreadLocalTop* top) {
103 JavaScriptFrameIterator it(isolate, top);
104 VisitFrames(&it);
105 }
106
107 void VisitFrames(JavaScriptFrameIterator* it) {
108 for (; !it->done(); it->Advance()) {
109 JavaScriptFrame* frame = it->frame();
110 if (code_->contains(frame->pc())) has_code_activations_ = true;
111 }
112 }
113};
114
115
// Re-entry point from deoptimized code. Materializes heap objects for the
// deoptimized frame, disposes the Deoptimizer, and — unless running with
// --always-opt or this was a lazy deopt — discards the optimized code if
// no other activation on any thread still uses it.
RUNTIME_FUNCTION(Runtime_NotifyDeoptimized) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 1);
  CONVERT_SMI_ARG_CHECKED(type_arg, 0);
  Deoptimizer::BailoutType type =
      static_cast<Deoptimizer::BailoutType>(type_arg);
  // The deopt entry stub left the Deoptimizer on the isolate for us.
  Deoptimizer* deoptimizer = Deoptimizer::Grab(isolate);
  DCHECK(AllowHeapAllocation::IsAllowed());

  Handle<JSFunction> function = deoptimizer->function();
  Handle<Code> optimized_code = deoptimizer->compiled_code();

  DCHECK(optimized_code->kind() == Code::OPTIMIZED_FUNCTION);
  DCHECK(type == deoptimizer->bailout_type());

  // Make sure to materialize objects before causing any allocation.
  JavaScriptFrameIterator it(isolate);
  deoptimizer->MaterializeHeapObjects(&it);
  delete deoptimizer;

  // `it` now points at the topmost JS frame, i.e. the deoptimized one.
  JavaScriptFrame* frame = it.frame();
  RUNTIME_ASSERT(frame->function()->IsJSFunction());
  DCHECK(frame->function() == *function);

  // Avoid doing too much work when running with --always-opt and keep
  // the optimized code around.
  if (FLAG_always_opt || type == Deoptimizer::LAZY) {
    return isolate->heap()->undefined_value();
  }

  // Search for other activations of the same function and code.
  // Continue scanning from the current frame downward, then also check
  // archived (suspended) threads.
  ActivationsFinder activations_finder(*optimized_code);
  activations_finder.VisitFrames(&it);
  isolate->thread_manager()->IterateArchivedThreads(&activations_finder);

  if (!activations_finder.has_code_activations_) {
    // No live activation of the optimized code remains; uninstall it from
    // the function if it is still the installed code.
    if (function->code() == *optimized_code) {
      if (FLAG_trace_deopt) {
        PrintF("[removing optimized code for: ");
        function->PrintName();
        PrintF("]\n");
      }
      function->ReplaceCode(function->shared()->code());
      // Evict optimized code for this function from the cache so that it
      // doesn't get used for new closures.
      function->shared()->EvictFromOptimizedCodeMap(*optimized_code,
                                                    "notify deoptimized");
    }
  } else {
    // TODO(titzer): we should probably do DeoptimizeCodeList(code)
    // unconditionally if the code is not already marked for deoptimization.
    // If there is an index by shared function info, all the better.
    Deoptimizer::DeoptimizeFunction(*function);
  }

  return isolate->heap()->undefined_value();
}
173
174
175static bool IsSuitableForOnStackReplacement(Isolate* isolate,
176 Handle<JSFunction> function,
177 Handle<Code> current_code) {
178 // Keep track of whether we've succeeded in optimizing.
179 if (!current_code->optimizable()) return false;
180 // If we are trying to do OSR when there are already optimized
181 // activations of the function, it means (a) the function is directly or
182 // indirectly recursive and (b) an optimized invocation has been
183 // deoptimized so that we are currently in an unoptimized activation.
184 // Check for optimized activations of this function.
185 for (JavaScriptFrameIterator it(isolate); !it.done(); it.Advance()) {
186 JavaScriptFrame* frame = it.frame();
187 if (frame->is_optimized() && frame->function() == *function) return false;
188 }
189
190 return true;
191}
192
193
// Compiles (or retrieves) optimized code for on-stack replacement of the
// caller's current loop. Returns the optimized code object when an OSR
// entry exists, or NULL when OSR is not (yet) possible — the caller then
// continues in unoptimized code and may retry later.
RUNTIME_FUNCTION(Runtime_CompileForOnStackReplacement) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 1);
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
  Handle<Code> caller_code(function->shared()->code());

  // We're not prepared to handle a function with arguments object.
  DCHECK(!function->shared()->uses_arguments());

  RUNTIME_ASSERT(FLAG_use_osr);

  // Passing the PC in the javascript frame from the caller directly is
  // not GC safe, so we walk the stack to get it.
  JavaScriptFrameIterator it(isolate);
  JavaScriptFrame* frame = it.frame();
  if (!caller_code->contains(frame->pc())) {
    // Code on the stack may not be the code object referenced by the shared
    // function info. It may have been replaced to include deoptimization data.
    caller_code = Handle<Code>(frame->LookupCode());
  }

  // Offset of the back edge within the caller's unoptimized code.
  uint32_t pc_offset =
      static_cast<uint32_t>(frame->pc() - caller_code->instruction_start());

#ifdef DEBUG
  DCHECK_EQ(frame->function(), *function);
  DCHECK_EQ(frame->LookupCode(), *caller_code);
  DCHECK(caller_code->contains(frame->pc()));
#endif  // DEBUG


  // Map the back-edge pc back to the AST id of the OSR-able loop.
  BailoutId ast_id = caller_code->TranslatePcOffsetToAstId(pc_offset);
  DCHECK(!ast_id.IsNone());

  // Only large functions (by AST node count) are compiled concurrently,
  // and only when concurrent OSR is enabled.
  Compiler::ConcurrencyMode mode =
      isolate->concurrent_osr_enabled() &&
      (function->shared()->ast_node_count() > 512)
          ? Compiler::CONCURRENT
          : Compiler::NOT_CONCURRENT;
  Handle<Code> result = Handle<Code>::null();

  OptimizedCompileJob* job = NULL;
  if (mode == Compiler::CONCURRENT) {
    // Gate the OSR entry with a stack check.
    BackEdgeTable::AddStackCheck(caller_code, pc_offset);
    // Poll already queued compilation jobs.
    OptimizingCompilerThread* thread = isolate->optimizing_compiler_thread();
    if (thread->IsQueuedForOSR(function, ast_id)) {
      // A concurrent job for this OSR site is still in flight; come back
      // on a later back-edge trigger.
      if (FLAG_trace_osr) {
        PrintF("[OSR - Still waiting for queued: ");
        function->PrintName();
        PrintF(" at AST id %d]\n", ast_id.ToInt());
      }
      return NULL;
    }

    job = thread->FindReadyOSRCandidate(function, ast_id);
  }

  if (job != NULL) {
    // A concurrent compile finished for this site; collect its result.
    if (FLAG_trace_osr) {
      PrintF("[OSR - Found ready: ");
      function->PrintName();
      PrintF(" at AST id %d]\n", ast_id.ToInt());
    }
    result = Compiler::GetConcurrentlyOptimizedCode(job);
  } else if (IsSuitableForOnStackReplacement(isolate, function, caller_code)) {
    // Start a fresh compile (synchronous, or queued if mode is CONCURRENT).
    if (FLAG_trace_osr) {
      PrintF("[OSR - Compiling: ");
      function->PrintName();
      PrintF(" at AST id %d]\n", ast_id.ToInt());
    }
    MaybeHandle<Code> maybe_result =
        Compiler::GetOptimizedCode(function, caller_code, mode, ast_id);
    if (maybe_result.ToHandle(&result) &&
        result.is_identical_to(isolate->builtins()->InOptimizationQueue())) {
      // Optimization is queued. Return to check later.
      return NULL;
    }
  }

  // Revert the patched back edge table, regardless of whether OSR succeeds.
  BackEdgeTable::Revert(isolate, *caller_code);

  // Check whether we ended up with usable optimized code.
  if (!result.is_null() && result->kind() == Code::OPTIMIZED_FUNCTION) {
    DeoptimizationInputData* data =
        DeoptimizationInputData::cast(result->deoptimization_data());

    // A non-negative OSR pc offset means the code has an OSR entry point.
    if (data->OsrPcOffset()->value() >= 0) {
      DCHECK(BailoutId(data->OsrAstId()->value()) == ast_id);
      if (FLAG_trace_osr) {
        PrintF("[OSR - Entry at AST id %d, offset %d in optimized code]\n",
               ast_id.ToInt(), data->OsrPcOffset()->value());
      }
      // TODO(titzer): this is a massive hack to make the deopt counts
      // match. Fix heuristics for reenabling optimizations!
      function->shared()->increment_deopt_count();

      // TODO(titzer): Do not install code into the function.
      function->ReplaceCode(*result);
      return *result;
    }
  }

  // Failed.
  if (FLAG_trace_osr) {
    PrintF("[OSR - Failed: ");
    function->PrintName();
    PrintF(" at AST id %d]\n", ast_id.ToInt());
  }

  // Make sure the function keeps some runnable code installed.
  if (!function->IsOptimized()) {
    function->ReplaceCode(function->shared()->code());
  }
  return NULL;
}
311
312
313RUNTIME_FUNCTION(Runtime_TryInstallOptimizedCode) {
314 HandleScope scope(isolate);
315 DCHECK(args.length() == 1);
316 CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
317
318 // First check if this is a real stack overflow.
319 StackLimitCheck check(isolate);
320 if (check.JsHasOverflowed()) {
321 SealHandleScope shs(isolate);
322 return isolate->StackOverflow();
323 }
324
325 isolate->optimizing_compiler_thread()->InstallOptimizedFunctions();
326 return (function->IsOptimized()) ? function->code()
327 : function->shared()->code();
328}
329
330
331bool CodeGenerationFromStringsAllowed(Isolate* isolate,
332 Handle<Context> context) {
333 DCHECK(context->allow_code_gen_from_strings()->IsFalse());
334 // Check with callback if set.
335 AllowCodeGenerationFromStringsCallback callback =
336 isolate->allow_code_gen_callback();
337 if (callback == NULL) {
338 // No callback set and code generation disallowed.
339 return false;
340 } else {
341 // Callback set. Let it decide if code generation is allowed.
342 VMState<EXTERNAL> state(isolate);
343 return callback(v8::Utils::ToLocal(context));
344 }
345}
346
347
// Compiles |source| as eval code in the native context (used e.g. by the
// Function constructor). When |function_literal_only| is set, the parser
// only accepts a single function literal, and reported line numbers are
// shifted to compensate for the wrapping of the body.
RUNTIME_FUNCTION(Runtime_CompileString) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 3);
  CONVERT_ARG_HANDLE_CHECKED(String, source, 0);
  CONVERT_BOOLEAN_ARG_CHECKED(function_literal_only, 1);
  CONVERT_SMI_ARG_CHECKED(source_offset, 2);

  // Extract native context.
  Handle<Context> context(isolate->native_context());

  // Check if native context allows code generation from
  // strings. Throw an exception if it doesn't.
  if (context->allow_code_gen_from_strings()->IsFalse() &&
      !CodeGenerationFromStringsAllowed(isolate, context)) {
    Handle<Object> error_message =
        context->ErrorMessageForCodeGenerationFromStrings();
    THROW_NEW_ERROR_RETURN_FAILURE(
        isolate, NewEvalError("code_gen_from_strings",
                              HandleVector<Object>(&error_message, 1)));
  }

  // Compile source string in the native context.
  ParseRestriction restriction = function_literal_only
                                     ? ONLY_SINGLE_FUNCTION_LITERAL
                                     : NO_PARSE_RESTRICTION;
  Handle<SharedFunctionInfo> outer_info(context->closure()->shared(), isolate);
  Handle<JSFunction> fun;
  // On compile failure the macro returns the failure sentinel early.
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
      isolate, fun,
      Compiler::GetFunctionFromEval(source, outer_info, context, SLOPPY,
                                    restriction, RelocInfo::kNoPosition));
  if (function_literal_only) {
    // The actual body is wrapped, which shifts line numbers.
    Handle<Script> script(Script::cast(fun->shared()->script()), isolate);
    if (script->line_offset() == 0) {
      // Record a negative offset so later lookups map back to the
      // position of the literal within the original source string.
      int line_num = Script::GetLineNumber(script, source_offset);
      script->set_line_offset(Smi::FromInt(-line_num));
    }
  }
  return *fun;
}
389
390
// Helper for Runtime_ResolvePossiblyDirectEval: compiles |source| as eval
// code in the current (calling) context. Returns the pair
// (compiled function, receiver) on success, or (exception, NULL) with a
// pending exception scheduled on failure.
static ObjectPair CompileGlobalEval(Isolate* isolate, Handle<String> source,
                                    Handle<SharedFunctionInfo> outer_info,
                                    Handle<Object> receiver,
                                    StrictMode strict_mode,
                                    int scope_position) {
  Handle<Context> context = Handle<Context>(isolate->context());
  Handle<Context> native_context = Handle<Context>(context->native_context());

  // Check if native context allows code generation from
  // strings. Throw an exception if it doesn't.
  if (native_context->allow_code_gen_from_strings()->IsFalse() &&
      !CodeGenerationFromStringsAllowed(isolate, native_context)) {
    Handle<Object> error_message =
        native_context->ErrorMessageForCodeGenerationFromStrings();
    Handle<Object> error;
    MaybeHandle<Object> maybe_error = isolate->factory()->NewEvalError(
        "code_gen_from_strings", HandleVector<Object>(&error_message, 1));
    // Creating the error object may itself fail; only throw if we got one.
    if (maybe_error.ToHandle(&error)) isolate->Throw(*error);
    return MakePair(isolate->heap()->exception(), NULL);
  }

  // Deal with a normal eval call with a string argument. Compile it
  // and return the compiled function bound in the local context.
  static const ParseRestriction restriction = NO_PARSE_RESTRICTION;
  Handle<JSFunction> compiled;
  // On exception the macro returns the (exception, NULL) pair early.
  ASSIGN_RETURN_ON_EXCEPTION_VALUE(
      isolate, compiled,
      Compiler::GetFunctionFromEval(source, outer_info, context, strict_mode,
                                    restriction, scope_position),
      MakePair(isolate->heap()->exception(), NULL));
  return MakePair(*compiled, *receiver);
}
423
424
425RUNTIME_FUNCTION_RETURN_PAIR(Runtime_ResolvePossiblyDirectEval) {
426 HandleScope scope(isolate);
427 DCHECK(args.length() == 6);
428
429 Handle<Object> callee = args.at<Object>(0);
430
431 // If "eval" didn't refer to the original GlobalEval, it's not a
432 // direct call to eval.
433 // (And even if it is, but the first argument isn't a string, just let
434 // execution default to an indirect call to eval, which will also return
435 // the first argument without doing anything).
436 if (*callee != isolate->native_context()->global_eval_fun() ||
437 !args[1]->IsString()) {
438 return MakePair(*callee, isolate->heap()->undefined_value());
439 }
440
441 DCHECK(args[4]->IsSmi());
442 DCHECK(args.smi_at(4) == SLOPPY || args.smi_at(4) == STRICT);
443 StrictMode strict_mode = static_cast<StrictMode>(args.smi_at(4));
444 DCHECK(args[5]->IsSmi());
445 Handle<SharedFunctionInfo> outer_info(args.at<JSFunction>(2)->shared(),
446 isolate);
447 return CompileGlobalEval(isolate, args.at<String>(1), outer_info,
448 args.at<Object>(3), strict_mode, args.smi_at(5));
449}
450}
451} // namespace v8::internal