blob: 89a6fa15d243f2ba3b8dd62ff7206220afa3cbe0 [file] [log] [blame]
// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
4
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005#include "src/runtime/runtime-utils.h"
Emily Bernierd0a1eb72015-03-24 16:35:39 -04006
7#include "src/arguments.h"
8#include "src/compiler.h"
9#include "src/deoptimizer.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000010#include "src/frames-inl.h"
11#include "src/full-codegen/full-codegen.h"
Emily Bernierd0a1eb72015-03-24 16:35:39 -040012#include "src/isolate-inl.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000013#include "src/messages.h"
Emily Bernierd0a1eb72015-03-24 16:35:39 -040014#include "src/v8threads.h"
15#include "src/vm-state-inl.h"
16
17namespace v8 {
18namespace internal {
19
// Lazily compiles the unoptimized code for a function the first time it is
// called. Returns the freshly compiled code object on success, or the
// exception sentinel if compilation failed (the pending exception is kept,
// see KEEP_EXCEPTION below).
RUNTIME_FUNCTION(Runtime_CompileLazy) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 1);
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);

#ifdef DEBUG
  if (FLAG_trace_lazy && !function->shared()->is_compiled()) {
    PrintF("[unoptimized: ");
    function->PrintName();
    PrintF("]\n");
  }
#endif

  // Compilation can consume a lot of stack; report a real stack overflow
  // here, with 1KB of headroom, rather than crashing inside the compiler.
  StackLimitCheck check(isolate);
  if (check.JsHasOverflowed(1 * KB)) return isolate->StackOverflow();
  if (!Compiler::Compile(function, Compiler::KEEP_EXCEPTION)) {
    // Compilation failed; the compiler left the exception pending.
    return isolate->heap()->exception();
  }
  DCHECK(function->is_compiled());
  return function->code();
}
41
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000042
43RUNTIME_FUNCTION(Runtime_CompileOptimized_Concurrent) {
44 HandleScope scope(isolate);
45 DCHECK(args.length() == 1);
46 CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
Ben Murdochda12d292016-06-02 14:46:10 +010047 StackLimitCheck check(isolate);
48 if (check.JsHasOverflowed(1 * KB)) return isolate->StackOverflow();
49 if (!Compiler::CompileOptimized(function, Compiler::CONCURRENT)) {
50 return isolate->heap()->exception();
51 }
52 DCHECK(function->is_compiled());
53 return function->code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000054}
55
56
57RUNTIME_FUNCTION(Runtime_CompileOptimized_NotConcurrent) {
58 HandleScope scope(isolate);
59 DCHECK(args.length() == 1);
60 CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
Ben Murdochda12d292016-06-02 14:46:10 +010061 StackLimitCheck check(isolate);
62 if (check.JsHasOverflowed(1 * KB)) return isolate->StackOverflow();
63 if (!Compiler::CompileOptimized(function, Compiler::NOT_CONCURRENT)) {
64 return isolate->heap()->exception();
65 }
66 DCHECK(function->is_compiled());
67 return function->code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000068}
69
Emily Bernierd0a1eb72015-03-24 16:35:39 -040070
71RUNTIME_FUNCTION(Runtime_NotifyStubFailure) {
72 HandleScope scope(isolate);
73 DCHECK(args.length() == 0);
74 Deoptimizer* deoptimizer = Deoptimizer::Grab(isolate);
75 DCHECK(AllowHeapAllocation::IsAllowed());
76 delete deoptimizer;
77 return isolate->heap()->undefined_value();
78}
79
80
81class ActivationsFinder : public ThreadVisitor {
82 public:
83 Code* code_;
84 bool has_code_activations_;
85
86 explicit ActivationsFinder(Code* code)
87 : code_(code), has_code_activations_(false) {}
88
89 void VisitThread(Isolate* isolate, ThreadLocalTop* top) {
90 JavaScriptFrameIterator it(isolate, top);
91 VisitFrames(&it);
92 }
93
94 void VisitFrames(JavaScriptFrameIterator* it) {
95 for (; !it->done(); it->Advance()) {
96 JavaScriptFrame* frame = it->frame();
97 if (code_->contains(frame->pc())) has_code_activations_ = true;
98 }
99 }
100};
101
102
// Called on re-entry to the runtime after code has deoptimized. Materializes
// heap objects for the deoptimized frames, disposes of the Deoptimizer, and
// — for non-lazy bailouts — either unlinks the optimized code from the
// function (when no other activation uses it) or deoptimizes the function.
RUNTIME_FUNCTION(Runtime_NotifyDeoptimized) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 1);
  CONVERT_SMI_ARG_CHECKED(type_arg, 0);
  Deoptimizer::BailoutType type =
      static_cast<Deoptimizer::BailoutType>(type_arg);
  // The Deoptimizer was stashed on the isolate by the deopt entry code.
  Deoptimizer* deoptimizer = Deoptimizer::Grab(isolate);
  DCHECK(AllowHeapAllocation::IsAllowed());
  TimerEventScope<TimerEventDeoptimizeCode> timer(isolate);
  TRACE_EVENT0("v8", "V8.DeoptimizeCode");

  Handle<JSFunction> function = deoptimizer->function();
  Handle<Code> optimized_code = deoptimizer->compiled_code();

  DCHECK(optimized_code->kind() == Code::OPTIMIZED_FUNCTION);
  DCHECK(type == deoptimizer->bailout_type());

  // Make sure to materialize objects before causing any allocation.
  JavaScriptFrameIterator it(isolate);
  deoptimizer->MaterializeHeapObjects(&it);
  delete deoptimizer;

  // Ensure the context register is updated for materialized objects.
  JavaScriptFrameIterator top_it(isolate);
  JavaScriptFrame* top_frame = top_it.frame();
  isolate->set_context(Context::cast(top_frame->context()));

  // Lazy deopts leave the code object alone; nothing more to do.
  if (type == Deoptimizer::LAZY) {
    return isolate->heap()->undefined_value();
  }

  // Search for other activations of the same optimized code.
  // At this point {it} is at the topmost frame of all the frames materialized
  // by the deoptimizer. Note that this frame does not necessarily represent
  // an activation of {function} because of potential inlined tail-calls.
  ActivationsFinder activations_finder(*optimized_code);
  activations_finder.VisitFrames(&it);
  isolate->thread_manager()->IterateArchivedThreads(&activations_finder);

  if (!activations_finder.has_code_activations_) {
    // No frame still runs this code: unlink it from the function so new
    // calls go through the unoptimized code again.
    if (function->code() == *optimized_code) {
      if (FLAG_trace_deopt) {
        PrintF("[removing optimized code for: ");
        function->PrintName();
        PrintF("]\n");
      }
      function->ReplaceCode(function->shared()->code());
    }
    // Evict optimized code for this function from the cache so that it
    // doesn't get used for new closures.
    function->shared()->EvictFromOptimizedCodeMap(*optimized_code,
                                                  "notify deoptimized");
  } else {
    // TODO(titzer): we should probably do DeoptimizeCodeList(code)
    // unconditionally if the code is not already marked for deoptimization.
    // If there is an index by shared function info, all the better.
    Deoptimizer::DeoptimizeFunction(*function);
  }

  return isolate->heap()->undefined_value();
}
164
165
166static bool IsSuitableForOnStackReplacement(Isolate* isolate,
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000167 Handle<JSFunction> function) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400168 // Keep track of whether we've succeeded in optimizing.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000169 if (function->shared()->optimization_disabled()) return false;
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400170 // If we are trying to do OSR when there are already optimized
171 // activations of the function, it means (a) the function is directly or
172 // indirectly recursive and (b) an optimized invocation has been
173 // deoptimized so that we are currently in an unoptimized activation.
174 // Check for optimized activations of this function.
175 for (JavaScriptFrameIterator it(isolate); !it.done(); it.Advance()) {
176 JavaScriptFrame* frame = it.frame();
177 if (frame->is_optimized() && frame->function() == *function) return false;
178 }
179
180 return true;
181}
182
183
// Attempts on-stack replacement: compiles optimized code with an OSR entry
// for the loop back edge the caller is spinning on, and installs it when
// possible. Returns the optimized code object on success, or NULL to signal
// that execution should continue in the unoptimized caller.
RUNTIME_FUNCTION(Runtime_CompileForOnStackReplacement) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 1);
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
  Handle<Code> caller_code(function->shared()->code());

  // We're not prepared to handle a function with arguments object.
  DCHECK(!function->shared()->uses_arguments());

  RUNTIME_ASSERT(FLAG_use_osr);

  // Passing the PC in the javascript frame from the caller directly is
  // not GC safe, so we walk the stack to get it.
  JavaScriptFrameIterator it(isolate);
  JavaScriptFrame* frame = it.frame();
  if (!caller_code->contains(frame->pc())) {
    // Code on the stack may not be the code object referenced by the shared
    // function info. It may have been replaced to include deoptimization data.
    caller_code = Handle<Code>(frame->LookupCode());
  }

  uint32_t pc_offset =
      static_cast<uint32_t>(frame->pc() - caller_code->instruction_start());

#ifdef DEBUG
  DCHECK_EQ(frame->function(), *function);
  DCHECK_EQ(frame->LookupCode(), *caller_code);
  DCHECK(caller_code->contains(frame->pc()));
#endif  // DEBUG

  // Map the current pc back to the AST id of the loop back edge.
  BailoutId ast_id = caller_code->TranslatePcOffsetToAstId(pc_offset);
  DCHECK(!ast_id.IsNone());

  MaybeHandle<Code> maybe_result;
  if (IsSuitableForOnStackReplacement(isolate, function)) {
    if (FLAG_trace_osr) {
      PrintF("[OSR - Compiling: ");
      function->PrintName();
      PrintF(" at AST id %d]\n", ast_id.ToInt());
    }
    maybe_result = Compiler::GetOptimizedCodeForOSR(function, ast_id, frame);
  }

  // Revert the patched back edge table, regardless of whether OSR succeeds.
  BackEdgeTable::Revert(isolate, *caller_code);

  // Check whether we ended up with usable optimized code.
  Handle<Code> result;
  if (maybe_result.ToHandle(&result) &&
      result->kind() == Code::OPTIMIZED_FUNCTION) {
    DeoptimizationInputData* data =
        DeoptimizationInputData::cast(result->deoptimization_data());

    // A non-negative OSR pc offset means the code has a usable OSR entry.
    if (data->OsrPcOffset()->value() >= 0) {
      DCHECK(BailoutId(data->OsrAstId()->value()) == ast_id);
      if (FLAG_trace_osr) {
        PrintF("[OSR - Entry at AST id %d, offset %d in optimized code]\n",
               ast_id.ToInt(), data->OsrPcOffset()->value());
      }
      // TODO(titzer): this is a massive hack to make the deopt counts
      // match. Fix heuristics for reenabling optimizations!
      function->shared()->increment_deopt_count();

      if (result->is_turbofanned()) {
        // TurboFanned OSR code cannot be installed into the function.
        // But the function is obviously hot, so optimize it next time.
        function->ReplaceCode(
            isolate->builtins()->builtin(Builtins::kCompileOptimized));
      } else {
        // Crankshafted OSR code can be installed into the function.
        function->ReplaceCode(*result);
      }
      return *result;
    }
  }

  // Failed.
  if (FLAG_trace_osr) {
    PrintF("[OSR - Failed: ");
    function->PrintName();
    PrintF(" at AST id %d]\n", ast_id.ToInt());
  }

  // Restore the unoptimized code in case a previous attempt left the
  // function in an inconsistent state.
  if (!function->IsOptimized()) {
    function->ReplaceCode(function->shared()->code());
  }
  // NULL tells the caller's generated code that no OSR entry is available.
  return NULL;
}
272
273
274RUNTIME_FUNCTION(Runtime_TryInstallOptimizedCode) {
275 HandleScope scope(isolate);
276 DCHECK(args.length() == 1);
277 CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
278
279 // First check if this is a real stack overflow.
280 StackLimitCheck check(isolate);
281 if (check.JsHasOverflowed()) {
282 SealHandleScope shs(isolate);
283 return isolate->StackOverflow();
284 }
285
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000286 isolate->optimizing_compile_dispatcher()->InstallOptimizedFunctions();
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400287 return (function->IsOptimized()) ? function->code()
288 : function->shared()->code();
289}
290
291
292bool CodeGenerationFromStringsAllowed(Isolate* isolate,
293 Handle<Context> context) {
294 DCHECK(context->allow_code_gen_from_strings()->IsFalse());
295 // Check with callback if set.
296 AllowCodeGenerationFromStringsCallback callback =
297 isolate->allow_code_gen_callback();
298 if (callback == NULL) {
299 // No callback set and code generation disallowed.
300 return false;
301 } else {
302 // Callback set. Let it decide if code generation is allowed.
303 VMState<EXTERNAL> state(isolate);
304 return callback(v8::Utils::ToLocal(context));
305 }
306}
307
308
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000309static Object* CompileGlobalEval(Isolate* isolate, Handle<String> source,
310 Handle<SharedFunctionInfo> outer_info,
311 LanguageMode language_mode,
312 int scope_position) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400313 Handle<Context> context = Handle<Context>(isolate->context());
314 Handle<Context> native_context = Handle<Context>(context->native_context());
315
316 // Check if native context allows code generation from
317 // strings. Throw an exception if it doesn't.
318 if (native_context->allow_code_gen_from_strings()->IsFalse() &&
319 !CodeGenerationFromStringsAllowed(isolate, native_context)) {
320 Handle<Object> error_message =
321 native_context->ErrorMessageForCodeGenerationFromStrings();
322 Handle<Object> error;
323 MaybeHandle<Object> maybe_error = isolate->factory()->NewEvalError(
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000324 MessageTemplate::kCodeGenFromStrings, error_message);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400325 if (maybe_error.ToHandle(&error)) isolate->Throw(*error);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000326 return isolate->heap()->exception();
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400327 }
328
329 // Deal with a normal eval call with a string argument. Compile it
330 // and return the compiled function bound in the local context.
331 static const ParseRestriction restriction = NO_PARSE_RESTRICTION;
332 Handle<JSFunction> compiled;
333 ASSIGN_RETURN_ON_EXCEPTION_VALUE(
334 isolate, compiled,
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000335 Compiler::GetFunctionFromEval(source, outer_info, context, language_mode,
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400336 restriction, scope_position),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000337 isolate->heap()->exception());
338 return *compiled;
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400339}
340
341
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000342RUNTIME_FUNCTION(Runtime_ResolvePossiblyDirectEval) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400343 HandleScope scope(isolate);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000344 DCHECK(args.length() == 5);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400345
346 Handle<Object> callee = args.at<Object>(0);
347
348 // If "eval" didn't refer to the original GlobalEval, it's not a
349 // direct call to eval.
350 // (And even if it is, but the first argument isn't a string, just let
351 // execution default to an indirect call to eval, which will also return
352 // the first argument without doing anything).
353 if (*callee != isolate->native_context()->global_eval_fun() ||
354 !args[1]->IsString()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000355 return *callee;
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400356 }
357
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000358 DCHECK(args[3]->IsSmi());
359 DCHECK(is_valid_language_mode(args.smi_at(3)));
360 LanguageMode language_mode = static_cast<LanguageMode>(args.smi_at(3));
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400361 DCHECK(args[4]->IsSmi());
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400362 Handle<SharedFunctionInfo> outer_info(args.at<JSFunction>(2)->shared(),
363 isolate);
364 return CompileGlobalEval(isolate, args.at<String>(1), outer_info,
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000365 language_mode, args.smi_at(4));
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400366}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000367} // namespace internal
368} // namespace v8