// Copyright 2010 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#ifndef V8_DEOPTIMIZER_H_
29#define V8_DEOPTIMIZER_H_
30
31#include "v8.h"
32
33#include "macro-assembler.h"
34#include "zone-inl.h"
35
36
37namespace v8 {
38namespace internal {
39
40class FrameDescription;
41class TranslationIterator;
42class DeoptimizingCodeListNode;
43
44
// Base record for a value that lives in a stack slot of a frame being
// deoptimized.  Subclasses (below) attach the raw bits of the value;
// this base only remembers which slot the value belongs to.
class ValueDescription BASE_EMBEDDED {
 public:
  // |index| is the slot's offset relative to the top of the stack.
  explicit ValueDescription(int index) : stack_index_(index) { }
  int stack_index() const { return stack_index_; }

 private:
  // Offset relative to the top of the stack.
  int stack_index_;
};
54
55
// A stack-slot value carrying raw 32-bit integer bits.
class ValueDescriptionInteger32: public ValueDescription {
 public:
  ValueDescriptionInteger32(int index, int32_t value)
      : ValueDescription(index), int32_value_(value) { }
  int32_t int32_value() const { return int32_value_; }

 private:
  // Raw value.
  int32_t int32_value_;
};
66
67
// A stack-slot value carrying raw double bits.
class ValueDescriptionDouble: public ValueDescription {
 public:
  ValueDescriptionDouble(int index, double value)
      : ValueDescription(index), double_value_(value) { }
  double double_value() const { return double_value_; }

 private:
  // Raw value.
  double double_value_;
};
78
79
// Callback interface used by Deoptimizer::VisitAllOptimizedFunctions*
// to enumerate optimized functions, grouped by global context.
class OptimizedFunctionVisitor BASE_EMBEDDED {
 public:
  virtual ~OptimizedFunctionVisitor() {}

  // Function which is called before iteration of any optimized functions
  // from given global context.
  virtual void EnterContext(Context* context) = 0;

  // Called once for each optimized function visited between
  // EnterContext() and LeaveContext().
  virtual void VisitFunction(JSFunction* function) = 0;

  // Function which is called after iteration of all optimized functions
  // from given global context.
  virtual void LeaveContext(Context* context) = 0;
};
94
95
// Translates an optimized frame into the equivalent unoptimized
// frame(s) so execution can continue in unoptimized code after a
// bailout.  Also owns the global bookkeeping: the generated
// deoptimization entry stubs and the list of code objects that have
// been deoptimized but are still referenced from active stack frames.
class Deoptimizer : public Malloced {
 public:
  // Flavor of bailout being performed.
  enum BailoutType {
    EAGER,  // Deoptimize at the deopt point itself.
    LAZY,   // Deoptimize when execution returns to the function.
    OSR     // On-stack replacement; handled by DoComputeOsrOutputFrame().
  };

  // Number of output frame descriptions produced from the single input
  // frame (see output_ below).
  int output_count() const { return output_count_; }

  // Creates a deoptimizer for the given bailout and stores it in the
  // static current_ slot; the matching Grab() hands it back to the
  // caller (called from the generated deoptimization entry code).
  static Deoptimizer* New(JSFunction* function,
                          BailoutType type,
                          unsigned bailout_id,
                          Address from,
                          int fp_to_sp_delta);
  // Returns the deoptimizer created by the most recent New().
  static Deoptimizer* Grab();

  // Deoptimize the function now. Its current optimized code will never be run
  // again and any activations of the optimized code will get deoptimized when
  // execution returns.
  static void DeoptimizeFunction(JSFunction* function);

  // Deoptimize all functions in the heap.
  static void DeoptimizeAll();

  // Deoptimize all optimized functions associated with the given
  // global object.
  static void DeoptimizeGlobalObject(JSObject* object);

  // Enumeration of optimized functions; see OptimizedFunctionVisitor
  // for the callback protocol.
  static void VisitAllOptimizedFunctionsForContext(
      Context* context, OptimizedFunctionVisitor* visitor);

  static void VisitAllOptimizedFunctionsForGlobalObject(
      JSObject* object, OptimizedFunctionVisitor* visitor);

  static void VisitAllOptimizedFunctions(OptimizedFunctionVisitor* visitor);

  // Patch all stack guard checks in the unoptimized code to
  // unconditionally call replacement_code.
  static void PatchStackCheckCode(Code* unoptimized_code,
                                  Code* check_code,
                                  Code* replacement_code);

  // Change all patched stack guard checks in the unoptimized code
  // back to a normal stack guard check.
  static void RevertStackCheckCode(Code* unoptimized_code,
                                   Code* check_code,
                                   Code* replacement_code);

  ~Deoptimizer();

  // Writes the int32/double values recorded during translation (see
  // AddInteger32Value/AddDoubleValue) into the given frame as heap
  // numbers.
  void InsertHeapNumberValues(int index, JavaScriptFrame* frame);

  // Fills in output_ from input_; static so its address can be called
  // from generated code.
  static void ComputeOutputFrames(Deoptimizer* deoptimizer);

  // Address of deoptimization entry |id| for the given bailout type.
  static Address GetDeoptimizationEntry(int id, BailoutType type);
  // Inverse of GetDeoptimizationEntry: the entry id for an address.
  static int GetDeoptimizationId(Address addr, BailoutType type);
  // Looks up the deoptimization pc-offset info recorded for the AST
  // node |node_id| in |data|.
  static int GetOutputInfo(DeoptimizationOutputData* data,
                           unsigned node_id,
                           SharedFunctionInfo* shared);

  // Process-wide initialization / teardown of the static state below.
  static void Setup();
  static void TearDown();

  // Code generation support: field offsets used by the generated
  // deoptimization entry code to read this object directly.
  static int input_offset() { return OFFSET_OF(Deoptimizer, input_); }
  static int output_count_offset() {
    return OFFSET_OF(Deoptimizer, output_count_);
  }
  static int output_offset() { return OFFSET_OF(Deoptimizer, output_); }

  // Length of the deoptimizing_code_list_ below.
  static int GetDeoptimizedCodeCount();

  // Sentinel returned when an address/id lookup finds no entry.
  static const int kNotDeoptimizationEntry = -1;

  // Generators for the deoptimization entry code.
  class EntryGenerator BASE_EMBEDDED {
   public:
    EntryGenerator(MacroAssembler* masm, BailoutType type)
        : masm_(masm), type_(type) { }
    virtual ~EntryGenerator() { }

    // Emits the entry code; subclasses customize via GeneratePrologue().
    void Generate();

   protected:
    MacroAssembler* masm() const { return masm_; }
    BailoutType type() const { return type_; }

    // Hook emitted before the common entry code; default is empty.
    virtual void GeneratePrologue() { }

   private:
    MacroAssembler* masm_;
    Deoptimizer::BailoutType type_;
  };

  // Generates a table of |count| deoptimization entries.
  class TableEntryGenerator : public EntryGenerator {
   public:
    TableEntryGenerator(MacroAssembler* masm, BailoutType type, int count)
        : EntryGenerator(masm, type), count_(count) { }

   protected:
    virtual void GeneratePrologue();

   private:
    int count() const { return count_; }

    int count_;
  };

 private:
  // Capacity of the generated deoptimization entry tables.
  static const int kNumberOfEntries = 4096;

  // Instances are created through New() only.
  Deoptimizer(JSFunction* function,
              BailoutType type,
              unsigned bailout_id,
              Address from,
              int fp_to_sp_delta);
  void DeleteFrameDescriptions();

  void DoComputeOutputFrames();
  void DoComputeOsrOutputFrame();
  void DoComputeFrame(TranslationIterator* iterator, int frame_index);
  void DoTranslateCommand(TranslationIterator* iterator,
                          int frame_index,
                          unsigned output_offset);
  // Translate a command for OSR. Updates the input offset to be used for
  // the next command. Returns false if translation of the command failed
  // (e.g., a number conversion failed) and may or may not have updated the
  // input offset.
  bool DoOsrTranslateCommand(TranslationIterator* iterator,
                             int* input_offset);

  // Frame-size helpers (all sizes in bytes).
  unsigned ComputeInputFrameSize() const;
  unsigned ComputeFixedSize(JSFunction* function) const;

  unsigned ComputeIncomingArgumentSize(JSFunction* function) const;
  unsigned ComputeOutgoingArgumentSize() const;

  // Literal |index| from the optimized code's literal array.
  Object* ComputeLiteral(int index) const;

  void InsertHeapNumberValue(JavaScriptFrame* frame,
                             int stack_index,
                             double val,
                             int extra_slot_count);

  // Record a raw value seen during translation for later
  // materialization by InsertHeapNumberValues().
  void AddInteger32Value(int frame_index, int slot_index, int32_t value);
  void AddDoubleValue(int frame_index, int slot_index, double value);

  // Allocates and fills a chunk holding the deoptimization entries for
  // the given bailout type.
  static LargeObjectChunk* CreateCode(BailoutType type);
  static void GenerateDeoptimizationEntries(
      MacroAssembler* masm, int count, BailoutType type);

  // Weak handle callback for deoptimizing code objects.
  static void HandleWeakDeoptimizedCode(
      v8::Persistent<v8::Value> obj, void* data);
  static Code* FindDeoptimizingCodeFromAddress(Address addr);
  static void RemoveDeoptimizingCode(Code* code);

  // Lazily created entry-stub chunks (see CreateCode()).
  static LargeObjectChunk* eager_deoptimization_entry_code_;
  static LargeObjectChunk* lazy_deoptimization_entry_code_;
  // Hand-off slot between New() and Grab().
  static Deoptimizer* current_;

  // List of deoptimized code which still have references from active stack
  // frames. These code objects are needed by the deoptimizer when deoptimizing
  // a frame for which the code object for the function has been
  // changed from the code present when deoptimizing was done.
  static DeoptimizingCodeListNode* deoptimizing_code_list_;

  // Parameters of the bailout being processed (as passed to New()).
  JSFunction* function_;
  Code* optimized_code_;
  unsigned bailout_id_;
  BailoutType bailout_type_;
  Address from_;
  int fp_to_sp_delta_;

  // Input frame description.
  FrameDescription* input_;
  // Number of output frames.
  int output_count_;
  // Array of output frame descriptions.
  FrameDescription** output_;

  // Raw values recorded during translation, materialized as heap
  // numbers by InsertHeapNumberValues().
  List<ValueDescriptionInteger32>* integer32_values_;
  List<ValueDescriptionDouble>* double_values_;

  // Size in bytes of one entry in the generated entry table.
  static int table_entry_size_;

  friend class FrameDescription;
  friend class DeoptimizingCodeListNode;
};
284
285
// Describes a single stack frame during deoptimization: register
// values, pc/fp/top, and a variable-sized block of frame slots.  The
// slot storage is allocated immediately after the object itself via
// the placement operator new below, and addressed through
// frame_content_offset()/GetFrameSlotPointer().
class FrameDescription {
 public:
  FrameDescription(uint32_t frame_size,
                   JSFunction* function);

  // Over-allocate by frame_size bytes so the frame contents live
  // directly behind the FrameDescription fields.
  void* operator new(size_t size, uint32_t frame_size) {
    return malloc(size + frame_size);
  }

  void operator delete(void* description) {
    free(description);
  }

  // Frame size in bytes (size of the trailing slot storage).
  intptr_t GetFrameSize() const { return frame_size_; }

  JSFunction* GetFunction() const { return function_; }

  // Maps a translation slot index to a byte offset into the frame
  // contents.
  unsigned GetOffsetFromSlotIndex(Deoptimizer* deoptimizer, int slot_index);

  // Slot accessors; |offset| is a byte offset into the frame contents
  // and must be less than frame_size_.
  intptr_t GetFrameSlot(unsigned offset) {
    return *GetFrameSlotPointer(offset);
  }

  // Reinterprets the slot bytes at |offset| as a double.
  double GetDoubleFrameSlot(unsigned offset) {
    return *reinterpret_cast<double*>(GetFrameSlotPointer(offset));
  }

  void SetFrameSlot(unsigned offset, intptr_t value) {
    *GetFrameSlotPointer(offset) = value;
  }

  // General-purpose and double register accessors; |n| is the register
  // code and must be within the respective array bounds.
  intptr_t GetRegister(unsigned n) const {
    ASSERT(n < ARRAY_SIZE(registers_));
    return registers_[n];
  }

  double GetDoubleRegister(unsigned n) const {
    ASSERT(n < ARRAY_SIZE(double_registers_));
    return double_registers_[n];
  }

  void SetRegister(unsigned n, intptr_t value) {
    ASSERT(n < ARRAY_SIZE(registers_));
    registers_[n] = value;
  }

  void SetDoubleRegister(unsigned n, double value) {
    ASSERT(n < ARRAY_SIZE(double_registers_));
    double_registers_[n] = value;
  }

  // Top-of-stack, program counter and frame pointer of this frame.
  intptr_t GetTop() const { return top_; }
  void SetTop(intptr_t top) { top_ = top; }

  intptr_t GetPc() const { return pc_; }
  void SetPc(intptr_t pc) { pc_ = pc; }

  intptr_t GetFp() const { return fp_; }
  void SetFp(intptr_t fp) { fp_ = fp; }

  // Full-codegen state of the frame, stored as a Smi.
  Smi* GetState() const { return state_; }
  void SetState(Smi* state) { state_ = state; }

  // PC where execution continues after deoptimizing.
  void SetContinuation(intptr_t pc) { continuation_ = pc; }

  // Field offsets used by the generated deoptimization entry code to
  // read/write this object directly.
  static int registers_offset() {
    return OFFSET_OF(FrameDescription, registers_);
  }

  static int double_registers_offset() {
    return OFFSET_OF(FrameDescription, double_registers_);
  }

  static int frame_size_offset() {
    return OFFSET_OF(FrameDescription, frame_size_);
  }

  static int pc_offset() {
    return OFFSET_OF(FrameDescription, pc_);
  }

  static int state_offset() {
    return OFFSET_OF(FrameDescription, state_);
  }

  static int continuation_offset() {
    return OFFSET_OF(FrameDescription, continuation_);
  }

  // Byte offset of the trailing frame contents relative to |this|.
  static int frame_content_offset() {
    return sizeof(FrameDescription);
  }

 private:
  // Filler pattern for uninitialized slots.
  static const uint32_t kZapUint32 = 0xbeeddead;

  uintptr_t frame_size_;  // Number of bytes.
  JSFunction* function_;
  intptr_t registers_[Register::kNumRegisters];
  double double_registers_[DoubleRegister::kNumAllocatableRegisters];
  intptr_t top_;
  intptr_t pc_;
  intptr_t fp_;
  Smi* state_;

  // Continuation is the PC where the execution continues after
  // deoptimizing.
  intptr_t continuation_;

  // Address of the byte at |offset| in the trailing frame contents.
  intptr_t* GetFrameSlotPointer(unsigned offset) {
    ASSERT(offset < frame_size_);
    return reinterpret_cast<intptr_t*>(
        reinterpret_cast<Address>(this) + frame_content_offset() + offset);
  }
};
401
402
// Growable byte buffer into which Translation records its commands;
// CreateByteArray() copies the accumulated bytes into a heap-allocated
// ByteArray for storage in the deoptimization data.
class TranslationBuffer BASE_EMBEDDED {
 public:
  TranslationBuffer() : contents_(256) { }

  // Current write position; used by Translation to remember where its
  // own commands start.
  int CurrentIndex() const { return contents_.length(); }
  void Add(int32_t value);

  Handle<ByteArray> CreateByteArray();

 private:
  ZoneList<uint8_t> contents_;
};
415
416
// Reads back the values written through TranslationBuffer from a
// ByteArray, starting at |index|.  An index of -1 marks exhaustion
// (see HasNext()/Done()).
class TranslationIterator BASE_EMBEDDED {
 public:
  TranslationIterator(ByteArray* buffer, int index)
      : buffer_(buffer), index_(index) {
    ASSERT(index >= 0 && index < buffer->length());
  }

  // Decodes and returns the next value, advancing index_.
  int32_t Next();

  bool HasNext() const { return index_ >= 0; }

  // Marks the iterator as exhausted.
  void Done() { index_ = -1; }

  // Skips the next |n| values.
  void Skip(int n) {
    for (int i = 0; i < n; i++) Next();
  }

 private:
  ByteArray* buffer_;
  int index_;
};
438
439
// Writer for the translation of one optimized frame into its
// unoptimized frames.  Each command (one Store*/BeginFrame call below)
// appends an opcode plus its operands to the shared TranslationBuffer;
// the constructor emits the leading BEGIN command.
class Translation BASE_EMBEDDED {
 public:
  enum Opcode {
    BEGIN,
    FRAME,
    REGISTER,
    INT32_REGISTER,
    DOUBLE_REGISTER,
    STACK_SLOT,
    INT32_STACK_SLOT,
    DOUBLE_STACK_SLOT,
    LITERAL,
    ARGUMENTS_OBJECT,

    // A prefix indicating that the next command is a duplicate of the one
    // that follows it.
    DUPLICATE
  };

  // Emits BEGIN with the number of frames this translation will
  // describe; index() remembers where the translation starts.
  Translation(TranslationBuffer* buffer, int frame_count)
      : buffer_(buffer),
        index_(buffer->CurrentIndex()) {
    buffer_->Add(BEGIN);
    buffer_->Add(frame_count);
  }

  // Start index of this translation within the buffer.
  int index() const { return index_; }

  // Commands.
  void BeginFrame(int node_id, int literal_id, unsigned height);
  void StoreRegister(Register reg);
  void StoreInt32Register(Register reg);
  void StoreDoubleRegister(DoubleRegister reg);
  void StoreStackSlot(int index);
  void StoreInt32StackSlot(int index);
  void StoreDoubleStackSlot(int index);
  void StoreLiteral(int literal_id);
  void StoreArgumentsObject();
  void MarkDuplicate();

  // Number of operand values following the given opcode in the buffer.
  static int NumberOfOperandsFor(Opcode opcode);

#ifdef OBJECT_PRINT
  static const char* StringFor(Opcode opcode);
#endif

 private:
  TranslationBuffer* buffer_;
  int index_;
};
490
491
// Linked list holding deoptimizing code objects. The deoptimizing code objects
// are kept as weak handles until they are no longer activated on the stack.
class DeoptimizingCodeListNode : public Malloced {
 public:
  explicit DeoptimizingCodeListNode(Code* code);
  ~DeoptimizingCodeListNode();

  DeoptimizingCodeListNode* next() const { return next_; }
  void set_next(DeoptimizingCodeListNode* next) { next_ = next; }
  Handle<Code> code() const { return code_; }

 private:
  // Global (weak) handle to the deoptimizing code object.
  Handle<Code> code_;

  // Next pointer for linked list.
  DeoptimizingCodeListNode* next_;
};
510
511
512} } // namespace v8::internal
513
514#endif // V8_DEOPTIMIZER_H_