blob: f9bf280ea82ffd9ae45559d058d5693e80202236 [file] [log] [blame]
// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_DEOPTIMIZER_H_
#define V8_DEOPTIMIZER_H_

#include "v8.h"

#include "macro-assembler.h"
#include "zone-inl.h"


namespace v8 {
namespace internal {

class FrameDescription;
class TranslationIterator;
class DeoptimizingCodeListNode;


45class ValueDescription BASE_EMBEDDED {
46 public:
47 explicit ValueDescription(int index) : stack_index_(index) { }
48 int stack_index() const { return stack_index_; }
49
50 private:
51 // Offset relative to the top of the stack.
52 int stack_index_;
53};
54
55
56class ValueDescriptionInteger32: public ValueDescription {
57 public:
58 ValueDescriptionInteger32(int index, int32_t value)
59 : ValueDescription(index), int32_value_(value) { }
60 int32_t int32_value() const { return int32_value_; }
61
62 private:
63 // Raw value.
64 int32_t int32_value_;
65};
66
67
68class ValueDescriptionDouble: public ValueDescription {
69 public:
70 ValueDescriptionDouble(int index, double value)
71 : ValueDescription(index), double_value_(value) { }
72 double double_value() const { return double_value_; }
73
74 private:
75 // Raw value.
76 double double_value_;
77};
78
79
// Visitor interface for enumerating optimized JSFunctions. Callbacks are
// grouped per global context: EnterContext/LeaveContext bracket the
// VisitFunction calls for the functions belonging to that context (see
// Deoptimizer::VisitAllOptimizedFunctions* below).
class OptimizedFunctionVisitor BASE_EMBEDDED {
 public:
  virtual ~OptimizedFunctionVisitor() {}

  // Function which is called before iteration of any optimized functions
  // from given global context.
  virtual void EnterContext(Context* context) = 0;

  // Called for each optimized function in the current context.
  virtual void VisitFunction(JSFunction* function) = 0;

  // Function which is called after iteration of all optimized functions
  // from given global context.
  virtual void LeaveContext(Context* context) = 0;
};


// Orchestrates deoptimization: given an optimized frame it computes the
// equivalent unoptimized ("output") frames and provides the static entry
// points and bookkeeping (entry-code chunks, deoptimizing-code list) the
// generated code needs. Heap-allocated via Malloced.
class Deoptimizer : public Malloced {
 public:
  // Kind of bailout that triggered deoptimization.
  enum BailoutType {
    EAGER,  // Deoptimize immediately at the deopt point.
    LAZY,   // Deoptimize when execution returns to the function.
    OSR     // On-stack replacement.
  };

  // Number of output frames computed for the input frame.
  int output_count() const { return output_count_; }

  // Creates a deoptimizer and stashes it (presumably in current_ — see the
  // static member below; confirm in the .cc) so that Grab() can retrieve it.
  static Deoptimizer* New(JSFunction* function,
                          BailoutType type,
                          unsigned bailout_id,
                          Address from,
                          int fp_to_sp_delta);
  // Retrieves the deoptimizer created by the preceding New(); the caller
  // takes ownership.
  static Deoptimizer* Grab();

  // Deoptimize the function now. Its current optimized code will never be run
  // again and any activations of the optimized code will get deoptimized when
  // execution returns.
  static void DeoptimizeFunction(JSFunction* function);

  // Deoptimize all functions in the heap.
  static void DeoptimizeAll();

  // Deoptimize all optimized functions reachable from the given global
  // object.
  static void DeoptimizeGlobalObject(JSObject* object);

  // Visit every optimized function belonging to the given context.
  static void VisitAllOptimizedFunctionsForContext(
      Context* context, OptimizedFunctionVisitor* visitor);

  // Visit every optimized function belonging to the given global object's
  // context.
  static void VisitAllOptimizedFunctionsForGlobalObject(
      JSObject* object, OptimizedFunctionVisitor* visitor);

  // Visit every optimized function in the heap.
  static void VisitAllOptimizedFunctions(OptimizedFunctionVisitor* visitor);

  // Given the relocation info of a call to the stack check stub, patch the
  // code so as to go unconditionally to the on-stack replacement builtin
  // instead.
  static void PatchStackCheckCode(RelocInfo* rinfo, Code* replacement_code);

  // Given the relocation info of a call to the on-stack replacement
  // builtin, patch the code back to the original stack check code.
  static void RevertStackCheckCode(RelocInfo* rinfo, Code* check_code);

  ~Deoptimizer();

  // Write the collected heap-number values (see AddInteger32Value /
  // AddDoubleValue) back into the given unoptimized frame.
  void InsertHeapNumberValues(int index, JavaScriptFrame* frame);

  // Computes the output frames for the given deoptimizer (static wrapper
  // around DoComputeOutputFrames, callable from generated code).
  static void ComputeOutputFrames(Deoptimizer* deoptimizer);

  // Address of the deoptimization entry with the given id/type.
  static Address GetDeoptimizationEntry(int id, BailoutType type);
  // Reverse mapping: entry address back to its id.
  static int GetDeoptimizationId(Address addr, BailoutType type);
  // Looks up the output info for the given AST node id in the
  // deoptimization output data; returns kNotDeoptimizationEntry-style
  // sentinel semantics per the .cc implementation — confirm there.
  static int GetOutputInfo(DeoptimizationOutputData* data,
                           unsigned node_id,
                           SharedFunctionInfo* shared);

  // Global setup/teardown of the deoptimizer's static state (entry code
  // chunks, deoptimizing-code list).
  static void Setup();
  static void TearDown();

  // Code generation support.
  static int input_offset() { return OFFSET_OF(Deoptimizer, input_); }
  static int output_count_offset() {
    return OFFSET_OF(Deoptimizer, output_count_);
  }
  static int output_offset() { return OFFSET_OF(Deoptimizer, output_); }

  // Number of code objects currently on the deoptimizing-code list.
  static int GetDeoptimizedCodeCount();

  // Sentinel returned when an address is not a deoptimization entry.
  static const int kNotDeoptimizationEntry = -1;

  // Generators for the deoptimization entry code.
  class EntryGenerator BASE_EMBEDDED {
   public:
    EntryGenerator(MacroAssembler* masm, BailoutType type)
        : masm_(masm), type_(type) { }
    virtual ~EntryGenerator() { }

    // Emits the (prologue +) common deoptimization entry code.
    void Generate();

   protected:
    MacroAssembler* masm() const { return masm_; }
    BailoutType type() const { return type_; }

    // Hook for subclasses; the base generator emits no prologue.
    virtual void GeneratePrologue() { }

   private:
    MacroAssembler* masm_;
    Deoptimizer::BailoutType type_;
  };

  // Generates a table of `count` deoptimization entries via its prologue.
  class TableEntryGenerator : public EntryGenerator {
   public:
    TableEntryGenerator(MacroAssembler* masm, BailoutType type, int count)
        : EntryGenerator(masm, type), count_(count) { }

   protected:
    virtual void GeneratePrologue();

   private:
    int count() const { return count_; }

    int count_;
  };

 private:
  // Number of entries in a deoptimization entry table.
  static const int kNumberOfEntries = 4096;

  // Construction goes through New(); see there for parameter meaning.
  Deoptimizer(JSFunction* function,
              BailoutType type,
              unsigned bailout_id,
              Address from,
              int fp_to_sp_delta);
  // Frees input_ and the output_ frame descriptions.
  void DeleteFrameDescriptions();

  // Translate the input frame into the output frames.
  void DoComputeOutputFrames();
  // Variant used for on-stack replacement.
  void DoComputeOsrOutputFrame();
  // Build the output frame with the given index from the translation.
  void DoComputeFrame(TranslationIterator* iterator, int frame_index);
  // Translate a single command into the given output frame offset.
  void DoTranslateCommand(TranslationIterator* iterator,
                          int frame_index,
                          unsigned output_offset);
  // Translate a command for OSR.  Updates the input offset to be used for
  // the next command. Returns false if translation of the command failed
  // (e.g., a number conversion failed) and may or may not have updated the
  // input offset.
  bool DoOsrTranslateCommand(TranslationIterator* iterator,
                             int* input_offset);

  // Size computations for the input frame.
  unsigned ComputeInputFrameSize() const;
  unsigned ComputeFixedSize(JSFunction* function) const;

  unsigned ComputeIncomingArgumentSize(JSFunction* function) const;
  unsigned ComputeOutgoingArgumentSize() const;

  // Literal with the given index from the optimized code's literal array.
  Object* ComputeLiteral(int index) const;

  // Materialize a single double into a heap number slot of the given frame.
  void InsertHeapNumberValue(JavaScriptFrame* frame,
                             int stack_index,
                             double val,
                             int extra_slot_count);

  // Record values to be materialized later by InsertHeapNumberValues().
  void AddInteger32Value(int frame_index, int slot_index, int32_t value);
  void AddDoubleValue(int frame_index, int slot_index, double value);

  // Allocates and fills a chunk with deoptimization entry code.
  static LargeObjectChunk* CreateCode(BailoutType type);
  static void GenerateDeoptimizationEntries(
      MacroAssembler* masm, int count, BailoutType type);

  // Weak handle callback for deoptimizing code objects.
  static void HandleWeakDeoptimizedCode(
      v8::Persistent<v8::Value> obj, void* data);
  static Code* FindDeoptimizingCodeFromAddress(Address addr);
  static void RemoveDeoptimizingCode(Code* code);

  // Lazily created code chunks holding the deoptimization entries.
  static LargeObjectChunk* eager_deoptimization_entry_code_;
  static LargeObjectChunk* lazy_deoptimization_entry_code_;
  // Deoptimizer handed from New() to Grab().
  static Deoptimizer* current_;

  // List of deoptimized code which still have references from active stack
  // frames. These code objects are needed by the deoptimizer when deoptimizing
  // a frame for which the code object for the function function has been
  // changed from the code present when deoptimizing was done.
  static DeoptimizingCodeListNode* deoptimizing_code_list_;

  JSFunction* function_;       // Function being deoptimized.
  Code* optimized_code_;       // Its optimized code object.
  unsigned bailout_id_;
  BailoutType bailout_type_;
  Address from_;               // Address we deoptimized from.
  int fp_to_sp_delta_;         // fp-to-sp distance of the input frame.

  // Input frame description.
  FrameDescription* input_;
  // Number of output frames.
  int output_count_;
  // Array of output frame descriptions.
  FrameDescription** output_;

  // Values recorded for later materialization as heap numbers.
  List<ValueDescriptionInteger32>* integer32_values_;
  List<ValueDescriptionDouble>* double_values_;

  // Size in bytes of one entry in the generated entry table.
  static int table_entry_size_;

  friend class FrameDescription;
  friend class DeoptimizingCodeListNode;
};


// Describes one (input or output) stack frame during deoptimization.
// The frame's slot contents are stored inline, directly after the object
// itself: operator new over-allocates by frame_size bytes, and
// GetFrameSlotPointer() addresses into that trailing region.
class FrameDescription {
 public:
  FrameDescription(uint32_t frame_size,
                   JSFunction* function);

  // Placement-style allocator: reserves frame_size extra bytes after the
  // object for the frame contents.
  void* operator new(size_t size, uint32_t frame_size) {
    return malloc(size + frame_size);
  }

  void operator delete(void* description) {
    free(description);
  }

  // Size of the frame contents in bytes.
  intptr_t GetFrameSize() const { return frame_size_; }

  JSFunction* GetFunction() const { return function_; }

  // Maps a translation slot index to a byte offset into the frame.
  unsigned GetOffsetFromSlotIndex(Deoptimizer* deoptimizer, int slot_index);

  // Read/write a tagged-size slot at the given byte offset.
  intptr_t GetFrameSlot(unsigned offset) {
    return *GetFrameSlotPointer(offset);
  }

  // Reinterpret the slot at the given byte offset as a double.
  double GetDoubleFrameSlot(unsigned offset) {
    return *reinterpret_cast<double*>(GetFrameSlotPointer(offset));
  }

  void SetFrameSlot(unsigned offset, intptr_t value) {
    *GetFrameSlotPointer(offset) = value;
  }

  // Saved general-purpose register values, indexed by register code.
  intptr_t GetRegister(unsigned n) const {
    ASSERT(n < ARRAY_SIZE(registers_));
    return registers_[n];
  }

  // Saved double register values, indexed by register code.
  double GetDoubleRegister(unsigned n) const {
    ASSERT(n < ARRAY_SIZE(double_registers_));
    return double_registers_[n];
  }

  void SetRegister(unsigned n, intptr_t value) {
    ASSERT(n < ARRAY_SIZE(registers_));
    registers_[n] = value;
  }

  void SetDoubleRegister(unsigned n, double value) {
    ASSERT(n < ARRAY_SIZE(double_registers_));
    double_registers_[n] = value;
  }

  // Top-of-stack, program counter, and frame pointer of this frame.
  intptr_t GetTop() const { return top_; }
  void SetTop(intptr_t top) { top_ = top; }

  intptr_t GetPc() const { return pc_; }
  void SetPc(intptr_t pc) { pc_ = pc; }

  intptr_t GetFp() const { return fp_; }
  void SetFp(intptr_t fp) { fp_ = fp; }

  // Bailout state value (stored as a Smi).
  Smi* GetState() const { return state_; }
  void SetState(Smi* state) { state_ = state; }

  void SetContinuation(intptr_t pc) { continuation_ = pc; }

  // Offsets used by generated code to access the fields below directly.
  static int registers_offset() {
    return OFFSET_OF(FrameDescription, registers_);
  }

  static int double_registers_offset() {
    return OFFSET_OF(FrameDescription, double_registers_);
  }

  static int frame_size_offset() {
    return OFFSET_OF(FrameDescription, frame_size_);
  }

  static int pc_offset() {
    return OFFSET_OF(FrameDescription, pc_);
  }

  static int state_offset() {
    return OFFSET_OF(FrameDescription, state_);
  }

  static int continuation_offset() {
    return OFFSET_OF(FrameDescription, continuation_);
  }

  // The frame contents start immediately after the object itself.
  static int frame_content_offset() {
    return sizeof(FrameDescription);
  }

 private:
  // Marker value; presumably used to zap uninitialized slots — confirm
  // usage in the .cc.
  static const uint32_t kZapUint32 = 0xbeeddead;

  uintptr_t frame_size_;  // Number of bytes.
  JSFunction* function_;
  intptr_t registers_[Register::kNumRegisters];
  double double_registers_[DoubleRegister::kNumAllocatableRegisters];
  intptr_t top_;
  intptr_t pc_;
  intptr_t fp_;
  Smi* state_;

  // Continuation is the PC where the execution continues after
  // deoptimizing.
  intptr_t continuation_;

  // Address of the slot at the given byte offset within the trailing
  // frame-contents region.
  intptr_t* GetFrameSlotPointer(unsigned offset) {
    ASSERT(offset < frame_size_);
    return reinterpret_cast<intptr_t*>(
        reinterpret_cast<Address>(this) + frame_content_offset() + offset);
  }
};


// Append-only byte buffer into which Translation serializes its commands;
// the result is turned into a heap ByteArray via CreateByteArray() and
// read back with TranslationIterator.
class TranslationBuffer BASE_EMBEDDED {
 public:
  TranslationBuffer() : contents_(256) { }  // 256 = initial capacity hint.

  // Index of the next byte to be written; a Translation records this as
  // its start index.
  int CurrentIndex() const { return contents_.length(); }
  // Appends one (encoded) value; encoding is defined in the .cc.
  void Add(int32_t value);

  // Copies the accumulated bytes into a new heap ByteArray.
  Handle<ByteArray> CreateByteArray();

 private:
  ZoneList<uint8_t> contents_;
};


414class TranslationIterator BASE_EMBEDDED {
415 public:
416 TranslationIterator(ByteArray* buffer, int index)
417 : buffer_(buffer), index_(index) {
418 ASSERT(index >= 0 && index < buffer->length());
419 }
420
421 int32_t Next();
422
423 bool HasNext() const { return index_ >= 0; }
424
425 void Done() { index_ = -1; }
426
427 void Skip(int n) {
428 for (int i = 0; i < n; i++) Next();
429 }
430
431 private:
432 ByteArray* buffer_;
433 int index_;
434};
435
436
// Writer for the deoptimization translation: a sequence of commands,
// serialized into a TranslationBuffer, that describes how to reconstruct
// unoptimized frame values from an optimized frame.
class Translation BASE_EMBEDDED {
 public:
  // Opcode values are written numerically into the translation byte
  // stream (see the constructor adding BEGIN below), so their order is
  // part of the encoding.
  enum Opcode {
    BEGIN,
    FRAME,
    REGISTER,
    INT32_REGISTER,
    DOUBLE_REGISTER,
    STACK_SLOT,
    INT32_STACK_SLOT,
    DOUBLE_STACK_SLOT,
    LITERAL,
    ARGUMENTS_OBJECT,

    // A prefix indicating that the next command is a duplicate of the one
    // that follows it.
    DUPLICATE
  };

  // Immediately emits BEGIN plus the frame count, and remembers the start
  // index for later lookup.
  Translation(TranslationBuffer* buffer, int frame_count)
      : buffer_(buffer),
        index_(buffer->CurrentIndex()) {
    buffer_->Add(BEGIN);
    buffer_->Add(frame_count);
  }

  // Start index of this translation within the buffer.
  int index() const { return index_; }

  // Commands.
  void BeginFrame(int node_id, int literal_id, unsigned height);
  void StoreRegister(Register reg);
  void StoreInt32Register(Register reg);
  void StoreDoubleRegister(DoubleRegister reg);
  void StoreStackSlot(int index);
  void StoreInt32StackSlot(int index);
  void StoreDoubleStackSlot(int index);
  void StoreLiteral(int literal_id);
  void StoreArgumentsObject();
  void MarkDuplicate();

  // Number of operand values following the given opcode in the stream.
  static int NumberOfOperandsFor(Opcode opcode);

#ifdef OBJECT_PRINT
  static const char* StringFor(Opcode opcode);
#endif

 private:
  TranslationBuffer* buffer_;
  int index_;
};


// Linked list holding deoptimizing code objects. The deoptimizing code objects
// are kept as weak handles until they are no longer activated on the stack.
class DeoptimizingCodeListNode : public Malloced {
 public:
  // Presumably creates the global (weak) handle for `code`; the matching
  // destructor releases it — confirm in the .cc.
  explicit DeoptimizingCodeListNode(Code* code);
  ~DeoptimizingCodeListNode();

  // Intrusive singly-linked-list accessors.
  DeoptimizingCodeListNode* next() const { return next_; }
  void set_next(DeoptimizingCodeListNode* next) { next_ = next; }
  Handle<Code> code() const { return code_; }

 private:
  // Global (weak) handle to the deoptimizing code object.
  Handle<Code> code_;

  // Next pointer for linked list.
  DeoptimizingCodeListNode* next_;
};


} }  // namespace v8::internal

#endif  // V8_DEOPTIMIZER_H_