// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_DEOPTIMIZER_H_
#define V8_DEOPTIMIZER_H_

#include "v8.h"

#include "macro-assembler.h"
#include "zone-inl.h"


namespace v8 {
namespace internal {

class FrameDescription;
class TranslationIterator;
class DeoptimizingCodeListNode;


class ValueDescription BASE_EMBEDDED {
 public:
  explicit ValueDescription(int index) : stack_index_(index) { }
  int stack_index() const { return stack_index_; }

 private:
  // Offset relative to the top of the stack.
  int stack_index_;
};


class ValueDescriptionInteger32: public ValueDescription {
 public:
  ValueDescriptionInteger32(int index, int32_t value)
      : ValueDescription(index), int32_value_(value) { }
  int32_t int32_value() const { return int32_value_; }

 private:
  // Raw value.
  int32_t int32_value_;
};


class ValueDescriptionDouble: public ValueDescription {
 public:
  ValueDescriptionDouble(int index, double value)
      : ValueDescription(index), double_value_(value) { }
  double double_value() const { return double_value_; }

 private:
  // Raw value.
  double double_value_;
};


class OptimizedFunctionVisitor BASE_EMBEDDED {
 public:
  virtual ~OptimizedFunctionVisitor() {}

  // Function which is called before iteration of any optimized functions
  // from the given global context.
  virtual void EnterContext(Context* context) = 0;

  virtual void VisitFunction(JSFunction* function) = 0;

  // Function which is called after iteration of all optimized functions
  // from the given global context.
  virtual void LeaveContext(Context* context) = 0;
};
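
// Illustrative sketch (not part of the original header): a minimal visitor,
// here a hypothetical CountingVisitor class, that simply counts the optimized
// functions it is shown. It can be driven by the static
// Deoptimizer::VisitAllOptimizedFunctions() declared below.
//
//   class CountingVisitor : public OptimizedFunctionVisitor {
//    public:
//     CountingVisitor() : count_(0) { }
//     virtual void EnterContext(Context* context) { }
//     virtual void VisitFunction(JSFunction* function) { count_++; }
//     virtual void LeaveContext(Context* context) { }
//     int count() const { return count_; }
//    private:
//     int count_;
//   };
//
//   CountingVisitor visitor;
//   Deoptimizer::VisitAllOptimizedFunctions(&visitor);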


class Deoptimizer : public Malloced {
 public:
  enum BailoutType {
    EAGER,
    LAZY,
    OSR
  };

  int output_count() const { return output_count_; }

  static Deoptimizer* New(JSFunction* function,
                          BailoutType type,
                          unsigned bailout_id,
                          Address from,
                          int fp_to_sp_delta);
  static Deoptimizer* Grab();

  // Deoptimize the function now. Its current optimized code will never be run
  // again and any activations of the optimized code will get deoptimized when
  // execution returns.
  static void DeoptimizeFunction(JSFunction* function);

  // Deoptimize all functions in the heap.
  static void DeoptimizeAll();

  static void DeoptimizeGlobalObject(JSObject* object);

  static void VisitAllOptimizedFunctionsForContext(
      Context* context, OptimizedFunctionVisitor* visitor);

  static void VisitAllOptimizedFunctionsForGlobalObject(
      JSObject* object, OptimizedFunctionVisitor* visitor);

  static void VisitAllOptimizedFunctions(OptimizedFunctionVisitor* visitor);

  // The size in bytes of the code required at a lazy deopt patch site.
  static int patch_size();

  // Patch all stack guard checks in the unoptimized code to
  // unconditionally call replacement_code.
  static void PatchStackCheckCode(Code* unoptimized_code,
                                  Code* check_code,
                                  Code* replacement_code);

  // Patch the stack guard check at the instruction before pc_after in
  // the unoptimized code to unconditionally call replacement_code.
  static void PatchStackCheckCodeAt(Address pc_after,
                                    Code* check_code,
                                    Code* replacement_code);

  // Change all patched stack guard checks in the unoptimized code
  // back to a normal stack guard check.
  static void RevertStackCheckCode(Code* unoptimized_code,
                                   Code* check_code,
                                   Code* replacement_code);

  // Change the patched stack guard check at the instruction before pc_after
  // in the unoptimized code back to a normal stack guard check.
  static void RevertStackCheckCodeAt(Address pc_after,
                                     Code* check_code,
                                     Code* replacement_code);

  ~Deoptimizer();

  void InsertHeapNumberValues(int index, JavaScriptFrame* frame);

  static void ComputeOutputFrames(Deoptimizer* deoptimizer);

  static Address GetDeoptimizationEntry(int id, BailoutType type);
  static int GetDeoptimizationId(Address addr, BailoutType type);
  static int GetOutputInfo(DeoptimizationOutputData* data,
                           unsigned node_id,
                           SharedFunctionInfo* shared);

  static void Setup();
  static void TearDown();

  // Code generation support.
  static int input_offset() { return OFFSET_OF(Deoptimizer, input_); }
  static int output_count_offset() {
    return OFFSET_OF(Deoptimizer, output_count_);
  }
  static int output_offset() { return OFFSET_OF(Deoptimizer, output_); }

  static int GetDeoptimizedCodeCount();

  static const int kNotDeoptimizationEntry = -1;

  // Generators for the deoptimization entry code.
  class EntryGenerator BASE_EMBEDDED {
   public:
    EntryGenerator(MacroAssembler* masm, BailoutType type)
        : masm_(masm), type_(type) { }
    virtual ~EntryGenerator() { }

    void Generate();

   protected:
    MacroAssembler* masm() const { return masm_; }
    BailoutType type() const { return type_; }

    virtual void GeneratePrologue() { }

   private:
    MacroAssembler* masm_;
    Deoptimizer::BailoutType type_;
  };

  class TableEntryGenerator : public EntryGenerator {
   public:
    TableEntryGenerator(MacroAssembler* masm, BailoutType type, int count)
        : EntryGenerator(masm, type), count_(count) { }

   protected:
    virtual void GeneratePrologue();

   private:
    int count() const { return count_; }

    int count_;
  };

 private:
  static const int kNumberOfEntries = 4096;

  Deoptimizer(JSFunction* function,
              BailoutType type,
              unsigned bailout_id,
              Address from,
              int fp_to_sp_delta);
  void DeleteFrameDescriptions();

  void DoComputeOutputFrames();
  void DoComputeOsrOutputFrame();
  void DoComputeFrame(TranslationIterator* iterator, int frame_index);
  void DoTranslateCommand(TranslationIterator* iterator,
                          int frame_index,
                          unsigned output_offset);
  // Translate a command for OSR. Updates the input offset to be used for
  // the next command. Returns false if translation of the command failed
  // (e.g., a number conversion failed) and may or may not have updated the
  // input offset.
  bool DoOsrTranslateCommand(TranslationIterator* iterator,
                             int* input_offset);

  unsigned ComputeInputFrameSize() const;
  unsigned ComputeFixedSize(JSFunction* function) const;

  unsigned ComputeIncomingArgumentSize(JSFunction* function) const;
  unsigned ComputeOutgoingArgumentSize() const;

  Object* ComputeLiteral(int index) const;

  void InsertHeapNumberValue(JavaScriptFrame* frame,
                             int stack_index,
                             double val,
                             int extra_slot_count);

  void AddInteger32Value(int frame_index, int slot_index, int32_t value);
  void AddDoubleValue(int frame_index, int slot_index, double value);

  static LargeObjectChunk* CreateCode(BailoutType type);
  static void GenerateDeoptimizationEntries(
      MacroAssembler* masm, int count, BailoutType type);

  // Weak handle callback for deoptimizing code objects.
  static void HandleWeakDeoptimizedCode(
      v8::Persistent<v8::Value> obj, void* data);
  static Code* FindDeoptimizingCodeFromAddress(Address addr);
  static void RemoveDeoptimizingCode(Code* code);

  static LargeObjectChunk* eager_deoptimization_entry_code_;
  static LargeObjectChunk* lazy_deoptimization_entry_code_;
  static Deoptimizer* current_;

  // List of deoptimized code objects which still have references from active
  // stack frames. These code objects are needed by the deoptimizer when
  // deoptimizing a frame for which the function's code object has been
  // changed since the code in the frame was deoptimized.
  static DeoptimizingCodeListNode* deoptimizing_code_list_;

  JSFunction* function_;
  Code* optimized_code_;
  unsigned bailout_id_;
  BailoutType bailout_type_;
  Address from_;
  int fp_to_sp_delta_;

  // Input frame description.
  FrameDescription* input_;
  // Number of output frames.
  int output_count_;
  // Array of output frame descriptions.
  FrameDescription** output_;

  List<ValueDescriptionInteger32>* integer32_values_;
  List<ValueDescriptionDouble>* double_values_;

  static int table_entry_size_;

  friend class FrameDescription;
  friend class DeoptimizingCodeListNode;
};
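
// Illustrative sketch (not part of the original header): the expected shape of
// the static lifecycle declared above. In practice New() and
// ComputeOutputFrames() are invoked from generated deoptimization entry code
// and runtime stubs rather than from ordinary C++; the argument values here
// are hypothetical and the exact call sequence is an assumption.
//
//   Deoptimizer* deoptimizer = Deoptimizer::New(function,
//                                               Deoptimizer::EAGER,
//                                               bailout_id,
//                                               from_pc,
//                                               fp_to_sp_delta);
//   Deoptimizer::ComputeOutputFrames(deoptimizer);
//   // ... the generated code copies the output FrameDescriptions onto the
//   // stack, then the runtime picks the deoptimizer back up and frees it ...
//   delete Deoptimizer::Grab();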


class FrameDescription {
 public:
  FrameDescription(uint32_t frame_size,
                   JSFunction* function);

  void* operator new(size_t size, uint32_t frame_size) {
    return malloc(size + frame_size);
  }

  void operator delete(void* description) {
    free(description);
  }

  intptr_t GetFrameSize() const { return frame_size_; }

  JSFunction* GetFunction() const { return function_; }

  unsigned GetOffsetFromSlotIndex(Deoptimizer* deoptimizer, int slot_index);

  intptr_t GetFrameSlot(unsigned offset) {
    return *GetFrameSlotPointer(offset);
  }

  double GetDoubleFrameSlot(unsigned offset) {
    return *reinterpret_cast<double*>(GetFrameSlotPointer(offset));
  }

  void SetFrameSlot(unsigned offset, intptr_t value) {
    *GetFrameSlotPointer(offset) = value;
  }

  intptr_t GetRegister(unsigned n) const {
    ASSERT(n < ARRAY_SIZE(registers_));
    return registers_[n];
  }

  double GetDoubleRegister(unsigned n) const {
    ASSERT(n < ARRAY_SIZE(double_registers_));
    return double_registers_[n];
  }

  void SetRegister(unsigned n, intptr_t value) {
    ASSERT(n < ARRAY_SIZE(registers_));
    registers_[n] = value;
  }

  void SetDoubleRegister(unsigned n, double value) {
    ASSERT(n < ARRAY_SIZE(double_registers_));
    double_registers_[n] = value;
  }

  intptr_t GetTop() const { return top_; }
  void SetTop(intptr_t top) { top_ = top; }

  intptr_t GetPc() const { return pc_; }
  void SetPc(intptr_t pc) { pc_ = pc; }

  intptr_t GetFp() const { return fp_; }
  void SetFp(intptr_t fp) { fp_ = fp; }

  Smi* GetState() const { return state_; }
  void SetState(Smi* state) { state_ = state; }

  void SetContinuation(intptr_t pc) { continuation_ = pc; }

  static int registers_offset() {
    return OFFSET_OF(FrameDescription, registers_);
  }

  static int double_registers_offset() {
    return OFFSET_OF(FrameDescription, double_registers_);
  }

  static int frame_size_offset() {
    return OFFSET_OF(FrameDescription, frame_size_);
  }

  static int pc_offset() {
    return OFFSET_OF(FrameDescription, pc_);
  }

  static int state_offset() {
    return OFFSET_OF(FrameDescription, state_);
  }

  static int continuation_offset() {
    return OFFSET_OF(FrameDescription, continuation_);
  }

  static int frame_content_offset() {
    return sizeof(FrameDescription);
  }

 private:
  static const uint32_t kZapUint32 = 0xbeeddead;

  uintptr_t frame_size_;  // Number of bytes.
  JSFunction* function_;
  intptr_t registers_[Register::kNumRegisters];
  double double_registers_[DoubleRegister::kNumAllocatableRegisters];
  intptr_t top_;
  intptr_t pc_;
  intptr_t fp_;
  Smi* state_;

  // Continuation is the PC where the execution continues after
  // deoptimizing.
  intptr_t continuation_;

  intptr_t* GetFrameSlotPointer(unsigned offset) {
    ASSERT(offset < frame_size_);
    return reinterpret_cast<intptr_t*>(
        reinterpret_cast<Address>(this) + frame_content_offset() + offset);
  }
};
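
// Illustrative sketch (not part of the original header): the overloaded
// operator new above allocates the frame contents in the same malloc'ed block,
// directly behind the FrameDescription object, which is why frame slots are
// addressed relative to frame_content_offset(). A frame might be built roughly
// like this; the frame size, function and values are hypothetical.
//
//   uint32_t frame_size = ...;  // size of the frame contents in bytes
//   FrameDescription* frame =
//       new(frame_size) FrameDescription(frame_size, function);
//   frame->SetPc(continuation_pc);
//   frame->SetTop(top_address);
//   frame->SetFrameSlot(0, reinterpret_cast<intptr_t>(receiver));
//   delete frame;  // releases the header and the contents together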


class TranslationBuffer BASE_EMBEDDED {
 public:
  TranslationBuffer() : contents_(256) { }

  int CurrentIndex() const { return contents_.length(); }
  void Add(int32_t value);

  Handle<ByteArray> CreateByteArray();

 private:
  ZoneList<uint8_t> contents_;
};


class TranslationIterator BASE_EMBEDDED {
 public:
  TranslationIterator(ByteArray* buffer, int index)
      : buffer_(buffer), index_(index) {
    ASSERT(index >= 0 && index < buffer->length());
  }

  int32_t Next();

  bool HasNext() const { return index_ >= 0; }

  void Done() { index_ = -1; }

  void Skip(int n) {
    for (int i = 0; i < n; i++) Next();
  }

 private:
  ByteArray* buffer_;
  int index_;
};
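
// Illustrative sketch (not part of the original header): walking a serialized
// translation. Note that HasNext() only becomes false once Done() has been
// called; the caller is expected to know how many values to read (see
// Translation::NumberOfOperandsFor below). The byte array and start index are
// hypothetical.
//
//   TranslationIterator it(translation_byte_array, start_index);
//   int32_t opcode = it.Next();       // e.g. Translation::BEGIN
//   int32_t frame_count = it.Next();  // BEGIN's single operand
//   // ... read each following opcode and its operands the same way ...
//   it.Done();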


class Translation BASE_EMBEDDED {
 public:
  enum Opcode {
    BEGIN,
    FRAME,
    REGISTER,
    INT32_REGISTER,
    DOUBLE_REGISTER,
    STACK_SLOT,
    INT32_STACK_SLOT,
    DOUBLE_STACK_SLOT,
    LITERAL,
    ARGUMENTS_OBJECT,

    // A prefix indicating that the next command is a duplicate of the one
    // that follows it.
    DUPLICATE
  };

  Translation(TranslationBuffer* buffer, int frame_count)
      : buffer_(buffer),
        index_(buffer->CurrentIndex()) {
    buffer_->Add(BEGIN);
    buffer_->Add(frame_count);
  }

  int index() const { return index_; }

  // Commands.
  void BeginFrame(int node_id, int literal_id, unsigned height);
  void StoreRegister(Register reg);
  void StoreInt32Register(Register reg);
  void StoreDoubleRegister(DoubleRegister reg);
  void StoreStackSlot(int index);
  void StoreInt32StackSlot(int index);
  void StoreDoubleStackSlot(int index);
  void StoreLiteral(int literal_id);
  void StoreArgumentsObject();
  void MarkDuplicate();

  static int NumberOfOperandsFor(Opcode opcode);

#ifdef OBJECT_PRINT
  static const char* StringFor(Opcode opcode);
#endif

 private:
  TranslationBuffer* buffer_;
  int index_;
};
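
// Illustrative sketch (not part of the original header): recording the
// translation for a single optimized frame during code generation. The node
// id, literal id, height, register and slot index are hypothetical; the
// resulting ByteArray is what TranslationIterator reads back.
//
//   TranslationBuffer buffer;
//   Translation translation(&buffer, 1);  // one output frame
//   translation.BeginFrame(node_id, literal_id, height);
//   translation.StoreRegister(reg);            // tagged value held in reg
//   translation.StoreInt32StackSlot(slot);     // untagged int32 spill slot
//   translation.StoreLiteral(literal_id);      // value from the literal array
//   Handle<ByteArray> data = buffer.CreateByteArray();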


// Linked list holding deoptimizing code objects. The deoptimizing code objects
// are kept as weak handles until they are no longer activated on the stack.
class DeoptimizingCodeListNode : public Malloced {
 public:
  explicit DeoptimizingCodeListNode(Code* code);
  ~DeoptimizingCodeListNode();

  DeoptimizingCodeListNode* next() const { return next_; }
  void set_next(DeoptimizingCodeListNode* next) { next_ = next; }
  Handle<Code> code() const { return code_; }

 private:
  // Global (weak) handle to the deoptimizing code object.
  Handle<Code> code_;

  // Next pointer for linked list.
  DeoptimizingCodeListNode* next_;
};


} }  // namespace v8::internal

#endif  // V8_DEOPTIMIZER_H_