// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

28#ifndef V8_DEOPTIMIZER_H_
29#define V8_DEOPTIMIZER_H_
30
31#include "v8.h"
32
33#include "macro-assembler.h"
34#include "zone-inl.h"
35
36
37namespace v8 {
38namespace internal {
39
40class FrameDescription;
41class TranslationIterator;
42class DeoptimizingCodeListNode;
43
44
45class ValueDescription BASE_EMBEDDED {
46 public:
47 explicit ValueDescription(int index) : stack_index_(index) { }
48 int stack_index() const { return stack_index_; }
49
50 private:
51 // Offset relative to the top of the stack.
52 int stack_index_;
53};
54
55
56class ValueDescriptionInteger32: public ValueDescription {
57 public:
58 ValueDescriptionInteger32(int index, int32_t value)
59 : ValueDescription(index), int32_value_(value) { }
60 int32_t int32_value() const { return int32_value_; }
61
62 private:
63 // Raw value.
64 int32_t int32_value_;
65};
66
67
68class ValueDescriptionDouble: public ValueDescription {
69 public:
70 ValueDescriptionDouble(int index, double value)
71 : ValueDescription(index), double_value_(value) { }
72 double double_value() const { return double_value_; }
73
74 private:
75 // Raw value.
76 double double_value_;
77};
78
79
// Visitor interface used when iterating over the optimized functions of
// a global context (see Deoptimizer::VisitAllOptimizedFunctions*).
class OptimizedFunctionVisitor BASE_EMBEDDED {
 public:
  virtual ~OptimizedFunctionVisitor() {}

  // Function which is called before iteration of any optimized functions
  // from given global context.
  virtual void EnterContext(Context* context) = 0;

  // Called once for each optimized function visited.
  virtual void VisitFunction(JSFunction* function) = 0;

  // Function which is called after iteration of all optimized functions
  // from given global context.
  virtual void LeaveContext(Context* context) = 0;
};
94
95
class Deoptimizer;


// Per-isolate bookkeeping for the deoptimizer: the generated entry-code
// chunks, the deoptimizer currently in flight, and the list of
// deoptimized code objects that are still referenced from the stack.
class DeoptimizerData {
 public:
  DeoptimizerData();
  ~DeoptimizerData();

 private:
  // Code chunks holding the generated deoptimization entries, one chunk
  // for eager and one for lazy deoptimization (see Deoptimizer::CreateCode).
  LargeObjectChunk* eager_deoptimization_entry_code_;
  LargeObjectChunk* lazy_deoptimization_entry_code_;
  // The deoptimizer handed over via Deoptimizer::New and not yet picked up
  // again with Deoptimizer::Grab — presumably; confirm against the .cc.
  Deoptimizer* current_;

  // List of deoptimized code which still have references from active stack
  // frames. These code objects are needed by the deoptimizer when
  // deoptimizing a frame for which the code object for the function has
  // been changed from the code present when deoptimizing was done.
  DeoptimizingCodeListNode* deoptimizing_code_list_;

  friend class Deoptimizer;

  DISALLOW_COPY_AND_ASSIGN(DeoptimizerData);
};
119
120
// Drives deoptimization of optimized code: a Deoptimizer is created with
// New(), computes the unoptimized output frames that replace the
// optimized input frame (ComputeOutputFrames), and is later retrieved
// with Grab().  The heavy lifting is implemented in the per-architecture
// deoptimizer .cc files.
class Deoptimizer : public Malloced {
 public:
  // The way a function is being deoptimized.
  enum BailoutType {
    EAGER,
    LAZY,
    OSR
  };

  // Number of output frames computed for the input frame.
  int output_count() const { return output_count_; }

  // Creates a Deoptimizer for |function|.  |from| and |fp_to_sp_delta|
  // describe where deoptimization was triggered — see the .cc for the
  // exact contract.
  static Deoptimizer* New(JSFunction* function,
                          BailoutType type,
                          unsigned bailout_id,
                          Address from,
                          int fp_to_sp_delta,
                          Isolate* isolate);
  // Retrieves (and takes ownership of) the deoptimizer created by New().
  static Deoptimizer* Grab(Isolate* isolate);

  // Makes sure that there is enough room in the relocation
  // information of a code object to perform lazy deoptimization
  // patching. If there is not enough room a new relocation
  // information object is allocated and comments are added until it
  // is big enough.
  static void EnsureRelocSpaceForLazyDeoptimization(Handle<Code> code);

  // Deoptimize the function now. Its current optimized code will never be run
  // again and any activations of the optimized code will get deoptimized when
  // execution returns.
  static void DeoptimizeFunction(JSFunction* function);

  // Deoptimize all functions in the heap.
  static void DeoptimizeAll();

  static void DeoptimizeGlobalObject(JSObject* object);

  static void VisitAllOptimizedFunctionsForContext(
      Context* context, OptimizedFunctionVisitor* visitor);

  static void VisitAllOptimizedFunctionsForGlobalObject(
      JSObject* object, OptimizedFunctionVisitor* visitor);

  static void VisitAllOptimizedFunctions(OptimizedFunctionVisitor* visitor);

  // The size in bytes of the code required at a lazy deopt patch site.
  static int patch_size();

  // Patch all stack guard checks in the unoptimized code to
  // unconditionally call replacement_code.
  static void PatchStackCheckCode(Code* unoptimized_code,
                                  Code* check_code,
                                  Code* replacement_code);

  // Patch stack guard check at instruction before pc_after in
  // the unoptimized code to unconditionally call replacement_code.
  static void PatchStackCheckCodeAt(Address pc_after,
                                    Code* check_code,
                                    Code* replacement_code);

  // Change all patched stack guard checks in the unoptimized code
  // back to a normal stack guard check.
  static void RevertStackCheckCode(Code* unoptimized_code,
                                   Code* check_code,
                                   Code* replacement_code);

  // Change all patched stack guard checks in the unoptimized code
  // back to a normal stack guard check.
  static void RevertStackCheckCodeAt(Address pc_after,
                                     Code* check_code,
                                     Code* replacement_code);

  ~Deoptimizer();

  void InsertHeapNumberValues(int index, JavaScriptFrame* frame);

  // Fills in the output frame descriptions for |deoptimizer|.
  static void ComputeOutputFrames(Deoptimizer* deoptimizer, Isolate* isolate);

  // Maps between deoptimization entry ids and the addresses of the
  // generated entry code.
  static Address GetDeoptimizationEntry(int id, BailoutType type);
  static int GetDeoptimizationId(Address addr, BailoutType type);
  static int GetOutputInfo(DeoptimizationOutputData* data,
                           unsigned node_id,
                           SharedFunctionInfo* shared);

  // Code generation support: field offsets used by generated code to
  // access this object directly.
  static int input_offset() { return OFFSET_OF(Deoptimizer, input_); }
  static int output_count_offset() {
    return OFFSET_OF(Deoptimizer, output_count_);
  }
  static int output_offset() { return OFFSET_OF(Deoptimizer, output_); }

  static int GetDeoptimizedCodeCount(Isolate* isolate);

  // Sentinel returned when an address is not a deoptimization entry.
  static const int kNotDeoptimizationEntry = -1;

  // Generators for the deoptimization entry code.
  class EntryGenerator BASE_EMBEDDED {
   public:
    EntryGenerator(MacroAssembler* masm, BailoutType type)
        : masm_(masm), type_(type) { }
    virtual ~EntryGenerator() { }

    void Generate();

   protected:
    MacroAssembler* masm() const { return masm_; }
    BailoutType type() const { return type_; }

    // Hook for subclasses to emit code before the common entry sequence.
    virtual void GeneratePrologue() { }

   private:
    MacroAssembler* masm_;
    Deoptimizer::BailoutType type_;
  };

  // Entry generator that emits a table of |count| entries.
  class TableEntryGenerator : public EntryGenerator {
   public:
    TableEntryGenerator(MacroAssembler* masm, BailoutType type, int count)
        : EntryGenerator(masm, type), count_(count) { }

   protected:
    virtual void GeneratePrologue();

   private:
    int count() const { return count_; }

    int count_;
  };

 private:
  // Maximum number of deoptimization entries generated per chunk.
  static const int kNumberOfEntries = 4096;

  Deoptimizer(Isolate* isolate,
              JSFunction* function,
              BailoutType type,
              unsigned bailout_id,
              Address from,
              int fp_to_sp_delta);
  void DeleteFrameDescriptions();

  void DoComputeOutputFrames();
  void DoComputeOsrOutputFrame();
  void DoComputeFrame(TranslationIterator* iterator, int frame_index);
  void DoTranslateCommand(TranslationIterator* iterator,
                          int frame_index,
                          unsigned output_offset);
  // Translate a command for OSR.  Updates the input offset to be used for
  // the next command.  Returns false if translation of the command failed
  // (e.g., a number conversion failed) and may or may not have updated the
  // input offset.
  bool DoOsrTranslateCommand(TranslationIterator* iterator,
                             int* input_offset);

  // Frame-size computations; see the .cc for what "fixed", "incoming" and
  // "outgoing" cover exactly.
  unsigned ComputeInputFrameSize() const;
  unsigned ComputeFixedSize(JSFunction* function) const;

  unsigned ComputeIncomingArgumentSize(JSFunction* function) const;
  unsigned ComputeOutgoingArgumentSize() const;

  Object* ComputeLiteral(int index) const;

  void InsertHeapNumberValue(JavaScriptFrame* frame,
                             int stack_index,
                             double val,
                             int extra_slot_count);

  // Record a raw value to be materialized into the given output frame slot.
  void AddInteger32Value(int frame_index, int slot_index, int32_t value);
  void AddDoubleValue(int frame_index, int slot_index, double value);

  // Allocates and fills a chunk with deoptimization entry code.
  static LargeObjectChunk* CreateCode(BailoutType type);
  static void GenerateDeoptimizationEntries(
      MacroAssembler* masm, int count, BailoutType type);

  // Weak handle callback for deoptimizing code objects.
  static void HandleWeakDeoptimizedCode(
      v8::Persistent<v8::Value> obj, void* data);
  static Code* FindDeoptimizingCodeFromAddress(Address addr);
  static void RemoveDeoptimizingCode(Code* code);

  Isolate* isolate_;
  JSFunction* function_;
  Code* optimized_code_;
  unsigned bailout_id_;
  BailoutType bailout_type_;
  // Address deoptimization was requested from — TODO confirm in the .cc.
  Address from_;
  int fp_to_sp_delta_;

  // Input frame description.
  FrameDescription* input_;
  // Number of output frames.
  int output_count_;
  // Array of output frame descriptions.
  FrameDescription** output_;

  // Raw values collected via AddInteger32Value/AddDoubleValue.
  List<ValueDescriptionInteger32>* integer32_values_;
  List<ValueDescriptionDouble>* double_values_;

  static int table_entry_size_;

  friend class FrameDescription;
  friend class DeoptimizingCodeListNode;
};
321
322
// Describes one stack frame: register values, bookkeeping slots (top,
// pc, fp, state, continuation) and a variable-sized frame-content area.
// The object is over-allocated (see operator new) so that frame_content_
// extends to hold the whole frame; field offsets are exposed for direct
// access from generated code, so the member layout must not change.
class FrameDescription {
 public:
  FrameDescription(uint32_t frame_size,
                   JSFunction* function);

  // Over-allocates by frame_size so the trailing frame_content_ array can
  // hold the frame's contents.
  void* operator new(size_t size, uint32_t frame_size) {
    // Subtracts kPointerSize, as the member frame_content_ already supplies
    // the first element of the area to store the frame.
    // NOTE(review): the malloc result is not checked for NULL — confirm
    // callers can tolerate allocation failure here.
    return malloc(size + frame_size - kPointerSize);
  }

  void operator delete(void* description) {
    free(description);
  }

  // Frame size in bytes.
  intptr_t GetFrameSize() const { return frame_size_; }

  JSFunction* GetFunction() const { return function_; }

  unsigned GetOffsetFromSlotIndex(Deoptimizer* deoptimizer, int slot_index);

  // Byte-offset-based access to the frame-content area.
  intptr_t GetFrameSlot(unsigned offset) {
    return *GetFrameSlotPointer(offset);
  }

  double GetDoubleFrameSlot(unsigned offset) {
    return *reinterpret_cast<double*>(GetFrameSlotPointer(offset));
  }

  void SetFrameSlot(unsigned offset, intptr_t value) {
    *GetFrameSlotPointer(offset) = value;
  }

  // Accessors for the recorded general-purpose register values.
  intptr_t GetRegister(unsigned n) const {
    ASSERT(n < ARRAY_SIZE(registers_));
    return registers_[n];
  }

  double GetDoubleRegister(unsigned n) const {
    ASSERT(n < ARRAY_SIZE(double_registers_));
    return double_registers_[n];
  }

  void SetRegister(unsigned n, intptr_t value) {
    ASSERT(n < ARRAY_SIZE(registers_));
    registers_[n] = value;
  }

  void SetDoubleRegister(unsigned n, double value) {
    ASSERT(n < ARRAY_SIZE(double_registers_));
    double_registers_[n] = value;
  }

  intptr_t GetTop() const { return top_; }
  void SetTop(intptr_t top) { top_ = top; }

  intptr_t GetPc() const { return pc_; }
  void SetPc(intptr_t pc) { pc_ = pc; }

  intptr_t GetFp() const { return fp_; }
  void SetFp(intptr_t fp) { fp_ = fp; }

  Smi* GetState() const { return state_; }
  void SetState(Smi* state) { state_ = state; }

  void SetContinuation(intptr_t pc) { continuation_ = pc; }

  // Field offsets used by generated code to address this object directly.
  static int registers_offset() {
    return OFFSET_OF(FrameDescription, registers_);
  }

  static int double_registers_offset() {
    return OFFSET_OF(FrameDescription, double_registers_);
  }

  static int frame_size_offset() {
    return OFFSET_OF(FrameDescription, frame_size_);
  }

  static int pc_offset() {
    return OFFSET_OF(FrameDescription, pc_);
  }

  static int state_offset() {
    return OFFSET_OF(FrameDescription, state_);
  }

  static int continuation_offset() {
    return OFFSET_OF(FrameDescription, continuation_);
  }

  static int frame_content_offset() {
    return OFFSET_OF(FrameDescription, frame_content_);
  }

 private:
  // Marker value — presumably used to zap uninitialized slots; confirm
  // against the .cc.
  static const uint32_t kZapUint32 = 0xbeeddead;

  uintptr_t frame_size_;  // Number of bytes.
  JSFunction* function_;
  intptr_t registers_[Register::kNumRegisters];
  double double_registers_[DoubleRegister::kNumAllocatableRegisters];
  intptr_t top_;
  intptr_t pc_;
  intptr_t fp_;
  Smi* state_;

  // Continuation is the PC where the execution continues after
  // deoptimizing.
  intptr_t continuation_;

  // This must be at the end of the object as the object is allocated larger
  // than its definition indicates, to extend this array.
  intptr_t frame_content_[1];

  intptr_t* GetFrameSlotPointer(unsigned offset) {
    ASSERT(offset < frame_size_);
    return reinterpret_cast<intptr_t*>(
        reinterpret_cast<Address>(this) + frame_content_offset() + offset);
  }
};
444
445
// Accumulates the byte-encoded translation commands emitted via the
// Translation helper below.
class TranslationBuffer BASE_EMBEDDED {
 public:
  TranslationBuffer() : contents_(256) { }

  // Index of the next byte to be written; used to address a translation
  // within the buffer.
  int CurrentIndex() const { return contents_.length(); }
  void Add(int32_t value);

  // Materializes the accumulated bytes as a heap ByteArray (see the .cc).
  Handle<ByteArray> CreateByteArray();

 private:
  ZoneList<uint8_t> contents_;
};
458
459
// Forward iterator over a translation byte stream stored in a ByteArray
// (as produced by TranslationBuffer::CreateByteArray).
class TranslationIterator BASE_EMBEDDED {
 public:
  // |index| must lie inside |buffer|.
  TranslationIterator(ByteArray* buffer, int index)
      : buffer_(buffer), index_(index) {
    ASSERT(index >= 0 && index < buffer->length());
  }

  // Decodes and returns the next value (implemented in the .cc).
  int32_t Next();

  // True until Done() is called; Done() marks exhaustion by setting
  // index_ to -1.
  bool HasNext() const { return index_ >= 0; }

  void Done() { index_ = -1; }

  // Skips the next n decoded values.
  void Skip(int n) {
    for (int i = 0; i < n; i++) Next();
  }

 private:
  ByteArray* buffer_;
  int index_;  // Current position in buffer_; -1 once Done().
};
481
482
// Helper for emitting a translation — the recipe that tells the
// deoptimizer how to rebuild unoptimized frames from an optimized one.
// Commands are appended to a TranslationBuffer; the resulting byte
// stream is addressed by index().
class Translation BASE_EMBEDDED {
 public:
  enum Opcode {
    BEGIN,
    FRAME,
    REGISTER,
    INT32_REGISTER,
    DOUBLE_REGISTER,
    STACK_SLOT,
    INT32_STACK_SLOT,
    DOUBLE_STACK_SLOT,
    LITERAL,
    ARGUMENTS_OBJECT,

    // A prefix indicating that the next command is a duplicate of the one
    // that follows it.
    DUPLICATE
  };

  // Starts a translation of |frame_count| frames by emitting BEGIN.
  Translation(TranslationBuffer* buffer, int frame_count)
      : buffer_(buffer),
        index_(buffer->CurrentIndex()) {
    buffer_->Add(BEGIN);
    buffer_->Add(frame_count);
  }

  // Index of this translation's BEGIN command within the buffer.
  int index() const { return index_; }

  // Commands (implemented in the .cc; each appends an opcode plus its
  // operands to the buffer).
  void BeginFrame(int node_id, int literal_id, unsigned height);
  void StoreRegister(Register reg);
  void StoreInt32Register(Register reg);
  void StoreDoubleRegister(DoubleRegister reg);
  void StoreStackSlot(int index);
  void StoreInt32StackSlot(int index);
  void StoreDoubleStackSlot(int index);
  void StoreLiteral(int literal_id);
  void StoreArgumentsObject();
  void MarkDuplicate();

  static int NumberOfOperandsFor(Opcode opcode);

#ifdef OBJECT_PRINT
  static const char* StringFor(Opcode opcode);
#endif

 private:
  TranslationBuffer* buffer_;
  int index_;
};
533
534
// Linked list holding deoptimizing code objects. The deoptimizing code
// objects are kept as weak handles until they are no longer activated on
// the stack.
class DeoptimizingCodeListNode : public Malloced {
 public:
  explicit DeoptimizingCodeListNode(Code* code);
  ~DeoptimizingCodeListNode();

  DeoptimizingCodeListNode* next() const { return next_; }
  void set_next(DeoptimizingCodeListNode* next) { next_ = next; }
  Handle<Code> code() const { return code_; }

 private:
  // Global (weak) handle to the deoptimizing code object.
  Handle<Code> code_;

  // Next pointer for linked list.
  DeoptimizingCodeListNode* next_;
};
553
554
555} } // namespace v8::internal
556
557#endif // V8_DEOPTIMIZER_H_