// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_DEOPTIMIZER_H_
#define V8_DEOPTIMIZER_H_

#include "v8.h"

#include "macro-assembler.h"
#include "zone-inl.h"


namespace v8 {
namespace internal {

class FrameDescription;
class TranslationIterator;
class DeoptimizingCodeListNode;

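// Descriptor recording a heap number that has to be materialized during
// deoptimization: the frame slot address that should receive the boxed value
// and the double value itself (see Deoptimizer::AddDoubleValue() and
// Deoptimizer::MaterializeHeapNumbers() below).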
class HeapNumberMaterializationDescriptor BASE_EMBEDDED {
 public:
  HeapNumberMaterializationDescriptor(Address slot_address, double val)
      : slot_address_(slot_address), val_(val) { }

  Address slot_address() const { return slot_address_; }
  double value() const { return val_; }

 private:
  Address slot_address_;
  double val_;
};

class OptimizedFunctionVisitor BASE_EMBEDDED {
 public:
  virtual ~OptimizedFunctionVisitor() {}

  // Function which is called before iteration of any optimized functions
  // from the given global context.
  virtual void EnterContext(Context* context) = 0;

  virtual void VisitFunction(JSFunction* function) = 0;

  // Function which is called after iteration of all optimized functions
  // from the given global context.
  virtual void LeaveContext(Context* context) = 0;
};
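
// A minimal sketch of a concrete visitor built on the interface above; the
// CountOptimizedFunctions name is illustrative and not part of the V8 API:
//
//   class CountOptimizedFunctions : public OptimizedFunctionVisitor {
//    public:
//     CountOptimizedFunctions() : count_(0) { }
//     virtual void EnterContext(Context* context) { }
//     virtual void VisitFunction(JSFunction* function) { count_++; }
//     virtual void LeaveContext(Context* context) { }
//     int count() const { return count_; }
//    private:
//     int count_;
//   };
//
//   CountOptimizedFunctions visitor;
//   Deoptimizer::VisitAllOptimizedFunctions(&visitor);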


class Deoptimizer;


class DeoptimizerData {
 public:
  DeoptimizerData();
  ~DeoptimizerData();

 private:
  LargeObjectChunk* eager_deoptimization_entry_code_;
  LargeObjectChunk* lazy_deoptimization_entry_code_;
  Deoptimizer* current_;

  // List of deoptimized code objects which still have references from active
  // stack frames. These code objects are needed by the deoptimizer when
  // deoptimizing a frame for which the code object for the function has been
  // changed from the code present when the deoptimization was done.
  DeoptimizingCodeListNode* deoptimizing_code_list_;

  friend class Deoptimizer;

  DISALLOW_COPY_AND_ASSIGN(DeoptimizerData);
};


class Deoptimizer : public Malloced {
 public:
  enum BailoutType {
    EAGER,
    LAZY,
    OSR
  };

  int output_count() const { return output_count_; }

  static Deoptimizer* New(JSFunction* function,
                          BailoutType type,
                          unsigned bailout_id,
                          Address from,
                          int fp_to_sp_delta,
                          Isolate* isolate);
  static Deoptimizer* Grab(Isolate* isolate);

  // Makes sure that there is enough room in the relocation
  // information of a code object to perform lazy deoptimization
  // patching. If there is not enough room a new relocation
  // information object is allocated and comments are added until it
  // is big enough.
  static void EnsureRelocSpaceForLazyDeoptimization(Handle<Code> code);

  // Deoptimize the function now. Its current optimized code will never be run
  // again and any activations of the optimized code will get deoptimized when
  // execution returns.
  static void DeoptimizeFunction(JSFunction* function);

  // Deoptimize all functions in the heap.
  static void DeoptimizeAll();

  static void DeoptimizeGlobalObject(JSObject* object);

  static void VisitAllOptimizedFunctionsForContext(
      Context* context, OptimizedFunctionVisitor* visitor);

  static void VisitAllOptimizedFunctionsForGlobalObject(
      JSObject* object, OptimizedFunctionVisitor* visitor);

  static void VisitAllOptimizedFunctions(OptimizedFunctionVisitor* visitor);

  // The size in bytes of the code required at a lazy deopt patch site.
  static int patch_size();

  // Patch all stack guard checks in the unoptimized code to
  // unconditionally call replacement_code.
  static void PatchStackCheckCode(Code* unoptimized_code,
                                  Code* check_code,
                                  Code* replacement_code);

  // Patch the stack guard check at the instruction before pc_after in
  // the unoptimized code to unconditionally call replacement_code.
  static void PatchStackCheckCodeAt(Address pc_after,
                                    Code* check_code,
                                    Code* replacement_code);

  // Change all patched stack guard checks in the unoptimized code
  // back to a normal stack guard check.
  static void RevertStackCheckCode(Code* unoptimized_code,
                                   Code* check_code,
                                   Code* replacement_code);

  // Change the patched stack guard check at pc_after in the unoptimized
  // code back to a normal stack guard check.
  static void RevertStackCheckCodeAt(Address pc_after,
                                     Code* check_code,
                                     Code* replacement_code);

  ~Deoptimizer();

  void MaterializeHeapNumbers();

  static void ComputeOutputFrames(Deoptimizer* deoptimizer);

  static Address GetDeoptimizationEntry(int id, BailoutType type);
  static int GetDeoptimizationId(Address addr, BailoutType type);
  static int GetOutputInfo(DeoptimizationOutputData* data,
                           unsigned node_id,
                           SharedFunctionInfo* shared);

  // Code generation support.
  static int input_offset() { return OFFSET_OF(Deoptimizer, input_); }
  static int output_count_offset() {
    return OFFSET_OF(Deoptimizer, output_count_);
  }
  static int output_offset() { return OFFSET_OF(Deoptimizer, output_); }

  static int GetDeoptimizedCodeCount(Isolate* isolate);

  static const int kNotDeoptimizationEntry = -1;

  // Generators for the deoptimization entry code.
  class EntryGenerator BASE_EMBEDDED {
   public:
    EntryGenerator(MacroAssembler* masm, BailoutType type)
        : masm_(masm), type_(type) { }
    virtual ~EntryGenerator() { }

    void Generate();

   protected:
    MacroAssembler* masm() const { return masm_; }
    BailoutType type() const { return type_; }

    virtual void GeneratePrologue() { }

   private:
    MacroAssembler* masm_;
    Deoptimizer::BailoutType type_;
  };

  class TableEntryGenerator : public EntryGenerator {
   public:
    TableEntryGenerator(MacroAssembler* masm, BailoutType type, int count)
        : EntryGenerator(masm, type), count_(count) { }

   protected:
    virtual void GeneratePrologue();

   private:
    int count() const { return count_; }

    int count_;
  };

 private:
  static const int kNumberOfEntries = 4096;

  Deoptimizer(Isolate* isolate,
              JSFunction* function,
              BailoutType type,
              unsigned bailout_id,
              Address from,
              int fp_to_sp_delta);
  void DeleteFrameDescriptions();

  void DoComputeOutputFrames();
  void DoComputeOsrOutputFrame();
  void DoComputeFrame(TranslationIterator* iterator, int frame_index);
  void DoTranslateCommand(TranslationIterator* iterator,
                          int frame_index,
                          unsigned output_offset);
  // Translate a command for OSR. Updates the input offset to be used for
  // the next command. Returns false if translation of the command failed
  // (e.g., a number conversion failed) and may or may not have updated the
  // input offset.
  bool DoOsrTranslateCommand(TranslationIterator* iterator,
                             int* input_offset);

  unsigned ComputeInputFrameSize() const;
  unsigned ComputeFixedSize(JSFunction* function) const;

  unsigned ComputeIncomingArgumentSize(JSFunction* function) const;
  unsigned ComputeOutgoingArgumentSize() const;

  Object* ComputeLiteral(int index) const;

  void AddDoubleValue(intptr_t slot_address, double value);

  static LargeObjectChunk* CreateCode(BailoutType type);
  static void GenerateDeoptimizationEntries(
      MacroAssembler* masm, int count, BailoutType type);

  // Weak handle callback for deoptimizing code objects.
  static void HandleWeakDeoptimizedCode(
      v8::Persistent<v8::Value> obj, void* data);
  static Code* FindDeoptimizingCodeFromAddress(Address addr);
  static void RemoveDeoptimizingCode(Code* code);

  Isolate* isolate_;
  JSFunction* function_;
  Code* optimized_code_;
  unsigned bailout_id_;
  BailoutType bailout_type_;
  Address from_;
  int fp_to_sp_delta_;

  // Input frame description.
  FrameDescription* input_;
  // Number of output frames.
  int output_count_;
  // Array of output frame descriptions.
  FrameDescription** output_;

  List<HeapNumberMaterializationDescriptor> deferred_heap_numbers_;

  static int table_entry_size_;

  friend class FrameDescription;
  friend class DeoptimizingCodeListNode;
};


class FrameDescription {
 public:
  FrameDescription(uint32_t frame_size,
                   JSFunction* function);

  void* operator new(size_t size, uint32_t frame_size) {
    // Subtracts kPointerSize, as the member frame_content_ already supplies
    // the first element of the area to store the frame.
    return malloc(size + frame_size - kPointerSize);
  }

  void operator delete(void* description) {
    free(description);
  }
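  // Because frame_content_ is the last member and overlays the first word of
  // the frame area (see the comment in operator new above), a
  // FrameDescription must be allocated with the class-specific operator new.
  // A sketch, assuming frame_size and function were computed by the caller:
  //
  //   FrameDescription* descriptor =
  //       new(frame_size) FrameDescription(frame_size, function);
  //   ...
  //   delete descriptor;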

  intptr_t GetFrameSize() const { return frame_size_; }

  JSFunction* GetFunction() const { return function_; }

  unsigned GetOffsetFromSlotIndex(Deoptimizer* deoptimizer, int slot_index);

  intptr_t GetFrameSlot(unsigned offset) {
    return *GetFrameSlotPointer(offset);
  }

  double GetDoubleFrameSlot(unsigned offset) {
    return *reinterpret_cast<double*>(GetFrameSlotPointer(offset));
  }

  void SetFrameSlot(unsigned offset, intptr_t value) {
    *GetFrameSlotPointer(offset) = value;
  }

  intptr_t GetRegister(unsigned n) const {
    ASSERT(n < ARRAY_SIZE(registers_));
    return registers_[n];
  }

  double GetDoubleRegister(unsigned n) const {
    ASSERT(n < ARRAY_SIZE(double_registers_));
    return double_registers_[n];
  }

  void SetRegister(unsigned n, intptr_t value) {
    ASSERT(n < ARRAY_SIZE(registers_));
    registers_[n] = value;
  }

  void SetDoubleRegister(unsigned n, double value) {
    ASSERT(n < ARRAY_SIZE(double_registers_));
    double_registers_[n] = value;
  }

  intptr_t GetTop() const { return top_; }
  void SetTop(intptr_t top) { top_ = top; }

  intptr_t GetPc() const { return pc_; }
  void SetPc(intptr_t pc) { pc_ = pc; }

  intptr_t GetFp() const { return fp_; }
  void SetFp(intptr_t fp) { fp_ = fp; }

  Smi* GetState() const { return state_; }
  void SetState(Smi* state) { state_ = state; }

  void SetContinuation(intptr_t pc) { continuation_ = pc; }

  static int registers_offset() {
    return OFFSET_OF(FrameDescription, registers_);
  }

  static int double_registers_offset() {
    return OFFSET_OF(FrameDescription, double_registers_);
  }

  static int frame_size_offset() {
    return OFFSET_OF(FrameDescription, frame_size_);
  }

  static int pc_offset() {
    return OFFSET_OF(FrameDescription, pc_);
  }

  static int state_offset() {
    return OFFSET_OF(FrameDescription, state_);
  }

  static int continuation_offset() {
    return OFFSET_OF(FrameDescription, continuation_);
  }

  static int frame_content_offset() {
    return OFFSET_OF(FrameDescription, frame_content_);
  }

 private:
  static const uint32_t kZapUint32 = 0xbeeddead;

  uintptr_t frame_size_;  // Number of bytes.
  JSFunction* function_;
  intptr_t registers_[Register::kNumRegisters];
  double double_registers_[DoubleRegister::kNumAllocatableRegisters];
  intptr_t top_;
  intptr_t pc_;
  intptr_t fp_;
  Smi* state_;

  // Continuation is the PC where the execution continues after
  // deoptimizing.
  intptr_t continuation_;

  // This must be at the end of the object as the object is allocated larger
  // than its definition indicates, in order to extend this array.
  intptr_t frame_content_[1];

  intptr_t* GetFrameSlotPointer(unsigned offset) {
    ASSERT(offset < frame_size_);
    return reinterpret_cast<intptr_t*>(
        reinterpret_cast<Address>(this) + frame_content_offset() + offset);
  }
};


class TranslationBuffer BASE_EMBEDDED {
 public:
  TranslationBuffer() : contents_(256) { }

  int CurrentIndex() const { return contents_.length(); }
  void Add(int32_t value);

  Handle<ByteArray> CreateByteArray();

 private:
  ZoneList<uint8_t> contents_;
};


class TranslationIterator BASE_EMBEDDED {
 public:
  TranslationIterator(ByteArray* buffer, int index)
      : buffer_(buffer), index_(index) {
    ASSERT(index >= 0 && index < buffer->length());
  }

  int32_t Next();

  bool HasNext() const { return index_ >= 0; }

  void Done() { index_ = -1; }

  void Skip(int n) {
    for (int i = 0; i < n; i++) Next();
  }

 private:
  ByteArray* buffer_;
  int index_;
};


class Translation BASE_EMBEDDED {
 public:
  enum Opcode {
    BEGIN,
    FRAME,
    REGISTER,
    INT32_REGISTER,
    DOUBLE_REGISTER,
    STACK_SLOT,
    INT32_STACK_SLOT,
    DOUBLE_STACK_SLOT,
    LITERAL,
    ARGUMENTS_OBJECT,

    // A prefix indicating that the next command is a duplicate of the one
    // that follows it.
    DUPLICATE
  };

  Translation(TranslationBuffer* buffer, int frame_count)
      : buffer_(buffer),
        index_(buffer->CurrentIndex()) {
    buffer_->Add(BEGIN);
    buffer_->Add(frame_count);
  }

  int index() const { return index_; }

  // Commands.
  void BeginFrame(int node_id, int literal_id, unsigned height);
  void StoreRegister(Register reg);
  void StoreInt32Register(Register reg);
  void StoreDoubleRegister(DoubleRegister reg);
  void StoreStackSlot(int index);
  void StoreInt32StackSlot(int index);
  void StoreDoubleStackSlot(int index);
  void StoreLiteral(int literal_id);
  void StoreArgumentsObject();
  void MarkDuplicate();

  static int NumberOfOperandsFor(Opcode opcode);

#ifdef OBJECT_PRINT
  static const char* StringFor(Opcode opcode);
#endif

 private:
  TranslationBuffer* buffer_;
  int index_;
};
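
// A sketch of how a translation might be recorded for a single frame using
// only the commands declared above; node_id, literal_id, height, reg and
// slot_index are placeholders supplied by the compiler:
//
//   TranslationBuffer buffer;
//   Translation translation(&buffer, 1);  // One frame in this translation.
//   translation.BeginFrame(node_id, literal_id, height);
//   translation.StoreRegister(reg);          // Value lives in a register.
//   translation.StoreStackSlot(slot_index);  // Value lives on the stack.
//   Handle<ByteArray> data = buffer.CreateByteArray();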


// Linked list holding deoptimizing code objects. The deoptimizing code objects
// are kept as weak handles until they are no longer activated on the stack.
class DeoptimizingCodeListNode : public Malloced {
 public:
  explicit DeoptimizingCodeListNode(Code* code);
  ~DeoptimizingCodeListNode();

  DeoptimizingCodeListNode* next() const { return next_; }
  void set_next(DeoptimizingCodeListNode* next) { next_ = next; }
  Handle<Code> code() const { return code_; }

 private:
  // Global (weak) handle to the deoptimizing code object.
  Handle<Code> code_;

  // Next pointer for linked list.
  DeoptimizingCodeListNode* next_;
};


class SlotRef BASE_EMBEDDED {
 public:
  enum SlotRepresentation {
    UNKNOWN,
    TAGGED,
    INT32,
    DOUBLE,
    LITERAL
  };

  SlotRef()
      : addr_(NULL), representation_(UNKNOWN) { }

  SlotRef(Address addr, SlotRepresentation representation)
      : addr_(addr), representation_(representation) { }

  explicit SlotRef(Object* literal)
      : literal_(literal), representation_(LITERAL) { }

  Handle<Object> GetValue() {
    switch (representation_) {
      case TAGGED:
        return Handle<Object>(Memory::Object_at(addr_));

      case INT32: {
        int value = Memory::int32_at(addr_);
        if (Smi::IsValid(value)) {
          return Handle<Object>(Smi::FromInt(value));
        } else {
          return Isolate::Current()->factory()->NewNumberFromInt(value);
        }
      }

      case DOUBLE: {
        double value = Memory::double_at(addr_);
        return Isolate::Current()->factory()->NewNumber(value);
      }

      case LITERAL:
        return literal_;

      default:
        UNREACHABLE();
        return Handle<Object>::null();
    }
  }

  static void ComputeSlotMappingForArguments(JavaScriptFrame* frame,
                                             int inlined_frame_index,
                                             Vector<SlotRef>* args_slots);

 private:
  Address addr_;
  Handle<Object> literal_;
  SlotRepresentation representation_;

  static Address SlotAddress(JavaScriptFrame* frame, int slot_index) {
    if (slot_index >= 0) {
      const int offset = JavaScriptFrameConstants::kLocal0Offset;
      return frame->fp() + offset - (slot_index * kPointerSize);
    } else {
      const int offset = JavaScriptFrameConstants::kLastParameterOffset;
      return frame->fp() + offset - ((slot_index + 1) * kPointerSize);
    }
  }

  static SlotRef ComputeSlotForNextArgument(TranslationIterator* iterator,
                                            DeoptimizationInputData* data,
                                            JavaScriptFrame* frame);
};


} }  // namespace v8::internal

#endif  // V8_DEOPTIMIZER_H_