// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_DEOPTIMIZER_H_
#define V8_DEOPTIMIZER_H_

#include "v8.h"

#include "allocation.h"
#include "macro-assembler.h"
#include "zone-inl.h"


namespace v8 {
namespace internal {

class FrameDescription;
class TranslationIterator;
class DeoptimizingCodeListNode;
class DeoptimizedFrameInfo;

Ben Murdoch8b112d22011-06-08 16:22:53 +010046class HeapNumberMaterializationDescriptor BASE_EMBEDDED {
Ben Murdochb0fe1622011-05-05 13:52:32 +010047 public:
Ben Murdoch8b112d22011-06-08 16:22:53 +010048 HeapNumberMaterializationDescriptor(Address slot_address, double val)
49 : slot_address_(slot_address), val_(val) { }
50
51 Address slot_address() const { return slot_address_; }
52 double value() const { return val_; }
Ben Murdochb0fe1622011-05-05 13:52:32 +010053
54 private:
Ben Murdoch8b112d22011-06-08 16:22:53 +010055 Address slot_address_;
56 double val_;
Ben Murdochb0fe1622011-05-05 13:52:32 +010057};
58
59
60class OptimizedFunctionVisitor BASE_EMBEDDED {
61 public:
62 virtual ~OptimizedFunctionVisitor() {}
63
64 // Function which is called before iteration of any optimized functions
65 // from given global context.
66 virtual void EnterContext(Context* context) = 0;
67
68 virtual void VisitFunction(JSFunction* function) = 0;
69
70 // Function which is called after iteration of all optimized functions
71 // from given global context.
72 virtual void LeaveContext(Context* context) = 0;
73};
74
75
class Deoptimizer;


79class DeoptimizerData {
80 public:
81 DeoptimizerData();
82 ~DeoptimizerData();
83
Ben Murdoch3fb3ca82011-12-02 17:19:32 +000084#ifdef ENABLE_DEBUGGER_SUPPORT
85 void Iterate(ObjectVisitor* v);
86#endif
87
Steve Block44f0eee2011-05-26 01:26:41 +010088 private:
89 LargeObjectChunk* eager_deoptimization_entry_code_;
90 LargeObjectChunk* lazy_deoptimization_entry_code_;
91 Deoptimizer* current_;
92
Ben Murdoch3fb3ca82011-12-02 17:19:32 +000093#ifdef ENABLE_DEBUGGER_SUPPORT
94 DeoptimizedFrameInfo* deoptimized_frame_info_;
95#endif
96
Steve Block44f0eee2011-05-26 01:26:41 +010097 // List of deoptimized code which still have references from active stack
98 // frames. These code objects are needed by the deoptimizer when deoptimizing
99 // a frame for which the code object for the function function has been
100 // changed from the code present when deoptimizing was done.
101 DeoptimizingCodeListNode* deoptimizing_code_list_;
102
103 friend class Deoptimizer;
104
105 DISALLOW_COPY_AND_ASSIGN(DeoptimizerData);
106};
107
108
Ben Murdochb0fe1622011-05-05 13:52:32 +0100109class Deoptimizer : public Malloced {
110 public:
111 enum BailoutType {
112 EAGER,
113 LAZY,
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000114 OSR,
115 // This last bailout type is not really a bailout, but used by the
116 // debugger to deoptimize stack frames to allow inspection.
117 DEBUGGER
Ben Murdochb0fe1622011-05-05 13:52:32 +0100118 };
119
120 int output_count() const { return output_count_; }
121
122 static Deoptimizer* New(JSFunction* function,
123 BailoutType type,
124 unsigned bailout_id,
125 Address from,
Steve Block44f0eee2011-05-26 01:26:41 +0100126 int fp_to_sp_delta,
127 Isolate* isolate);
128 static Deoptimizer* Grab(Isolate* isolate);
129
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000130#ifdef ENABLE_DEBUGGER_SUPPORT
131 // The returned object with information on the optimized frame needs to be
132 // freed before another one can be generated.
133 static DeoptimizedFrameInfo* DebuggerInspectableFrame(JavaScriptFrame* frame,
134 int frame_index,
135 Isolate* isolate);
136 static void DeleteDebuggerInspectableFrame(DeoptimizedFrameInfo* info,
137 Isolate* isolate);
138#endif
139
Steve Block44f0eee2011-05-26 01:26:41 +0100140 // Makes sure that there is enough room in the relocation
141 // information of a code object to perform lazy deoptimization
142 // patching. If there is not enough room a new relocation
143 // information object is allocated and comments are added until it
144 // is big enough.
145 static void EnsureRelocSpaceForLazyDeoptimization(Handle<Code> code);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100146
147 // Deoptimize the function now. Its current optimized code will never be run
148 // again and any activations of the optimized code will get deoptimized when
149 // execution returns.
150 static void DeoptimizeFunction(JSFunction* function);
151
152 // Deoptimize all functions in the heap.
153 static void DeoptimizeAll();
154
155 static void DeoptimizeGlobalObject(JSObject* object);
156
157 static void VisitAllOptimizedFunctionsForContext(
158 Context* context, OptimizedFunctionVisitor* visitor);
159
160 static void VisitAllOptimizedFunctionsForGlobalObject(
161 JSObject* object, OptimizedFunctionVisitor* visitor);
162
163 static void VisitAllOptimizedFunctions(OptimizedFunctionVisitor* visitor);
164
Steve Block1e0659c2011-05-24 12:43:12 +0100165 // The size in bytes of the code required at a lazy deopt patch site.
166 static int patch_size();
Ben Murdochb0fe1622011-05-05 13:52:32 +0100167
Steve Block1e0659c2011-05-24 12:43:12 +0100168 // Patch all stack guard checks in the unoptimized code to
169 // unconditionally call replacement_code.
170 static void PatchStackCheckCode(Code* unoptimized_code,
171 Code* check_code,
172 Code* replacement_code);
173
174 // Patch stack guard check at instruction before pc_after in
175 // the unoptimized code to unconditionally call replacement_code.
176 static void PatchStackCheckCodeAt(Address pc_after,
177 Code* check_code,
178 Code* replacement_code);
179
180 // Change all patched stack guard checks in the unoptimized code
181 // back to a normal stack guard check.
182 static void RevertStackCheckCode(Code* unoptimized_code,
183 Code* check_code,
184 Code* replacement_code);
185
186 // Change all patched stack guard checks in the unoptimized code
187 // back to a normal stack guard check.
188 static void RevertStackCheckCodeAt(Address pc_after,
189 Code* check_code,
190 Code* replacement_code);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100191
192 ~Deoptimizer();
193
Ben Murdoch8b112d22011-06-08 16:22:53 +0100194 void MaterializeHeapNumbers();
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000195#ifdef ENABLE_DEBUGGER_SUPPORT
196 void MaterializeHeapNumbersForDebuggerInspectableFrame(
197 Address top, uint32_t size, DeoptimizedFrameInfo* info);
198#endif
Ben Murdochb0fe1622011-05-05 13:52:32 +0100199
Ben Murdoch8b112d22011-06-08 16:22:53 +0100200 static void ComputeOutputFrames(Deoptimizer* deoptimizer);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100201
202 static Address GetDeoptimizationEntry(int id, BailoutType type);
203 static int GetDeoptimizationId(Address addr, BailoutType type);
Steve Block9fac8402011-05-12 15:51:54 +0100204 static int GetOutputInfo(DeoptimizationOutputData* data,
205 unsigned node_id,
206 SharedFunctionInfo* shared);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100207
Ben Murdochb0fe1622011-05-05 13:52:32 +0100208 // Code generation support.
209 static int input_offset() { return OFFSET_OF(Deoptimizer, input_); }
210 static int output_count_offset() {
211 return OFFSET_OF(Deoptimizer, output_count_);
212 }
213 static int output_offset() { return OFFSET_OF(Deoptimizer, output_); }
214
Steve Block44f0eee2011-05-26 01:26:41 +0100215 static int GetDeoptimizedCodeCount(Isolate* isolate);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100216
217 static const int kNotDeoptimizationEntry = -1;
218
219 // Generators for the deoptimization entry code.
220 class EntryGenerator BASE_EMBEDDED {
221 public:
222 EntryGenerator(MacroAssembler* masm, BailoutType type)
223 : masm_(masm), type_(type) { }
224 virtual ~EntryGenerator() { }
225
226 void Generate();
227
228 protected:
229 MacroAssembler* masm() const { return masm_; }
230 BailoutType type() const { return type_; }
231
232 virtual void GeneratePrologue() { }
233
234 private:
235 MacroAssembler* masm_;
236 Deoptimizer::BailoutType type_;
237 };
238
239 class TableEntryGenerator : public EntryGenerator {
240 public:
241 TableEntryGenerator(MacroAssembler* masm, BailoutType type, int count)
242 : EntryGenerator(masm, type), count_(count) { }
243
244 protected:
245 virtual void GeneratePrologue();
246
247 private:
248 int count() const { return count_; }
249
250 int count_;
251 };
252
253 private:
254 static const int kNumberOfEntries = 4096;
255
Steve Block44f0eee2011-05-26 01:26:41 +0100256 Deoptimizer(Isolate* isolate,
257 JSFunction* function,
Ben Murdochb0fe1622011-05-05 13:52:32 +0100258 BailoutType type,
259 unsigned bailout_id,
260 Address from,
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000261 int fp_to_sp_delta,
262 Code* optimized_code);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100263 void DeleteFrameDescriptions();
264
265 void DoComputeOutputFrames();
266 void DoComputeOsrOutputFrame();
267 void DoComputeFrame(TranslationIterator* iterator, int frame_index);
268 void DoTranslateCommand(TranslationIterator* iterator,
269 int frame_index,
270 unsigned output_offset);
271 // Translate a command for OSR. Updates the input offset to be used for
272 // the next command. Returns false if translation of the command failed
273 // (e.g., a number conversion failed) and may or may not have updated the
274 // input offset.
275 bool DoOsrTranslateCommand(TranslationIterator* iterator,
276 int* input_offset);
277
278 unsigned ComputeInputFrameSize() const;
279 unsigned ComputeFixedSize(JSFunction* function) const;
280
281 unsigned ComputeIncomingArgumentSize(JSFunction* function) const;
282 unsigned ComputeOutgoingArgumentSize() const;
283
284 Object* ComputeLiteral(int index) const;
285
Ben Murdoch8b112d22011-06-08 16:22:53 +0100286 void AddDoubleValue(intptr_t slot_address, double value);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100287
288 static LargeObjectChunk* CreateCode(BailoutType type);
289 static void GenerateDeoptimizationEntries(
290 MacroAssembler* masm, int count, BailoutType type);
291
292 // Weak handle callback for deoptimizing code objects.
293 static void HandleWeakDeoptimizedCode(
294 v8::Persistent<v8::Value> obj, void* data);
295 static Code* FindDeoptimizingCodeFromAddress(Address addr);
296 static void RemoveDeoptimizingCode(Code* code);
297
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000298 // Fill the input from from a JavaScript frame. This is used when
299 // the debugger needs to inspect an optimized frame. For normal
300 // deoptimizations the input frame is filled in generated code.
301 void FillInputFrame(Address tos, JavaScriptFrame* frame);
302
Steve Block44f0eee2011-05-26 01:26:41 +0100303 Isolate* isolate_;
Ben Murdochb0fe1622011-05-05 13:52:32 +0100304 JSFunction* function_;
305 Code* optimized_code_;
306 unsigned bailout_id_;
307 BailoutType bailout_type_;
308 Address from_;
309 int fp_to_sp_delta_;
310
311 // Input frame description.
312 FrameDescription* input_;
313 // Number of output frames.
314 int output_count_;
315 // Array of output frame descriptions.
316 FrameDescription** output_;
317
Ben Murdoch8b112d22011-06-08 16:22:53 +0100318 List<HeapNumberMaterializationDescriptor> deferred_heap_numbers_;
Ben Murdochb0fe1622011-05-05 13:52:32 +0100319
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000320 static const int table_entry_size_;
Ben Murdochb0fe1622011-05-05 13:52:32 +0100321
322 friend class FrameDescription;
323 friend class DeoptimizingCodeListNode;
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000324 friend class DeoptimizedFrameInfo;
Ben Murdochb0fe1622011-05-05 13:52:32 +0100325};
326
327
328class FrameDescription {
329 public:
330 FrameDescription(uint32_t frame_size,
331 JSFunction* function);
332
333 void* operator new(size_t size, uint32_t frame_size) {
Steve Block44f0eee2011-05-26 01:26:41 +0100334 // Subtracts kPointerSize, as the member frame_content_ already supplies
335 // the first element of the area to store the frame.
336 return malloc(size + frame_size - kPointerSize);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100337 }
338
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000339 void operator delete(void* pointer, uint32_t frame_size) {
340 free(pointer);
341 }
342
Ben Murdochb0fe1622011-05-05 13:52:32 +0100343 void operator delete(void* description) {
344 free(description);
345 }
346
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000347 uint32_t GetFrameSize() const {
348 ASSERT(static_cast<uint32_t>(frame_size_) == frame_size_);
349 return static_cast<uint32_t>(frame_size_);
350 }
Ben Murdochb0fe1622011-05-05 13:52:32 +0100351
352 JSFunction* GetFunction() const { return function_; }
353
354 unsigned GetOffsetFromSlotIndex(Deoptimizer* deoptimizer, int slot_index);
355
356 intptr_t GetFrameSlot(unsigned offset) {
357 return *GetFrameSlotPointer(offset);
358 }
359
360 double GetDoubleFrameSlot(unsigned offset) {
361 return *reinterpret_cast<double*>(GetFrameSlotPointer(offset));
362 }
363
364 void SetFrameSlot(unsigned offset, intptr_t value) {
365 *GetFrameSlotPointer(offset) = value;
366 }
367
368 intptr_t GetRegister(unsigned n) const {
369 ASSERT(n < ARRAY_SIZE(registers_));
370 return registers_[n];
371 }
372
373 double GetDoubleRegister(unsigned n) const {
374 ASSERT(n < ARRAY_SIZE(double_registers_));
375 return double_registers_[n];
376 }
377
378 void SetRegister(unsigned n, intptr_t value) {
379 ASSERT(n < ARRAY_SIZE(registers_));
380 registers_[n] = value;
381 }
382
383 void SetDoubleRegister(unsigned n, double value) {
384 ASSERT(n < ARRAY_SIZE(double_registers_));
385 double_registers_[n] = value;
386 }
387
388 intptr_t GetTop() const { return top_; }
389 void SetTop(intptr_t top) { top_ = top; }
390
391 intptr_t GetPc() const { return pc_; }
392 void SetPc(intptr_t pc) { pc_ = pc; }
393
394 intptr_t GetFp() const { return fp_; }
395 void SetFp(intptr_t fp) { fp_ = fp; }
396
397 Smi* GetState() const { return state_; }
398 void SetState(Smi* state) { state_ = state; }
399
400 void SetContinuation(intptr_t pc) { continuation_ = pc; }
401
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000402#ifdef DEBUG
403 Code::Kind GetKind() const { return kind_; }
404 void SetKind(Code::Kind kind) { kind_ = kind; }
405#endif
406
407 // Get the incoming arguments count.
408 int ComputeParametersCount();
409
410 // Get a parameter value for an unoptimized frame.
411 Object* GetParameter(Deoptimizer* deoptimizer, int index);
412
413 // Get the expression stack height for a unoptimized frame.
414 unsigned GetExpressionCount(Deoptimizer* deoptimizer);
415
416 // Get the expression stack value for an unoptimized frame.
417 Object* GetExpression(Deoptimizer* deoptimizer, int index);
418
Ben Murdochb0fe1622011-05-05 13:52:32 +0100419 static int registers_offset() {
420 return OFFSET_OF(FrameDescription, registers_);
421 }
422
423 static int double_registers_offset() {
424 return OFFSET_OF(FrameDescription, double_registers_);
425 }
426
427 static int frame_size_offset() {
428 return OFFSET_OF(FrameDescription, frame_size_);
429 }
430
431 static int pc_offset() {
432 return OFFSET_OF(FrameDescription, pc_);
433 }
434
435 static int state_offset() {
436 return OFFSET_OF(FrameDescription, state_);
437 }
438
439 static int continuation_offset() {
440 return OFFSET_OF(FrameDescription, continuation_);
441 }
442
443 static int frame_content_offset() {
Steve Block44f0eee2011-05-26 01:26:41 +0100444 return OFFSET_OF(FrameDescription, frame_content_);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100445 }
446
447 private:
448 static const uint32_t kZapUint32 = 0xbeeddead;
449
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000450 // Frame_size_ must hold a uint32_t value. It is only a uintptr_t to
451 // keep the variable-size array frame_content_ of type intptr_t at
452 // the end of the structure aligned.
Ben Murdochb0fe1622011-05-05 13:52:32 +0100453 uintptr_t frame_size_; // Number of bytes.
454 JSFunction* function_;
455 intptr_t registers_[Register::kNumRegisters];
456 double double_registers_[DoubleRegister::kNumAllocatableRegisters];
457 intptr_t top_;
458 intptr_t pc_;
459 intptr_t fp_;
460 Smi* state_;
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000461#ifdef DEBUG
462 Code::Kind kind_;
463#endif
Ben Murdochb0fe1622011-05-05 13:52:32 +0100464
465 // Continuation is the PC where the execution continues after
466 // deoptimizing.
467 intptr_t continuation_;
468
Steve Block44f0eee2011-05-26 01:26:41 +0100469 // This must be at the end of the object as the object is allocated larger
470 // than it's definition indicate to extend this array.
471 intptr_t frame_content_[1];
472
Ben Murdochb0fe1622011-05-05 13:52:32 +0100473 intptr_t* GetFrameSlotPointer(unsigned offset) {
474 ASSERT(offset < frame_size_);
475 return reinterpret_cast<intptr_t*>(
476 reinterpret_cast<Address>(this) + frame_content_offset() + offset);
477 }
478};
479
480
481class TranslationBuffer BASE_EMBEDDED {
482 public:
483 TranslationBuffer() : contents_(256) { }
484
485 int CurrentIndex() const { return contents_.length(); }
486 void Add(int32_t value);
487
488 Handle<ByteArray> CreateByteArray();
489
490 private:
491 ZoneList<uint8_t> contents_;
492};
493
494
495class TranslationIterator BASE_EMBEDDED {
496 public:
497 TranslationIterator(ByteArray* buffer, int index)
498 : buffer_(buffer), index_(index) {
499 ASSERT(index >= 0 && index < buffer->length());
500 }
501
502 int32_t Next();
503
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000504 bool HasNext() const { return index_ < buffer_->length(); }
Ben Murdochb0fe1622011-05-05 13:52:32 +0100505
506 void Skip(int n) {
507 for (int i = 0; i < n; i++) Next();
508 }
509
510 private:
511 ByteArray* buffer_;
512 int index_;
513};
514
515
516class Translation BASE_EMBEDDED {
517 public:
518 enum Opcode {
519 BEGIN,
520 FRAME,
521 REGISTER,
522 INT32_REGISTER,
523 DOUBLE_REGISTER,
524 STACK_SLOT,
525 INT32_STACK_SLOT,
526 DOUBLE_STACK_SLOT,
527 LITERAL,
528 ARGUMENTS_OBJECT,
529
530 // A prefix indicating that the next command is a duplicate of the one
531 // that follows it.
532 DUPLICATE
533 };
534
535 Translation(TranslationBuffer* buffer, int frame_count)
536 : buffer_(buffer),
537 index_(buffer->CurrentIndex()) {
538 buffer_->Add(BEGIN);
539 buffer_->Add(frame_count);
540 }
541
542 int index() const { return index_; }
543
544 // Commands.
545 void BeginFrame(int node_id, int literal_id, unsigned height);
546 void StoreRegister(Register reg);
547 void StoreInt32Register(Register reg);
548 void StoreDoubleRegister(DoubleRegister reg);
549 void StoreStackSlot(int index);
550 void StoreInt32StackSlot(int index);
551 void StoreDoubleStackSlot(int index);
552 void StoreLiteral(int literal_id);
553 void StoreArgumentsObject();
554 void MarkDuplicate();
555
556 static int NumberOfOperandsFor(Opcode opcode);
557
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000558#if defined(OBJECT_PRINT) || defined(ENABLE_DISASSEMBLER)
Ben Murdochb0fe1622011-05-05 13:52:32 +0100559 static const char* StringFor(Opcode opcode);
560#endif
561
562 private:
563 TranslationBuffer* buffer_;
564 int index_;
565};
566
567
568// Linked list holding deoptimizing code objects. The deoptimizing code objects
569// are kept as weak handles until they are no longer activated on the stack.
570class DeoptimizingCodeListNode : public Malloced {
571 public:
572 explicit DeoptimizingCodeListNode(Code* code);
573 ~DeoptimizingCodeListNode();
574
575 DeoptimizingCodeListNode* next() const { return next_; }
576 void set_next(DeoptimizingCodeListNode* next) { next_ = next; }
577 Handle<Code> code() const { return code_; }
578
579 private:
580 // Global (weak) handle to the deoptimizing code object.
581 Handle<Code> code_;
582
583 // Next pointer for linked list.
584 DeoptimizingCodeListNode* next_;
585};
586
587
Ben Murdoch8b112d22011-06-08 16:22:53 +0100588class SlotRef BASE_EMBEDDED {
589 public:
590 enum SlotRepresentation {
591 UNKNOWN,
592 TAGGED,
593 INT32,
594 DOUBLE,
595 LITERAL
596 };
597
598 SlotRef()
599 : addr_(NULL), representation_(UNKNOWN) { }
600
601 SlotRef(Address addr, SlotRepresentation representation)
602 : addr_(addr), representation_(representation) { }
603
604 explicit SlotRef(Object* literal)
605 : literal_(literal), representation_(LITERAL) { }
606
607 Handle<Object> GetValue() {
608 switch (representation_) {
609 case TAGGED:
610 return Handle<Object>(Memory::Object_at(addr_));
611
612 case INT32: {
613 int value = Memory::int32_at(addr_);
614 if (Smi::IsValid(value)) {
615 return Handle<Object>(Smi::FromInt(value));
616 } else {
617 return Isolate::Current()->factory()->NewNumberFromInt(value);
618 }
619 }
620
621 case DOUBLE: {
622 double value = Memory::double_at(addr_);
623 return Isolate::Current()->factory()->NewNumber(value);
624 }
625
626 case LITERAL:
627 return literal_;
628
629 default:
630 UNREACHABLE();
631 return Handle<Object>::null();
632 }
633 }
634
635 static void ComputeSlotMappingForArguments(JavaScriptFrame* frame,
636 int inlined_frame_index,
637 Vector<SlotRef>* args_slots);
638
639 private:
640 Address addr_;
641 Handle<Object> literal_;
642 SlotRepresentation representation_;
643
644 static Address SlotAddress(JavaScriptFrame* frame, int slot_index) {
645 if (slot_index >= 0) {
646 const int offset = JavaScriptFrameConstants::kLocal0Offset;
647 return frame->fp() + offset - (slot_index * kPointerSize);
648 } else {
649 const int offset = JavaScriptFrameConstants::kLastParameterOffset;
650 return frame->fp() + offset - ((slot_index + 1) * kPointerSize);
651 }
652 }
653
654 static SlotRef ComputeSlotForNextArgument(TranslationIterator* iterator,
655 DeoptimizationInputData* data,
656 JavaScriptFrame* frame);
657};
658
659
#ifdef ENABLE_DEBUGGER_SUPPORT
// Class used to represent an unoptimized frame when the debugger
// needs to inspect a frame that is part of an optimized frame. The
// internally used FrameDescription objects are not GC safe so for use
// by the debugger frame information is copied to an object of this type.
class DeoptimizedFrameInfo : public Malloced {
 public:
  DeoptimizedFrameInfo(Deoptimizer* deoptimizer, int frame_index);
  virtual ~DeoptimizedFrameInfo();

  // GC support.
  void Iterate(ObjectVisitor* v);

  // Return the number of incoming arguments.
  int parameters_count() { return parameters_count_; }

  // Return the height of the expression stack.
  int expression_count() { return expression_count_; }

  // Get the frame function.
  JSFunction* GetFunction() {
    return function_;
  }

  // Get an incoming argument.
  Object* GetParameter(int index) {
    ASSERT(0 <= index && index < parameters_count());
    return parameters_[index];
  }

  // Get an expression from the expression stack.
  Object* GetExpression(int index) {
    ASSERT(0 <= index && index < expression_count());
    return expression_stack_[index];
  }

 private:
  // Set the frame function. Used only by the Deoptimizer (friend).
  void SetFunction(JSFunction* function) {
    function_ = function;
  }

  // Set an incoming argument.
  void SetParameter(int index, Object* obj) {
    ASSERT(0 <= index && index < parameters_count());
    parameters_[index] = obj;
  }

  // Set an expression on the expression stack.
  void SetExpression(int index, Object* obj) {
    ASSERT(0 <= index && index < expression_count());
    expression_stack_[index] = obj;
  }

  JSFunction* function_;
  int parameters_count_;
  int expression_count_;
  Object** parameters_;
  Object** expression_stack_;

  friend class Deoptimizer;
};
#endif

} }  // namespace v8::internal

#endif  // V8_DEOPTIMIZER_H_