// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_DEOPTIMIZER_H_
#define V8_DEOPTIMIZER_H_

#include "v8.h"

#include "allocation.h"
#include "macro-assembler.h"
#include "zone-inl.h"


namespace v8 {
namespace internal {

class FrameDescription;
class TranslationIterator;
class DeoptimizingCodeListNode;
class DeoptimizedFrameInfo;

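// Records a frame slot together with the double value that should be
// materialized into it as a heap number once deoptimization has built the
// output frames (see Deoptimizer::MaterializeHeapNumbers and
// Deoptimizer::AddDoubleValue below).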
class HeapNumberMaterializationDescriptor BASE_EMBEDDED {
 public:
  HeapNumberMaterializationDescriptor(Address slot_address, double val)
      : slot_address_(slot_address), val_(val) { }

  Address slot_address() const { return slot_address_; }
  double value() const { return val_; }

 private:
  Address slot_address_;
  double val_;
};


class OptimizedFunctionVisitor BASE_EMBEDDED {
 public:
  virtual ~OptimizedFunctionVisitor() {}

  // Function that is called before iterating over any optimized functions
  // from the given global context.
  virtual void EnterContext(Context* context) = 0;

  virtual void VisitFunction(JSFunction* function) = 0;

  // Function that is called after iterating over all optimized functions
  // from the given global context.
  virtual void LeaveContext(Context* context) = 0;
};


class Deoptimizer;


class DeoptimizerData {
 public:
  DeoptimizerData();
  ~DeoptimizerData();

#ifdef ENABLE_DEBUGGER_SUPPORT
  void Iterate(ObjectVisitor* v);
#endif

 private:
  MemoryChunk* eager_deoptimization_entry_code_;
  MemoryChunk* lazy_deoptimization_entry_code_;
  Deoptimizer* current_;

#ifdef ENABLE_DEBUGGER_SUPPORT
  DeoptimizedFrameInfo* deoptimized_frame_info_;
#endif

  // List of deoptimized code objects which still have references from active
  // stack frames. These code objects are needed by the deoptimizer when
  // deoptimizing a frame for which the code object of the function has been
  // changed from the code that was present when the deoptimization was done.
  DeoptimizingCodeListNode* deoptimizing_code_list_;

  friend class Deoptimizer;

  DISALLOW_COPY_AND_ASSIGN(DeoptimizerData);
};


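// The Deoptimizer translates an optimized frame into the equivalent
// unoptimized frames: one JS frame per (possibly inlined) function plus any
// arguments adaptor and construct stub frames, each described by a
// FrameDescription below. It also provides the generated deoptimization
// entry code and tracks code objects that have been deoptimized but still
// have activations on the stack.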
class Deoptimizer : public Malloced {
 public:
  enum BailoutType {
    EAGER,
    LAZY,
    OSR,
    // This last bailout type is not really a bailout, but is used by the
    // debugger to deoptimize stack frames to allow inspection.
    DEBUGGER
  };

  int output_count() const { return output_count_; }

  // Number of created JS frames. Not all created frames are necessarily JS.
  int jsframe_count() const { return jsframe_count_; }

  static Deoptimizer* New(JSFunction* function,
                          BailoutType type,
                          unsigned bailout_id,
                          Address from,
                          int fp_to_sp_delta,
                          Isolate* isolate);
  static Deoptimizer* Grab(Isolate* isolate);
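  // A rough sketch of how New() and Grab() above fit together (the details
  // live in deoptimizer.cc and the architecture-specific entry stubs): the
  // generated deoptimization entry calls New() to build the input frame
  // description and ComputeOutputFrames() to translate it into unoptimized
  // frames; once those frames have been written back to the stack, the
  // runtime calls Grab() to retrieve the instance, materializes the deferred
  // heap numbers, and deletes it.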

#ifdef ENABLE_DEBUGGER_SUPPORT
  // The returned object with information on the optimized frame needs to be
  // freed before another one can be generated.
  static DeoptimizedFrameInfo* DebuggerInspectableFrame(JavaScriptFrame* frame,
                                                        int jsframe_index,
                                                        Isolate* isolate);
  static void DeleteDebuggerInspectableFrame(DeoptimizedFrameInfo* info,
                                             Isolate* isolate);
#endif

  // Makes sure that there is enough room in the relocation
  // information of a code object to perform lazy deoptimization
  // patching. If there is not enough room, a new relocation
  // information object is allocated and comments are added until it
  // is big enough.
  static void EnsureRelocSpaceForLazyDeoptimization(Handle<Code> code);

  // Deoptimize the function now. Its current optimized code will never be run
  // again and any activations of the optimized code will get deoptimized when
  // execution returns.
  static void DeoptimizeFunction(JSFunction* function);

  // Deoptimize all functions in the heap.
  static void DeoptimizeAll();

  static void DeoptimizeGlobalObject(JSObject* object);

  static void VisitAllOptimizedFunctionsForContext(
      Context* context, OptimizedFunctionVisitor* visitor);

  static void VisitAllOptimizedFunctionsForGlobalObject(
      JSObject* object, OptimizedFunctionVisitor* visitor);

  static void VisitAllOptimizedFunctions(OptimizedFunctionVisitor* visitor);

  // The size in bytes of the code required at a lazy deopt patch site.
  static int patch_size();

  // Patch all stack guard checks in the unoptimized code to
  // unconditionally call replacement_code.
  static void PatchStackCheckCode(Code* unoptimized_code,
                                  Code* check_code,
                                  Code* replacement_code);

  // Patch the stack guard check at the instruction before pc_after in
  // the unoptimized code to unconditionally call replacement_code.
  static void PatchStackCheckCodeAt(Code* unoptimized_code,
                                    Address pc_after,
                                    Code* check_code,
                                    Code* replacement_code);

  // Change all patched stack guard checks in the unoptimized code
  // back to a normal stack guard check.
  static void RevertStackCheckCode(Code* unoptimized_code,
                                   Code* check_code,
                                   Code* replacement_code);

  // Change all patched stack guard checks in the unoptimized code
  // back to a normal stack guard check.
  static void RevertStackCheckCodeAt(Code* unoptimized_code,
                                     Address pc_after,
                                     Code* check_code,
                                     Code* replacement_code);

  ~Deoptimizer();

  void MaterializeHeapNumbers();
#ifdef ENABLE_DEBUGGER_SUPPORT
  void MaterializeHeapNumbersForDebuggerInspectableFrame(
      Address parameters_top,
      uint32_t parameters_size,
      Address expressions_top,
      uint32_t expressions_size,
      DeoptimizedFrameInfo* info);
#endif

  static void ComputeOutputFrames(Deoptimizer* deoptimizer);

  static Address GetDeoptimizationEntry(int id, BailoutType type);
  static int GetDeoptimizationId(Address addr, BailoutType type);
  static int GetOutputInfo(DeoptimizationOutputData* data,
                           unsigned node_id,
                           SharedFunctionInfo* shared);

  // Code generation support.
  static int input_offset() { return OFFSET_OF(Deoptimizer, input_); }
  static int output_count_offset() {
    return OFFSET_OF(Deoptimizer, output_count_);
  }
  static int output_offset() { return OFFSET_OF(Deoptimizer, output_); }

  static int GetDeoptimizedCodeCount(Isolate* isolate);

  static const int kNotDeoptimizationEntry = -1;

  // Generators for the deoptimization entry code.
  class EntryGenerator BASE_EMBEDDED {
   public:
    EntryGenerator(MacroAssembler* masm, BailoutType type)
        : masm_(masm), type_(type) { }
    virtual ~EntryGenerator() { }

    void Generate();

   protected:
    MacroAssembler* masm() const { return masm_; }
    BailoutType type() const { return type_; }

    virtual void GeneratePrologue() { }

   private:
    MacroAssembler* masm_;
    Deoptimizer::BailoutType type_;
  };

  class TableEntryGenerator : public EntryGenerator {
   public:
    TableEntryGenerator(MacroAssembler* masm, BailoutType type, int count)
        : EntryGenerator(masm, type), count_(count) { }

   protected:
    virtual void GeneratePrologue();

   private:
    int count() const { return count_; }

    int count_;
  };

  int ConvertJSFrameIndexToFrameIndex(int jsframe_index);

 private:
  static const int kNumberOfEntries = 16384;

  Deoptimizer(Isolate* isolate,
              JSFunction* function,
              BailoutType type,
              unsigned bailout_id,
              Address from,
              int fp_to_sp_delta,
              Code* optimized_code);
  void DeleteFrameDescriptions();

  void DoComputeOutputFrames();
  void DoComputeOsrOutputFrame();
  void DoComputeJSFrame(TranslationIterator* iterator, int frame_index);
  void DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
                                      int frame_index);
  void DoComputeConstructStubFrame(TranslationIterator* iterator,
                                   int frame_index);
  void DoTranslateCommand(TranslationIterator* iterator,
                          int frame_index,
                          unsigned output_offset);
  // Translate a command for OSR. Updates the input offset to be used for
  // the next command. Returns false if translation of the command failed
  // (e.g., a number conversion failed) and may or may not have updated the
  // input offset.
  bool DoOsrTranslateCommand(TranslationIterator* iterator,
                             int* input_offset);

  unsigned ComputeInputFrameSize() const;
  unsigned ComputeFixedSize(JSFunction* function) const;

  unsigned ComputeIncomingArgumentSize(JSFunction* function) const;
  unsigned ComputeOutgoingArgumentSize() const;

  Object* ComputeLiteral(int index) const;

  void AddDoubleValue(intptr_t slot_address, double value);

  static MemoryChunk* CreateCode(BailoutType type);
  static void GenerateDeoptimizationEntries(
      MacroAssembler* masm, int count, BailoutType type);

  // Weak handle callback for deoptimizing code objects.
  static void HandleWeakDeoptimizedCode(
      v8::Persistent<v8::Value> obj, void* data);
  static Code* FindDeoptimizingCodeFromAddress(Address addr);
  static void RemoveDeoptimizingCode(Code* code);

  // Fill the input frame from a JavaScript frame. This is used when
  // the debugger needs to inspect an optimized frame. For normal
  // deoptimizations the input frame is filled in generated code.
  void FillInputFrame(Address tos, JavaScriptFrame* frame);

  Isolate* isolate_;
  JSFunction* function_;
  Code* optimized_code_;
  unsigned bailout_id_;
  BailoutType bailout_type_;
  Address from_;
  int fp_to_sp_delta_;

  // Input frame description.
  FrameDescription* input_;
  // Number of output frames.
  int output_count_;
  // Number of output JS frames.
  int jsframe_count_;
  // Array of output frame descriptions.
  FrameDescription** output_;

  List<HeapNumberMaterializationDescriptor> deferred_heap_numbers_;

  static const int table_entry_size_;

  friend class FrameDescription;
  friend class DeoptimizingCodeListNode;
  friend class DeoptimizedFrameInfo;
};


class FrameDescription {
 public:
  FrameDescription(uint32_t frame_size,
                   JSFunction* function);

  void* operator new(size_t size, uint32_t frame_size) {
    // Subtracts kPointerSize, as the member frame_content_ already supplies
    // the first element of the area to store the frame.
    return malloc(size + frame_size - kPointerSize);
  }
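  // Note: because of the trailing frame_content_ array, a FrameDescription is
  // meant to be allocated with the placement-style form of new above, e.g.
  // something like
  //   new(frame_size) FrameDescription(frame_size, function);
  // so that the frame contents directly follow the fixed part of the object.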

  void operator delete(void* pointer, uint32_t frame_size) {
    free(pointer);
  }

  void operator delete(void* description) {
    free(description);
  }

  uint32_t GetFrameSize() const {
    ASSERT(static_cast<uint32_t>(frame_size_) == frame_size_);
    return static_cast<uint32_t>(frame_size_);
  }

  JSFunction* GetFunction() const { return function_; }

  unsigned GetOffsetFromSlotIndex(int slot_index);

  intptr_t GetFrameSlot(unsigned offset) {
    return *GetFrameSlotPointer(offset);
  }

  double GetDoubleFrameSlot(unsigned offset) {
    intptr_t* ptr = GetFrameSlotPointer(offset);
#if V8_TARGET_ARCH_MIPS
    // Prevent gcc from using load-double (mips ldc1) on a (possibly)
    // non-64-bit-aligned double. Uses two lwc1 instructions instead.
    union conversion {
      double d;
      uint32_t u[2];
    } c;
    c.u[0] = *reinterpret_cast<uint32_t*>(ptr);
    c.u[1] = *(reinterpret_cast<uint32_t*>(ptr) + 1);
    return c.d;
#else
    return *reinterpret_cast<double*>(ptr);
#endif
  }

  void SetFrameSlot(unsigned offset, intptr_t value) {
    *GetFrameSlotPointer(offset) = value;
  }

  intptr_t GetRegister(unsigned n) const {
    ASSERT(n < ARRAY_SIZE(registers_));
    return registers_[n];
  }

  double GetDoubleRegister(unsigned n) const {
    ASSERT(n < ARRAY_SIZE(double_registers_));
    return double_registers_[n];
  }

  void SetRegister(unsigned n, intptr_t value) {
    ASSERT(n < ARRAY_SIZE(registers_));
    registers_[n] = value;
  }

  void SetDoubleRegister(unsigned n, double value) {
    ASSERT(n < ARRAY_SIZE(double_registers_));
    double_registers_[n] = value;
  }

  intptr_t GetTop() const { return top_; }
  void SetTop(intptr_t top) { top_ = top; }

  intptr_t GetPc() const { return pc_; }
  void SetPc(intptr_t pc) { pc_ = pc; }

  intptr_t GetFp() const { return fp_; }
  void SetFp(intptr_t fp) { fp_ = fp; }

  intptr_t GetContext() const { return context_; }
  void SetContext(intptr_t context) { context_ = context; }

  Smi* GetState() const { return state_; }
  void SetState(Smi* state) { state_ = state; }

  void SetContinuation(intptr_t pc) { continuation_ = pc; }

  StackFrame::Type GetFrameType() const { return type_; }
  void SetFrameType(StackFrame::Type type) { type_ = type; }

  // Get the incoming arguments count.
  int ComputeParametersCount();

  // Get a parameter value for an unoptimized frame.
  Object* GetParameter(int index);

  // Get the expression stack height for an unoptimized frame.
  unsigned GetExpressionCount();

  // Get the expression stack value for an unoptimized frame.
  Object* GetExpression(int index);

  static int registers_offset() {
    return OFFSET_OF(FrameDescription, registers_);
  }

  static int double_registers_offset() {
    return OFFSET_OF(FrameDescription, double_registers_);
  }

  static int frame_size_offset() {
    return OFFSET_OF(FrameDescription, frame_size_);
  }

  static int pc_offset() {
    return OFFSET_OF(FrameDescription, pc_);
  }

  static int state_offset() {
    return OFFSET_OF(FrameDescription, state_);
  }

  static int continuation_offset() {
    return OFFSET_OF(FrameDescription, continuation_);
  }

  static int frame_content_offset() {
    return OFFSET_OF(FrameDescription, frame_content_);
  }

 private:
  static const uint32_t kZapUint32 = 0xbeeddead;

  // Frame_size_ must hold a uint32_t value. It is only a uintptr_t to
  // keep the variable-size array frame_content_ of type intptr_t at
  // the end of the structure aligned.
  uintptr_t frame_size_;  // Number of bytes.
  JSFunction* function_;
  intptr_t registers_[Register::kNumRegisters];
  double double_registers_[DoubleRegister::kNumAllocatableRegisters];
  intptr_t top_;
  intptr_t pc_;
  intptr_t fp_;
  intptr_t context_;
  StackFrame::Type type_;
  Smi* state_;
#ifdef DEBUG
  Code::Kind kind_;
#endif

  // Continuation is the PC where the execution continues after
  // deoptimizing.
  intptr_t continuation_;

  // This must be at the end of the object as the object is allocated larger
  // than its definition indicates in order to extend this array.
  intptr_t frame_content_[1];

  intptr_t* GetFrameSlotPointer(unsigned offset) {
    ASSERT(offset < frame_size_);
    return reinterpret_cast<intptr_t*>(
        reinterpret_cast<Address>(this) + frame_content_offset() + offset);
  }

  int ComputeFixedSize();
};


class TranslationBuffer BASE_EMBEDDED {
 public:
  TranslationBuffer() : contents_(256) { }

  int CurrentIndex() const { return contents_.length(); }
  void Add(int32_t value);

  Handle<ByteArray> CreateByteArray();

 private:
  ZoneList<uint8_t> contents_;
};


class TranslationIterator BASE_EMBEDDED {
 public:
  TranslationIterator(ByteArray* buffer, int index)
      : buffer_(buffer), index_(index) {
    ASSERT(index >= 0 && index < buffer->length());
  }

  int32_t Next();

  bool HasNext() const { return index_ < buffer_->length(); }

  void Skip(int n) {
    for (int i = 0; i < n; i++) Next();
  }

 private:
  ByteArray* buffer_;
  int index_;
};


class Translation BASE_EMBEDDED {
 public:
  enum Opcode {
    BEGIN,
    JS_FRAME,
    CONSTRUCT_STUB_FRAME,
    ARGUMENTS_ADAPTOR_FRAME,
    REGISTER,
    INT32_REGISTER,
    DOUBLE_REGISTER,
    STACK_SLOT,
    INT32_STACK_SLOT,
    DOUBLE_STACK_SLOT,
    LITERAL,
    ARGUMENTS_OBJECT,

    // A prefix indicating that the next command is a duplicate of the one
    // that follows it.
    DUPLICATE
  };
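
  // The byte stream built through TranslationBuffer is a flat sequence of
  // these opcodes and their operands. As the constructor below shows, it
  // starts with BEGIN, the total frame count and the JS frame count; each
  // frame then opens with one of the *_FRAME opcodes followed by one store
  // command per slot of that frame. (This is a sketch of the layout, not a
  // formal grammar; the decoding lives in deoptimizer.cc.)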

  Translation(TranslationBuffer* buffer, int frame_count, int jsframe_count)
      : buffer_(buffer),
        index_(buffer->CurrentIndex()) {
    buffer_->Add(BEGIN);
    buffer_->Add(frame_count);
    buffer_->Add(jsframe_count);
  }

  int index() const { return index_; }

  // Commands.
  void BeginJSFrame(int node_id, int literal_id, unsigned height);
  void BeginArgumentsAdaptorFrame(int literal_id, unsigned height);
  void BeginConstructStubFrame(int literal_id, unsigned height);
  void StoreRegister(Register reg);
  void StoreInt32Register(Register reg);
  void StoreDoubleRegister(DoubleRegister reg);
  void StoreStackSlot(int index);
  void StoreInt32StackSlot(int index);
  void StoreDoubleStackSlot(int index);
  void StoreLiteral(int literal_id);
  void StoreArgumentsObject();
  void MarkDuplicate();

  static int NumberOfOperandsFor(Opcode opcode);

#if defined(OBJECT_PRINT) || defined(ENABLE_DISASSEMBLER)
  static const char* StringFor(Opcode opcode);
#endif

 private:
  TranslationBuffer* buffer_;
  int index_;
};


// Linked list holding deoptimizing code objects. The deoptimizing code objects
// are kept as weak handles until they no longer have activations on the stack.
class DeoptimizingCodeListNode : public Malloced {
 public:
  explicit DeoptimizingCodeListNode(Code* code);
  ~DeoptimizingCodeListNode();

  DeoptimizingCodeListNode* next() const { return next_; }
  void set_next(DeoptimizingCodeListNode* next) { next_ = next; }
  Handle<Code> code() const { return code_; }

 private:
  // Global (weak) handle to the deoptimizing code object.
  Handle<Code> code_;

  // Next pointer for linked list.
  DeoptimizingCodeListNode* next_;
};


class SlotRef BASE_EMBEDDED {
 public:
  enum SlotRepresentation {
    UNKNOWN,
    TAGGED,
    INT32,
    DOUBLE,
    LITERAL
  };

  SlotRef()
      : addr_(NULL), representation_(UNKNOWN) { }

  SlotRef(Address addr, SlotRepresentation representation)
      : addr_(addr), representation_(representation) { }

  explicit SlotRef(Object* literal)
      : literal_(literal), representation_(LITERAL) { }

  Handle<Object> GetValue() {
    switch (representation_) {
      case TAGGED:
        return Handle<Object>(Memory::Object_at(addr_));

      case INT32: {
        int value = Memory::int32_at(addr_);
        if (Smi::IsValid(value)) {
          return Handle<Object>(Smi::FromInt(value));
        } else {
          return Isolate::Current()->factory()->NewNumberFromInt(value);
        }
      }

      case DOUBLE: {
        double value = Memory::double_at(addr_);
        return Isolate::Current()->factory()->NewNumber(value);
      }

      case LITERAL:
        return literal_;

      default:
        UNREACHABLE();
        return Handle<Object>::null();
    }
  }

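  // Presumably used when the actual argument values of an (inlined) optimized
  // frame are needed without deoptimizing it, e.g. when materializing an
  // arguments object: builds one SlotRef per argument of the inlined frame at
  // inlined_frame_index. (Description inferred from the surrounding
  // declarations; see the callers in the runtime.)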
  static Vector<SlotRef> ComputeSlotMappingForArguments(
      JavaScriptFrame* frame,
      int inlined_frame_index,
      int formal_parameter_count);

 private:
  Address addr_;
  Handle<Object> literal_;
  SlotRepresentation representation_;

  static Address SlotAddress(JavaScriptFrame* frame, int slot_index) {
    if (slot_index >= 0) {
      const int offset = JavaScriptFrameConstants::kLocal0Offset;
      return frame->fp() + offset - (slot_index * kPointerSize);
    } else {
      const int offset = JavaScriptFrameConstants::kLastParameterOffset;
      return frame->fp() + offset - ((slot_index + 1) * kPointerSize);
    }
  }

  static SlotRef ComputeSlotForNextArgument(TranslationIterator* iterator,
                                            DeoptimizationInputData* data,
                                            JavaScriptFrame* frame);

  static void ComputeSlotsForArguments(
      Vector<SlotRef>* args_slots,
      TranslationIterator* iterator,
      DeoptimizationInputData* data,
      JavaScriptFrame* frame);
};


#ifdef ENABLE_DEBUGGER_SUPPORT
// Class used to represent an unoptimized frame when the debugger
// needs to inspect a frame that is part of an optimized frame. The
// internally used FrameDescription objects are not GC safe, so for use
// by the debugger the frame information is copied to an object of this type.
// Represents parameters in unadapted form, so their number might not match
// the formal parameter count.
class DeoptimizedFrameInfo : public Malloced {
 public:
  DeoptimizedFrameInfo(Deoptimizer* deoptimizer,
                       int frame_index,
                       bool has_arguments_adaptor,
                       bool has_construct_stub);
  virtual ~DeoptimizedFrameInfo();

  // GC support.
  void Iterate(ObjectVisitor* v);

  // Return the number of incoming arguments.
  int parameters_count() { return parameters_count_; }

  // Return the height of the expression stack.
  int expression_count() { return expression_count_; }

  // Get the frame function.
  JSFunction* GetFunction() {
    return function_;
  }

  // Check if this frame is preceded by a construct stub frame. The bottom-most
  // inlined frame might still be called by an uninlined construct stub.
  bool HasConstructStub() {
    return has_construct_stub_;
  }

  // Get an incoming argument.
  Object* GetParameter(int index) {
    ASSERT(0 <= index && index < parameters_count());
    return parameters_[index];
  }

  // Get an expression from the expression stack.
  Object* GetExpression(int index) {
    ASSERT(0 <= index && index < expression_count());
    return expression_stack_[index];
  }

  int GetSourcePosition() {
    return source_position_;
  }

 private:
  // Set an incoming argument.
  void SetParameter(int index, Object* obj) {
    ASSERT(0 <= index && index < parameters_count());
    parameters_[index] = obj;
  }

  // Set an expression on the expression stack.
  void SetExpression(int index, Object* obj) {
    ASSERT(0 <= index && index < expression_count());
    expression_stack_[index] = obj;
  }

  JSFunction* function_;
  bool has_construct_stub_;
  int parameters_count_;
  int expression_count_;
  Object** parameters_;
  Object** expression_stack_;
  int source_position_;

  friend class Deoptimizer;
};
#endif

} }  // namespace v8::internal

#endif  // V8_DEOPTIMIZER_H_