// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/crankshaft/lithium.h"

#include "src/ast/scopes.h"

#if V8_TARGET_ARCH_IA32
#include "src/crankshaft/ia32/lithium-ia32.h"  // NOLINT
#include "src/crankshaft/ia32/lithium-codegen-ia32.h"  // NOLINT
#elif V8_TARGET_ARCH_X64
#include "src/crankshaft/x64/lithium-x64.h"  // NOLINT
#include "src/crankshaft/x64/lithium-codegen-x64.h"  // NOLINT
#elif V8_TARGET_ARCH_ARM
#include "src/crankshaft/arm/lithium-arm.h"  // NOLINT
#include "src/crankshaft/arm/lithium-codegen-arm.h"  // NOLINT
#elif V8_TARGET_ARCH_PPC
#include "src/crankshaft/ppc/lithium-ppc.h"  // NOLINT
#include "src/crankshaft/ppc/lithium-codegen-ppc.h"  // NOLINT
#elif V8_TARGET_ARCH_MIPS
#include "src/crankshaft/mips/lithium-mips.h"  // NOLINT
#include "src/crankshaft/mips/lithium-codegen-mips.h"  // NOLINT
#elif V8_TARGET_ARCH_ARM64
#include "src/crankshaft/arm64/lithium-arm64.h"  // NOLINT
#include "src/crankshaft/arm64/lithium-codegen-arm64.h"  // NOLINT
#elif V8_TARGET_ARCH_MIPS64
#include "src/crankshaft/mips64/lithium-mips64.h"  // NOLINT
#include "src/crankshaft/mips64/lithium-codegen-mips64.h"  // NOLINT
#elif V8_TARGET_ARCH_X87
#include "src/crankshaft/x87/lithium-x87.h"  // NOLINT
#include "src/crankshaft/x87/lithium-codegen-x87.h"  // NOLINT
#elif V8_TARGET_ARCH_S390
#include "src/crankshaft/s390/lithium-s390.h"  // NOLINT
#include "src/crankshaft/s390/lithium-codegen-s390.h"  // NOLINT
#else
#error "Unknown architecture."
#endif

namespace v8 {
namespace internal {

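// Register names printed below are resolved through the Crankshaft register
// configuration.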
const auto GetRegConfig = RegisterConfiguration::Crankshaft;

void LOperand::PrintTo(StringStream* stream) {
  LUnallocated* unalloc = NULL;
  switch (kind()) {
    case INVALID:
      stream->Add("(0)");
      break;
    case UNALLOCATED:
      unalloc = LUnallocated::cast(this);
      stream->Add("v%d", unalloc->virtual_register());
      if (unalloc->basic_policy() == LUnallocated::FIXED_SLOT) {
        stream->Add("(=%dS)", unalloc->fixed_slot_index());
        break;
      }
      switch (unalloc->extended_policy()) {
        case LUnallocated::NONE:
          break;
        case LUnallocated::FIXED_REGISTER: {
          int reg_index = unalloc->fixed_register_index();
          if (reg_index < 0 || reg_index >= Register::kNumRegisters) {
            stream->Add("(=invalid_reg#%d)", reg_index);
          } else {
            const char* register_name =
                GetRegConfig()->GetGeneralRegisterName(reg_index);
            stream->Add("(=%s)", register_name);
          }
          break;
        }
        case LUnallocated::FIXED_DOUBLE_REGISTER: {
          int reg_index = unalloc->fixed_register_index();
          if (reg_index < 0 || reg_index >= DoubleRegister::kMaxNumRegisters) {
            stream->Add("(=invalid_double_reg#%d)", reg_index);
          } else {
            const char* double_register_name =
                GetRegConfig()->GetDoubleRegisterName(reg_index);
            stream->Add("(=%s)", double_register_name);
          }
          break;
        }
        case LUnallocated::MUST_HAVE_REGISTER:
          stream->Add("(R)");
          break;
        case LUnallocated::MUST_HAVE_DOUBLE_REGISTER:
          stream->Add("(D)");
          break;
        case LUnallocated::WRITABLE_REGISTER:
          stream->Add("(WR)");
          break;
        case LUnallocated::SAME_AS_FIRST_INPUT:
          stream->Add("(1)");
          break;
        case LUnallocated::ANY:
          stream->Add("(-)");
          break;
      }
      break;
    case CONSTANT_OPERAND:
      stream->Add("[constant:%d]", index());
      break;
    case STACK_SLOT:
      stream->Add("[stack:%d]", index());
      break;
    case DOUBLE_STACK_SLOT:
      stream->Add("[double_stack:%d]", index());
      break;
    case REGISTER: {
      int reg_index = index();
      if (reg_index < 0 || reg_index >= Register::kNumRegisters) {
        stream->Add("(=invalid_reg#%d|R)", reg_index);
      } else {
        stream->Add("[%s|R]",
                    GetRegConfig()->GetGeneralRegisterName(reg_index));
      }
      break;
    }
    case DOUBLE_REGISTER: {
      int reg_index = index();
      if (reg_index < 0 || reg_index >= DoubleRegister::kMaxNumRegisters) {
        stream->Add("(=invalid_double_reg#%d|R)", reg_index);
      } else {
        stream->Add("[%s|R]", GetRegConfig()->GetDoubleRegisterName(reg_index));
      }
      break;
    }
  }
}

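// Each operand sub-kind keeps a cache of pre-built operands for the indices
// 0..kNumCachedOperands-1: SetUpCache allocates and initializes the cache,
// TearDownCache releases it.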
template<LOperand::Kind kOperandKind, int kNumCachedOperands>
LSubKindOperand<kOperandKind, kNumCachedOperands>*
LSubKindOperand<kOperandKind, kNumCachedOperands>::cache = NULL;


template<LOperand::Kind kOperandKind, int kNumCachedOperands>
void LSubKindOperand<kOperandKind, kNumCachedOperands>::SetUpCache() {
  if (cache) return;
  cache = new LSubKindOperand[kNumCachedOperands];
  for (int i = 0; i < kNumCachedOperands; i++) {
    cache[i].ConvertTo(kOperandKind, i);
  }
}


template<LOperand::Kind kOperandKind, int kNumCachedOperands>
void LSubKindOperand<kOperandKind, kNumCachedOperands>::TearDownCache() {
  delete[] cache;
  cache = NULL;
}


void LOperand::SetUpCaches() {
#define LITHIUM_OPERAND_SETUP(name, type, number) L##name::SetUpCache();
  LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_SETUP)
#undef LITHIUM_OPERAND_SETUP
}


void LOperand::TearDownCaches() {
#define LITHIUM_OPERAND_TEARDOWN(name, type, number) L##name::TearDownCache();
  LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_TEARDOWN)
#undef LITHIUM_OPERAND_TEARDOWN
}


bool LParallelMove::IsRedundant() const {
  for (int i = 0; i < move_operands_.length(); ++i) {
    if (!move_operands_[i].IsRedundant()) return false;
  }
  return true;
}


void LParallelMove::PrintDataTo(StringStream* stream) const {
  bool first = true;
  for (int i = 0; i < move_operands_.length(); ++i) {
    if (!move_operands_[i].IsEliminated()) {
      LOperand* source = move_operands_[i].source();
      LOperand* destination = move_operands_[i].destination();
      if (!first) stream->Add(" ");
      first = false;
      if (source->Equals(destination)) {
        destination->PrintTo(stream);
      } else {
        destination->PrintTo(stream);
        stream->Add(" = ");
        source->PrintTo(stream);
      }
      stream->Add(";");
    }
  }
}


void LEnvironment::PrintTo(StringStream* stream) {
  stream->Add("[id=%d|", ast_id().ToInt());
  if (deoptimization_index() != Safepoint::kNoDeoptimizationIndex) {
    stream->Add("deopt_id=%d|", deoptimization_index());
  }
  stream->Add("parameters=%d|", parameter_count());
  stream->Add("arguments_stack_height=%d|", arguments_stack_height());
  for (int i = 0; i < values_.length(); ++i) {
    if (i != 0) stream->Add(";");
    if (values_[i] == NULL) {
      stream->Add("[hole]");
    } else {
      values_[i]->PrintTo(stream);
    }
  }
  stream->Add("]");
}


void LPointerMap::RecordPointer(LOperand* op, Zone* zone) {
  // Do not record arguments as pointers.
  if (op->IsStackSlot() && op->index() < 0) return;
  DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
  pointer_operands_.Add(op, zone);
}


void LPointerMap::RemovePointer(LOperand* op) {
  // Do not record arguments as pointers.
  if (op->IsStackSlot() && op->index() < 0) return;
  DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
  for (int i = 0; i < pointer_operands_.length(); ++i) {
    if (pointer_operands_[i]->Equals(op)) {
      pointer_operands_.Remove(i);
      --i;
    }
  }
}


void LPointerMap::RecordUntagged(LOperand* op, Zone* zone) {
  // Do not record arguments as pointers.
  if (op->IsStackSlot() && op->index() < 0) return;
  DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
  untagged_operands_.Add(op, zone);
}


void LPointerMap::PrintTo(StringStream* stream) {
  stream->Add("{");
  for (int i = 0; i < pointer_operands_.length(); ++i) {
    if (i != 0) stream->Add(";");
    pointer_operands_[i]->PrintTo(stream);
  }
  stream->Add("}");
}

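// The base frame size depends on what is being compiled: stubs use a typed
// frame, everything else a standard frame.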
LChunk::LChunk(CompilationInfo* info, HGraph* graph)
    : base_frame_slots_(info->IsStub()
                            ? TypedFrameConstants::kFixedSlotCount
                            : StandardFrameConstants::kFixedSlotCount),
      current_frame_slots_(base_frame_slots_),
      info_(info),
      graph_(graph),
      instructions_(32, info->zone()),
      pointer_maps_(8, info->zone()),
      inlined_functions_(1, info->zone()),
      deprecation_dependencies_(32, info->zone()),
      stability_dependencies_(8, info->zone()) {}

LLabel* LChunk::GetLabel(int block_id) const {
  HBasicBlock* block = graph_->blocks()->at(block_id);
  int first_instruction = block->first_instruction_index();
  return LLabel::cast(instructions_[first_instruction]);
}


int LChunk::LookupDestination(int block_id) const {
  LLabel* cur = GetLabel(block_id);
  while (cur->replacement() != NULL) {
    cur = cur->replacement();
  }
  return cur->block_id();
}

Label* LChunk::GetAssemblyLabel(int block_id) const {
  LLabel* label = GetLabel(block_id);
  DCHECK(!label->HasReplacement());
  return label->label();
}

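// A block can be skipped when its label is redundant and not a loop header,
// it ends in a goto, and everything in between is a redundant gap; the label
// is then redirected to the goto's target.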
void LChunk::MarkEmptyBlocks() {
  LPhase phase("L_Mark empty blocks", this);
  for (int i = 0; i < graph()->blocks()->length(); ++i) {
    HBasicBlock* block = graph()->blocks()->at(i);
    int first = block->first_instruction_index();
    int last = block->last_instruction_index();
    LInstruction* first_instr = instructions()->at(first);
    LInstruction* last_instr = instructions()->at(last);

    LLabel* label = LLabel::cast(first_instr);
    if (last_instr->IsGoto()) {
      LGoto* goto_instr = LGoto::cast(last_instr);
      if (label->IsRedundant() &&
          !label->is_loop_header()) {
        bool can_eliminate = true;
        for (int i = first + 1; i < last && can_eliminate; ++i) {
          LInstruction* cur = instructions()->at(i);
          if (cur->IsGap()) {
            LGap* gap = LGap::cast(cur);
            if (!gap->IsRedundant()) {
              can_eliminate = false;
            }
          } else {
            can_eliminate = false;
          }
        }
        if (can_eliminate) {
          label->set_replacement(GetLabel(goto_instr->block_id()));
        }
      }
    }
  }
}

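// Every instruction is paired with an LInstructionGap: the gap is emitted
// before control instructions and after all others. The instruction's index
// is recorded in its pointer map, if it has one.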
void LChunk::AddInstruction(LInstruction* instr, HBasicBlock* block) {
  LInstructionGap* gap = new (zone()) LInstructionGap(block);
  gap->set_hydrogen_value(instr->hydrogen_value());
  int index = -1;
  if (instr->IsControl()) {
    instructions_.Add(gap, zone());
    index = instructions_.length();
    instructions_.Add(instr, zone());
  } else {
    index = instructions_.length();
    instructions_.Add(instr, zone());
    instructions_.Add(gap, zone());
  }
  if (instr->HasPointerMap()) {
    pointer_maps_.Add(instr->pointer_map(), zone());
    instr->pointer_map()->set_lithium_position(index);
  }
}

LConstantOperand* LChunk::DefineConstantOperand(HConstant* constant) {
  return LConstantOperand::Create(constant->id(), zone());
}


int LChunk::GetParameterStackSlot(int index) const {
  // The receiver is at index 0, the first parameter at index 1, so we
  // shift all parameter indexes down by the number of parameters, and
  // make sure they end up negative so they are distinguishable from
  // spill slots.
  int result = index - info()->num_parameters() - 1;

  DCHECK(result < 0);
  return result;
}


// A parameter relative to ebp in the arguments stub.
int LChunk::ParameterAt(int index) {
  DCHECK(-1 <= index);  // -1 is the receiver.
  return (1 + info()->scope()->num_parameters() - index) *
      kPointerSize;
}


LGap* LChunk::GetGapAt(int index) const {
  return LGap::cast(instructions_[index]);
}


bool LChunk::IsGapAt(int index) const {
  return instructions_[index]->IsGap();
}


int LChunk::NearestGapPos(int index) const {
  while (!IsGapAt(index)) index--;
  return index;
}


void LChunk::AddGapMove(int index, LOperand* from, LOperand* to) {
  GetGapAt(index)->GetOrCreateParallelMove(
      LGap::START, zone())->AddMove(from, to, zone());
}


HConstant* LChunk::LookupConstant(LConstantOperand* operand) const {
  return HConstant::cast(graph_->LookupValue(operand->index()));
}


Representation LChunk::LookupLiteralRepresentation(
    LConstantOperand* operand) const {
  return graph_->LookupValue(operand->index())->representation();
}


void LChunk::CommitDependencies(Handle<Code> code) const {
  if (!code->is_optimized_code()) return;
  HandleScope scope(isolate());

  for (Handle<Map> map : deprecation_dependencies_) {
    DCHECK(!map->is_deprecated());
    DCHECK(map->CanBeDeprecated());
    Map::AddDependentCode(map, DependentCode::kTransitionGroup, code);
  }

  for (Handle<Map> map : stability_dependencies_) {
    DCHECK(map->is_stable());
    DCHECK(map->CanTransition());
    Map::AddDependentCode(map, DependentCode::kPrototypeCheckGroup, code);
  }

  info_->dependencies()->Commit(code);
}

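// Builds the Lithium chunk for the given graph and runs the register
// allocator. Returns NULL and aborts optimization if the graph needs more
// virtual registers than supported or if register allocation fails.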
LChunk* LChunk::NewChunk(HGraph* graph) {
  DisallowHandleAllocation no_handles;
  DisallowHeapAllocation no_gc;
  graph->DisallowAddingNewValues();
  int values = graph->GetMaximumValueID();
  CompilationInfo* info = graph->info();
  if (values > LUnallocated::kMaxVirtualRegisters) {
    info->AbortOptimization(kNotEnoughVirtualRegistersForValues);
    return NULL;
  }
  LAllocator allocator(values, graph);
  LChunkBuilder builder(info, graph, &allocator);
  LChunk* chunk = builder.Build();
  if (chunk == NULL) return NULL;

  if (!allocator.Allocate(chunk)) {
    info->AbortOptimization(kNotEnoughVirtualRegistersRegalloc);
    return NULL;
  }

  chunk->set_allocated_double_registers(
      allocator.assigned_double_registers());

  return chunk;
}


Handle<Code> LChunk::Codegen() {
  MacroAssembler assembler(info()->isolate(), NULL, 0,
                           CodeObjectRequired::kYes);
  LOG_CODE_EVENT(info()->isolate(),
                 CodeStartLinePosInfoRecordEvent(
                     assembler.positions_recorder()));
  // Code serializer only takes unoptimized code.
  DCHECK(!info()->will_serialize());
  LCodeGen generator(this, &assembler, info());

  MarkEmptyBlocks();

  if (generator.GenerateCode()) {
    generator.CheckEnvironmentUsage();
    CodeGenerator::MakeCodePrologue(info(), "optimized");
    Handle<Code> code = CodeGenerator::MakeCodeEpilogue(&assembler, info());
    generator.FinishCode(code);
    CommitDependencies(code);
    code->set_is_crankshafted(true);
    void* jit_handler_data =
        assembler.positions_recorder()->DetachJITHandlerData();
    LOG_CODE_EVENT(info()->isolate(),
                   CodeEndLinePosInfoRecordEvent(AbstractCode::cast(*code),
                                                 jit_handler_data));

    CodeGenerator::PrintCode(code, info());
    DCHECK(!(info()->GetMustNotHaveEagerFrame() &&
             generator.NeedsEagerFrame()));
    return code;
  }
  assembler.AbortedCodeGeneration();
  return Handle<Code>::null();
}

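// Records the double registers assigned by the allocator and, when caller
// doubles have to be saved, reserves frame slots for them: two slots per
// double if a double is twice the pointer size, otherwise one.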
void LChunk::set_allocated_double_registers(BitVector* allocated_registers) {
  allocated_double_registers_ = allocated_registers;
  BitVector* doubles = allocated_double_registers();
  BitVector::Iterator iterator(doubles);
  while (!iterator.Done()) {
    if (info()->saves_caller_doubles()) {
      if (kDoubleSize == kPointerSize * 2) {
        current_frame_slots_ += 2;
      } else {
        current_frame_slots_++;
      }
    }
    iterator.Advance();
  }
}


void LChunkBuilderBase::Abort(BailoutReason reason) {
  info()->AbortOptimization(reason);
  status_ = ABORTED;
}


void LChunkBuilderBase::Retry(BailoutReason reason) {
  info()->RetryOptimization(reason);
  status_ = ABORTED;
}

void LChunkBuilderBase::CreateLazyBailoutForCall(HBasicBlock* current_block,
                                                 LInstruction* instr,
                                                 HInstruction* hydrogen_val) {
  if (!instr->IsCall()) return;

  HEnvironment* hydrogen_env = current_block->last_environment();
  HValue* hydrogen_value_for_lazy_bailout = hydrogen_val;
  DCHECK_NOT_NULL(hydrogen_env);
  if (instr->IsSyntacticTailCall()) {
    // If it was a syntactic tail call, we need to drop the current frame and
    // all the frames on top of it that are either an arguments adaptor frame
    // or a tail caller frame.
    hydrogen_env = hydrogen_env->outer();
    while (hydrogen_env != nullptr &&
           (hydrogen_env->frame_type() == ARGUMENTS_ADAPTOR ||
            hydrogen_env->frame_type() == TAIL_CALLER_FUNCTION)) {
      hydrogen_env = hydrogen_env->outer();
    }
    if (hydrogen_env != nullptr) {
      if (hydrogen_env->frame_type() == JS_FUNCTION) {
        // If the outer frame is a function frame, we have to replay the
        // environment manually because
        // 1) it does not contain the result of the inlined function yet,
        // 2) we can't find the proper simulate that corresponds to the point
        //    after the inlined call to do a ReplayEnvironment() on.
        // So we push the return value on top of the outer environment.
        // For JS_GETTER/JS_SETTER/JS_CONSTRUCT nothing has to be done here:
        // the deoptimizer ensures that the result of the callee is correctly
        // propagated to the result register during deoptimization.
        hydrogen_env = hydrogen_env->Copy();
        hydrogen_env->Push(hydrogen_val);
      }
    } else {
      // Although we don't need this lazy bailout for normal execution
      // (because when we tail call from the outermost function we should pop
      // its frame), we still need it when the debugger is on.
      hydrogen_env = current_block->last_environment();
    }
  } else {
    if (hydrogen_val->HasObservableSideEffects()) {
      HSimulate* sim = HSimulate::cast(hydrogen_val->next());
      sim->ReplayEnvironment(hydrogen_env);
      hydrogen_value_for_lazy_bailout = sim;
    }
  }
  LInstruction* bailout = LChunkBuilderBase::AssignEnvironment(
      new (zone()) LLazyBailout(), hydrogen_env);
  bailout->set_hydrogen_value(hydrogen_value_for_lazy_bailout);
  chunk_->AddInstruction(bailout, current_block);
}

LInstruction* LChunkBuilderBase::AssignEnvironment(LInstruction* instr,
                                                   HEnvironment* hydrogen_env) {
  int argument_index_accumulator = 0;
  ZoneList<HValue*> objects_to_materialize(0, zone());
  DCHECK_NE(TAIL_CALLER_FUNCTION, hydrogen_env->frame_type());
  instr->set_environment(CreateEnvironment(
      hydrogen_env, &argument_index_accumulator, &objects_to_materialize));
  return instr;
}

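// Builds the LEnvironment (deoptimization) description for a Hydrogen
// environment and, recursively, for its outer environments. Captured objects
// are recorded as materialization markers and filled in by
// AddObjectToMaterialize below.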
LEnvironment* LChunkBuilderBase::CreateEnvironment(
    HEnvironment* hydrogen_env, int* argument_index_accumulator,
    ZoneList<HValue*>* objects_to_materialize) {
  if (hydrogen_env == NULL) return NULL;

  BailoutId ast_id = hydrogen_env->ast_id();
  DCHECK(!ast_id.IsNone() ||
         (hydrogen_env->frame_type() != JS_FUNCTION &&
          hydrogen_env->frame_type() != TAIL_CALLER_FUNCTION));

  if (hydrogen_env->frame_type() == TAIL_CALLER_FUNCTION) {
    // Skip a potential outer arguments adaptor frame.
    HEnvironment* outer_hydrogen_env = hydrogen_env->outer();
    if (outer_hydrogen_env != nullptr &&
        outer_hydrogen_env->frame_type() == ARGUMENTS_ADAPTOR) {
      outer_hydrogen_env = outer_hydrogen_env->outer();
    }
    LEnvironment* outer = CreateEnvironment(
        outer_hydrogen_env, argument_index_accumulator, objects_to_materialize);
    return new (zone())
        LEnvironment(hydrogen_env->closure(), hydrogen_env->frame_type(),
                     ast_id, 0, 0, 0, outer, hydrogen_env->entry(), zone());
  }

  LEnvironment* outer =
      CreateEnvironment(hydrogen_env->outer(), argument_index_accumulator,
                        objects_to_materialize);

  int omitted_count = (hydrogen_env->frame_type() == JS_FUNCTION)
                          ? 0
                          : hydrogen_env->specials_count();

  int value_count = hydrogen_env->length() - omitted_count;
  LEnvironment* result =
      new(zone()) LEnvironment(hydrogen_env->closure(),
                               hydrogen_env->frame_type(),
                               ast_id,
                               hydrogen_env->parameter_count(),
                               argument_count_,
                               value_count,
                               outer,
                               hydrogen_env->entry(),
                               zone());
  int argument_index = *argument_index_accumulator;

  // Store the environment description into the environment
  // (with holes for nested objects).
  for (int i = 0; i < hydrogen_env->length(); ++i) {
    if (hydrogen_env->is_special_index(i) &&
        hydrogen_env->frame_type() != JS_FUNCTION) {
      continue;
    }
    LOperand* op;
    HValue* value = hydrogen_env->values()->at(i);
    CHECK(!value->IsPushArguments());  // Do not deopt outgoing arguments.
    if (value->IsArgumentsObject() || value->IsCapturedObject()) {
      op = LEnvironment::materialization_marker();
    } else {
      op = UseAny(value);
    }
    result->AddValue(op,
                     value->representation(),
                     value->CheckFlag(HInstruction::kUint32));
  }

  // Recursively store the nested objects into the environment.
  for (int i = 0; i < hydrogen_env->length(); ++i) {
    if (hydrogen_env->is_special_index(i)) continue;

    HValue* value = hydrogen_env->values()->at(i);
    if (value->IsArgumentsObject() || value->IsCapturedObject()) {
      AddObjectToMaterialize(value, objects_to_materialize, result);
    }
  }

  if (hydrogen_env->frame_type() == JS_FUNCTION) {
    *argument_index_accumulator = argument_index;
  }

  return result;
}

// Add an object to the supplied environment and object materialization list.
//
// Notes:
//
// We are building three lists here:
//
// 1. In the result->object_mapping_ list (added to by the
//    LEnvironment::Add*Object methods), we store the lengths (number
//    of fields) of the captured objects in depth-first traversal order, or,
//    in case of duplicated objects, we store the index to the duplicate object
//    (with a tag to differentiate between captured and duplicated objects).
//
// 2. The object fields are stored in the result->values_ list
//    (added to by the LEnvironment::AddValue method) sequentially as lists
//    of fields with holes for nested objects (the holes will be expanded
//    later by LCodegen::AddToTranslation according to the
//    LEnvironment::object_mapping_ list).
//
// 3. The auxiliary objects_to_materialize array stores the hydrogen values
//    in the same order as the result->object_mapping_ list. This is used
//    to detect duplicate values and calculate the corresponding object index.
void LChunkBuilderBase::AddObjectToMaterialize(HValue* value,
    ZoneList<HValue*>* objects_to_materialize, LEnvironment* result) {
  int object_index = objects_to_materialize->length();
  // Store the hydrogen value into the de-duplication array.
  objects_to_materialize->Add(value, zone());
  // Find out whether we are storing a duplicated value.
  int previously_materialized_object = -1;
  for (int prev = 0; prev < object_index; ++prev) {
    if (objects_to_materialize->at(prev) == value) {
      previously_materialized_object = prev;
      break;
    }
  }
  // Store the captured object length (or duplicated object index)
  // into the environment. For duplicated objects, we stop here.
  int length = value->OperandCount();
  bool is_arguments = value->IsArgumentsObject();
  if (previously_materialized_object >= 0) {
    result->AddDuplicateObject(previously_materialized_object);
    return;
  } else {
    result->AddNewObject(is_arguments ? length - 1 : length, is_arguments);
  }
  // Store the captured object's fields into the environment.
  for (int i = is_arguments ? 1 : 0; i < length; ++i) {
    LOperand* op;
    HValue* arg_value = value->OperandAt(i);
    if (arg_value->IsArgumentsObject() || arg_value->IsCapturedObject()) {
      // Insert a hole for nested objects.
      op = LEnvironment::materialization_marker();
    } else {
      DCHECK(!arg_value->IsPushArguments());
      // For ordinary values, tell the register allocator we need the value
      // to be alive here.
      op = UseAny(arg_value);
    }
    result->AddValue(op,
                     arg_value->representation(),
                     arg_value->CheckFlag(HInstruction::kUint32));
  }
  // Recursively store all the nested captured objects into the environment.
  for (int i = is_arguments ? 1 : 0; i < length; ++i) {
    HValue* arg_value = value->OperandAt(i);
    if (arg_value->IsArgumentsObject() || arg_value->IsCapturedObject()) {
      AddObjectToMaterialize(arg_value, objects_to_materialize, result);
    }
  }
}


LPhase::~LPhase() {
  if (ShouldProduceTraceOutput()) {
    isolate()->GetHTracer()->TraceLithium(name(), chunk_);
  }
}


}  // namespace internal
}  // namespace v8