// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/crankshaft/lithium.h"

#include "src/ast/scopes.h"

#if V8_TARGET_ARCH_IA32
#include "src/crankshaft/ia32/lithium-ia32.h"  // NOLINT
#include "src/crankshaft/ia32/lithium-codegen-ia32.h"  // NOLINT
#elif V8_TARGET_ARCH_X64
#include "src/crankshaft/x64/lithium-x64.h"  // NOLINT
#include "src/crankshaft/x64/lithium-codegen-x64.h"  // NOLINT
#elif V8_TARGET_ARCH_ARM
#include "src/crankshaft/arm/lithium-arm.h"  // NOLINT
#include "src/crankshaft/arm/lithium-codegen-arm.h"  // NOLINT
#elif V8_TARGET_ARCH_PPC
#include "src/crankshaft/ppc/lithium-ppc.h"  // NOLINT
#include "src/crankshaft/ppc/lithium-codegen-ppc.h"  // NOLINT
#elif V8_TARGET_ARCH_MIPS
#include "src/crankshaft/mips/lithium-mips.h"  // NOLINT
#include "src/crankshaft/mips/lithium-codegen-mips.h"  // NOLINT
#elif V8_TARGET_ARCH_ARM64
#include "src/crankshaft/arm64/lithium-arm64.h"  // NOLINT
#include "src/crankshaft/arm64/lithium-codegen-arm64.h"  // NOLINT
#elif V8_TARGET_ARCH_MIPS64
#include "src/crankshaft/mips64/lithium-mips64.h"  // NOLINT
#include "src/crankshaft/mips64/lithium-codegen-mips64.h"  // NOLINT
#elif V8_TARGET_ARCH_X87
#include "src/crankshaft/x87/lithium-x87.h"  // NOLINT
#include "src/crankshaft/x87/lithium-codegen-x87.h"  // NOLINT
#elif V8_TARGET_ARCH_S390
#include "src/crankshaft/s390/lithium-s390.h"  // NOLINT
#include "src/crankshaft/s390/lithium-codegen-s390.h"  // NOLINT
#else
#error "Unknown architecture."
#endif

namespace v8 {
namespace internal {


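// Writes a human-readable description of this operand (allocation policy,
// assigned register, stack slot or constant index) to |stream|. Intended for
// debug/trace output.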
void LOperand::PrintTo(StringStream* stream) {
  LUnallocated* unalloc = NULL;
  switch (kind()) {
    case INVALID:
      stream->Add("(0)");
      break;
    case UNALLOCATED:
      unalloc = LUnallocated::cast(this);
      stream->Add("v%d", unalloc->virtual_register());
      if (unalloc->basic_policy() == LUnallocated::FIXED_SLOT) {
        stream->Add("(=%dS)", unalloc->fixed_slot_index());
        break;
      }
      switch (unalloc->extended_policy()) {
        case LUnallocated::NONE:
          break;
        case LUnallocated::FIXED_REGISTER: {
          int reg_index = unalloc->fixed_register_index();
          if (reg_index < 0 || reg_index >= Register::kNumRegisters) {
            stream->Add("(=invalid_reg#%d)", reg_index);
          } else {
            const char* register_name =
                Register::from_code(reg_index).ToString();
            stream->Add("(=%s)", register_name);
          }
          break;
        }
        case LUnallocated::FIXED_DOUBLE_REGISTER: {
          int reg_index = unalloc->fixed_register_index();
          if (reg_index < 0 || reg_index >= DoubleRegister::kMaxNumRegisters) {
            stream->Add("(=invalid_double_reg#%d)", reg_index);
          } else {
            const char* double_register_name =
                DoubleRegister::from_code(reg_index).ToString();
            stream->Add("(=%s)", double_register_name);
          }
          break;
        }
        case LUnallocated::MUST_HAVE_REGISTER:
          stream->Add("(R)");
          break;
        case LUnallocated::MUST_HAVE_DOUBLE_REGISTER:
          stream->Add("(D)");
          break;
        case LUnallocated::WRITABLE_REGISTER:
          stream->Add("(WR)");
          break;
        case LUnallocated::SAME_AS_FIRST_INPUT:
          stream->Add("(1)");
          break;
        case LUnallocated::ANY:
          stream->Add("(-)");
          break;
      }
      break;
    case CONSTANT_OPERAND:
      stream->Add("[constant:%d]", index());
      break;
    case STACK_SLOT:
      stream->Add("[stack:%d]", index());
      break;
    case DOUBLE_STACK_SLOT:
      stream->Add("[double_stack:%d]", index());
      break;
    case REGISTER: {
      int reg_index = index();
      if (reg_index < 0 || reg_index >= Register::kNumRegisters) {
        stream->Add("(=invalid_reg#%d|R)", reg_index);
      } else {
        stream->Add("[%s|R]", Register::from_code(reg_index).ToString());
      }
      break;
    }
    case DOUBLE_REGISTER: {
      int reg_index = index();
      if (reg_index < 0 || reg_index >= DoubleRegister::kMaxNumRegisters) {
        stream->Add("(=invalid_double_reg#%d|R)", reg_index);
      } else {
        stream->Add("[%s|R]", DoubleRegister::from_code(reg_index).ToString());
      }
      break;
    }
  }
}


template<LOperand::Kind kOperandKind, int kNumCachedOperands>
LSubKindOperand<kOperandKind, kNumCachedOperands>*
LSubKindOperand<kOperandKind, kNumCachedOperands>::cache = NULL;


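// Lazily allocates the static array of the first kNumCachedOperands operands
// of this kind so that commonly used operands can be shared instead of being
// re-created for every use.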
template<LOperand::Kind kOperandKind, int kNumCachedOperands>
void LSubKindOperand<kOperandKind, kNumCachedOperands>::SetUpCache() {
  if (cache) return;
  cache = new LSubKindOperand[kNumCachedOperands];
  for (int i = 0; i < kNumCachedOperands; i++) {
    cache[i].ConvertTo(kOperandKind, i);
  }
}


template<LOperand::Kind kOperandKind, int kNumCachedOperands>
void LSubKindOperand<kOperandKind, kNumCachedOperands>::TearDownCache() {
  delete[] cache;
  cache = NULL;
}


void LOperand::SetUpCaches() {
#define LITHIUM_OPERAND_SETUP(name, type, number) L##name::SetUpCache();
  LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_SETUP)
#undef LITHIUM_OPERAND_SETUP
}


void LOperand::TearDownCaches() {
#define LITHIUM_OPERAND_TEARDOWN(name, type, number) L##name::TearDownCache();
  LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_TEARDOWN)
#undef LITHIUM_OPERAND_TEARDOWN
}


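// A parallel move is redundant if every move it contains is itself redundant.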
bool LParallelMove::IsRedundant() const {
  for (int i = 0; i < move_operands_.length(); ++i) {
    if (!move_operands_[i].IsRedundant()) return false;
  }
  return true;
}


void LParallelMove::PrintDataTo(StringStream* stream) const {
  bool first = true;
  for (int i = 0; i < move_operands_.length(); ++i) {
    if (!move_operands_[i].IsEliminated()) {
      LOperand* source = move_operands_[i].source();
      LOperand* destination = move_operands_[i].destination();
      if (!first) stream->Add(" ");
      first = false;
      if (source->Equals(destination)) {
        destination->PrintTo(stream);
      } else {
        destination->PrintTo(stream);
        stream->Add(" = ");
        source->PrintTo(stream);
      }
      stream->Add(";");
    }
  }
}


void LEnvironment::PrintTo(StringStream* stream) {
  stream->Add("[id=%d|", ast_id().ToInt());
  if (deoptimization_index() != Safepoint::kNoDeoptimizationIndex) {
    stream->Add("deopt_id=%d|", deoptimization_index());
  }
  stream->Add("parameters=%d|", parameter_count());
  stream->Add("arguments_stack_height=%d|", arguments_stack_height());
  for (int i = 0; i < values_.length(); ++i) {
    if (i != 0) stream->Add(";");
    if (values_[i] == NULL) {
      stream->Add("[hole]");
    } else {
      values_[i]->PrintTo(stream);
    }
  }
  stream->Add("]");
}


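// Registers |op| as holding a tagged pointer at this safepoint so that the GC
// can find and update it. Outgoing arguments (stack slots with negative
// indices) and double operands are never recorded.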
void LPointerMap::RecordPointer(LOperand* op, Zone* zone) {
  // Do not record arguments as pointers.
  if (op->IsStackSlot() && op->index() < 0) return;
  DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
  pointer_operands_.Add(op, zone);
}


void LPointerMap::RemovePointer(LOperand* op) {
  // Do not record arguments as pointers.
  if (op->IsStackSlot() && op->index() < 0) return;
  DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
  for (int i = 0; i < pointer_operands_.length(); ++i) {
    if (pointer_operands_[i]->Equals(op)) {
      pointer_operands_.Remove(i);
      --i;
    }
  }
}


void LPointerMap::RecordUntagged(LOperand* op, Zone* zone) {
  // Do not record arguments as pointers.
  if (op->IsStackSlot() && op->index() < 0) return;
  DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
  untagged_operands_.Add(op, zone);
}


void LPointerMap::PrintTo(StringStream* stream) {
  stream->Add("{");
  for (int i = 0; i < pointer_operands_.length(); ++i) {
    if (i != 0) stream->Add(";");
    pointer_operands_[i]->PrintTo(stream);
  }
  stream->Add("}");
}

LChunk::LChunk(CompilationInfo* info, HGraph* graph)
    : base_frame_slots_(info->IsStub()
                            ? TypedFrameConstants::kFixedSlotCount
                            : StandardFrameConstants::kFixedSlotCount),
      current_frame_slots_(base_frame_slots_),
      info_(info),
      graph_(graph),
      instructions_(32, info->zone()),
      pointer_maps_(8, info->zone()),
      inlined_functions_(1, info->zone()),
      deprecation_dependencies_(32, info->zone()),
      stability_dependencies_(8, info->zone()) {}

LLabel* LChunk::GetLabel(int block_id) const {
  HBasicBlock* block = graph_->blocks()->at(block_id);
  int first_instruction = block->first_instruction_index();
  return LLabel::cast(instructions_[first_instruction]);
}


int LChunk::LookupDestination(int block_id) const {
  LLabel* cur = GetLabel(block_id);
  while (cur->replacement() != NULL) {
    cur = cur->replacement();
  }
  return cur->block_id();
}

Label* LChunk::GetAssemblyLabel(int block_id) const {
  LLabel* label = GetLabel(block_id);
  DCHECK(!label->HasReplacement());
  return label->label();
}


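// Finds blocks that consist only of a redundant label, redundant gap moves
// and an unconditional goto, and redirects their labels to the goto target so
// such blocks can be skipped during code generation.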
void LChunk::MarkEmptyBlocks() {
  LPhase phase("L_Mark empty blocks", this);
  for (int i = 0; i < graph()->blocks()->length(); ++i) {
    HBasicBlock* block = graph()->blocks()->at(i);
    int first = block->first_instruction_index();
    int last = block->last_instruction_index();
    LInstruction* first_instr = instructions()->at(first);
    LInstruction* last_instr = instructions()->at(last);

    LLabel* label = LLabel::cast(first_instr);
    if (last_instr->IsGoto()) {
      LGoto* goto_instr = LGoto::cast(last_instr);
      if (label->IsRedundant() &&
          !label->is_loop_header()) {
        bool can_eliminate = true;
        for (int i = first + 1; i < last && can_eliminate; ++i) {
          LInstruction* cur = instructions()->at(i);
          if (cur->IsGap()) {
            LGap* gap = LGap::cast(cur);
            if (!gap->IsRedundant()) {
              can_eliminate = false;
            }
          } else {
            can_eliminate = false;
          }
        }
        if (can_eliminate) {
          label->set_replacement(GetLabel(goto_instr->block_id()));
        }
      }
    }
  }
}


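// Appends |instr| to the instruction stream together with a companion gap:
// the gap is emitted before control instructions and after all others. Also
// registers the instruction's pointer map, if any, at the resulting position.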
void LChunk::AddInstruction(LInstruction* instr, HBasicBlock* block) {
  LInstructionGap* gap = new (zone()) LInstructionGap(block);
  gap->set_hydrogen_value(instr->hydrogen_value());
  int index = -1;
  if (instr->IsControl()) {
    instructions_.Add(gap, zone());
    index = instructions_.length();
    instructions_.Add(instr, zone());
  } else {
    index = instructions_.length();
    instructions_.Add(instr, zone());
    instructions_.Add(gap, zone());
  }
  if (instr->HasPointerMap()) {
    pointer_maps_.Add(instr->pointer_map(), zone());
    instr->pointer_map()->set_lithium_position(index);
  }
}

LConstantOperand* LChunk::DefineConstantOperand(HConstant* constant) {
  return LConstantOperand::Create(constant->id(), zone());
}


int LChunk::GetParameterStackSlot(int index) const {
  // The receiver is at index 0, the first parameter at index 1, so we
  // shift all parameter indexes down by the number of parameters, and
  // make sure they end up negative so they are distinguishable from
  // spill slots.
  int result = index - info()->num_parameters() - 1;

  DCHECK(result < 0);
  return result;
}


// A parameter relative to ebp in the arguments stub.
int LChunk::ParameterAt(int index) {
  DCHECK(-1 <= index);  // -1 is the receiver.
  return (1 + info()->scope()->num_parameters() - index) *
      kPointerSize;
}


LGap* LChunk::GetGapAt(int index) const {
  return LGap::cast(instructions_[index]);
}


bool LChunk::IsGapAt(int index) const {
  return instructions_[index]->IsGap();
}


int LChunk::NearestGapPos(int index) const {
  while (!IsGapAt(index)) index--;
  return index;
}


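// Adds a move from |from| to |to| to the START parallel move of the gap at
// |index|.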
void LChunk::AddGapMove(int index, LOperand* from, LOperand* to) {
  GetGapAt(index)->GetOrCreateParallelMove(
      LGap::START, zone())->AddMove(from, to, zone());
}


HConstant* LChunk::LookupConstant(LConstantOperand* operand) const {
  return HConstant::cast(graph_->LookupValue(operand->index()));
}


Representation LChunk::LookupLiteralRepresentation(
    LConstantOperand* operand) const {
  return graph_->LookupValue(operand->index())->representation();
}


void LChunk::CommitDependencies(Handle<Code> code) const {
  if (!code->is_optimized_code()) return;
  HandleScope scope(isolate());

  for (Handle<Map> map : deprecation_dependencies_) {
    DCHECK(!map->is_deprecated());
    DCHECK(map->CanBeDeprecated());
    Map::AddDependentCode(map, DependentCode::kTransitionGroup, code);
  }

  for (Handle<Map> map : stability_dependencies_) {
    DCHECK(map->is_stable());
    DCHECK(map->CanTransition());
    Map::AddDependentCode(map, DependentCode::kPrototypeCheckGroup, code);
  }

  info_->dependencies()->Commit(code);
}


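// Builds the Lithium chunk for |graph| and runs the register allocator on it.
// Returns NULL (and aborts the optimization) if the graph needs more virtual
// registers than the allocator supports or if register allocation fails.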
LChunk* LChunk::NewChunk(HGraph* graph) {
  DisallowHandleAllocation no_handles;
  DisallowHeapAllocation no_gc;
  graph->DisallowAddingNewValues();
  int values = graph->GetMaximumValueID();
  CompilationInfo* info = graph->info();
  if (values > LUnallocated::kMaxVirtualRegisters) {
    info->AbortOptimization(kNotEnoughVirtualRegistersForValues);
    return NULL;
  }
  LAllocator allocator(values, graph);
  LChunkBuilder builder(info, graph, &allocator);
  LChunk* chunk = builder.Build();
  if (chunk == NULL) return NULL;

  if (!allocator.Allocate(chunk)) {
    info->AbortOptimization(kNotEnoughVirtualRegistersRegalloc);
    return NULL;
  }

  chunk->set_allocated_double_registers(
      allocator.assigned_double_registers());

  return chunk;
}


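// Generates optimized code for this chunk. Returns the new code object, or a
// null handle if code generation was aborted.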
Handle<Code> LChunk::Codegen() {
  MacroAssembler assembler(info()->isolate(), NULL, 0,
                           CodeObjectRequired::kYes);
  LOG_CODE_EVENT(info()->isolate(),
                 CodeStartLinePosInfoRecordEvent(
                     assembler.positions_recorder()));
  // Code serializer only takes unoptimized code.
  DCHECK(!info()->will_serialize());
  LCodeGen generator(this, &assembler, info());

  MarkEmptyBlocks();

  if (generator.GenerateCode()) {
    generator.CheckEnvironmentUsage();
    CodeGenerator::MakeCodePrologue(info(), "optimized");
    Handle<Code> code = CodeGenerator::MakeCodeEpilogue(&assembler, info());
    generator.FinishCode(code);
    CommitDependencies(code);
    code->set_is_crankshafted(true);
    void* jit_handler_data =
        assembler.positions_recorder()->DetachJITHandlerData();
    LOG_CODE_EVENT(info()->isolate(),
                   CodeEndLinePosInfoRecordEvent(AbstractCode::cast(*code),
                                                 jit_handler_data));

    CodeGenerator::PrintCode(code, info());
    DCHECK(!(info()->isolate()->serializer_enabled() &&
             info()->GetMustNotHaveEagerFrame() &&
             generator.NeedsEagerFrame()));
    return code;
  }
  assembler.AbortedCodeGeneration();
  return Handle<Code>::null();
}


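// Records the set of double registers assigned by the allocator and, when the
// code has to save caller doubles, reserves the frame slots needed to spill
// them.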
void LChunk::set_allocated_double_registers(BitVector* allocated_registers) {
  allocated_double_registers_ = allocated_registers;
  BitVector* doubles = allocated_double_registers();
  BitVector::Iterator iterator(doubles);
  while (!iterator.Done()) {
    if (info()->saves_caller_doubles()) {
      if (kDoubleSize == kPointerSize * 2) {
        current_frame_slots_ += 2;
      } else {
        current_frame_slots_++;
      }
    }
    iterator.Advance();
  }
}


void LChunkBuilderBase::Abort(BailoutReason reason) {
  info()->AbortOptimization(reason);
  status_ = ABORTED;
}


void LChunkBuilderBase::Retry(BailoutReason reason) {
  info()->RetryOptimization(reason);
  status_ = ABORTED;
}

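// For call instructions, emits an LLazyBailout carrying the environment that
// lazy deoptimization should restore after the call returns. Syntactic tail
// calls need special handling because the current frame (and possibly some
// outer frames) will already have been dropped.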
void LChunkBuilderBase::CreateLazyBailoutForCall(HBasicBlock* current_block,
                                                 LInstruction* instr,
                                                 HInstruction* hydrogen_val) {
  if (!instr->IsCall()) return;

  HEnvironment* hydrogen_env = current_block->last_environment();
  HValue* hydrogen_value_for_lazy_bailout = hydrogen_val;
  DCHECK_NOT_NULL(hydrogen_env);
  if (instr->IsSyntacticTailCall()) {
    // If it was a syntactic tail call, we need to drop the current frame and
    // all the frames on top of it that are either an arguments adaptor frame
    // or a tail caller frame.
    hydrogen_env = hydrogen_env->outer();
    while (hydrogen_env != nullptr &&
           (hydrogen_env->frame_type() == ARGUMENTS_ADAPTOR ||
            hydrogen_env->frame_type() == TAIL_CALLER_FUNCTION)) {
      hydrogen_env = hydrogen_env->outer();
    }
    if (hydrogen_env != nullptr) {
      if (hydrogen_env->frame_type() == JS_FUNCTION) {
        // In case an outer frame is a function frame, we have to replay the
        // environment manually because
        // 1) it does not contain the result of the inlined function yet,
        // 2) we can't find the proper simulate that corresponds to the point
        //    after the inlined call to do a ReplayEnvironment() on.
        // So we push the return value on top of the outer environment.
        // As for JS_GETTER/JS_SETTER/JS_CONSTRUCT, nothing has to be done
        // here; the deoptimizer ensures that the result of the callee is
        // correctly propagated to the result register during deoptimization.
        hydrogen_env = hydrogen_env->Copy();
        hydrogen_env->Push(hydrogen_val);
      }
    } else {
      // Although we don't need this lazy bailout for normal execution
      // (because when we tail call from the outermost function we should pop
      // its frame), we still need it when the debugger is on.
      hydrogen_env = current_block->last_environment();
    }
  } else {
    if (hydrogen_val->HasObservableSideEffects()) {
      HSimulate* sim = HSimulate::cast(hydrogen_val->next());
      sim->ReplayEnvironment(hydrogen_env);
      hydrogen_value_for_lazy_bailout = sim;
    }
  }
  LInstruction* bailout = LChunkBuilderBase::AssignEnvironment(
      new (zone()) LLazyBailout(), hydrogen_env);
  bailout->set_hydrogen_value(hydrogen_value_for_lazy_bailout);
  chunk_->AddInstruction(bailout, current_block);
}

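// Attaches a freshly built LEnvironment (the deoptimization state derived
// from |hydrogen_env|) to |instr| and returns the instruction.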
LInstruction* LChunkBuilderBase::AssignEnvironment(LInstruction* instr,
                                                   HEnvironment* hydrogen_env) {
  int argument_index_accumulator = 0;
  ZoneList<HValue*> objects_to_materialize(0, zone());
  DCHECK_NE(TAIL_CALLER_FUNCTION, hydrogen_env->frame_type());
  instr->set_environment(CreateEnvironment(
      hydrogen_env, &argument_index_accumulator, &objects_to_materialize));
  return instr;
}

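// Translates a chain of HEnvironments into the corresponding chain of
// LEnvironments, recording each live value (with materialization markers for
// captured/arguments objects that are reconstructed only on deoptimization).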
LEnvironment* LChunkBuilderBase::CreateEnvironment(
    HEnvironment* hydrogen_env, int* argument_index_accumulator,
    ZoneList<HValue*>* objects_to_materialize) {
  if (hydrogen_env == NULL) return NULL;

  BailoutId ast_id = hydrogen_env->ast_id();
  DCHECK(!ast_id.IsNone() ||
         (hydrogen_env->frame_type() != JS_FUNCTION &&
          hydrogen_env->frame_type() != TAIL_CALLER_FUNCTION));

  if (hydrogen_env->frame_type() == TAIL_CALLER_FUNCTION) {
    // Skip potential outer arguments adaptor frame.
    HEnvironment* outer_hydrogen_env = hydrogen_env->outer();
    if (outer_hydrogen_env != nullptr &&
        outer_hydrogen_env->frame_type() == ARGUMENTS_ADAPTOR) {
      outer_hydrogen_env = outer_hydrogen_env->outer();
    }
    LEnvironment* outer =
        CreateEnvironment(outer_hydrogen_env, argument_index_accumulator,
                          objects_to_materialize);
    return new (zone())
        LEnvironment(hydrogen_env->closure(), hydrogen_env->frame_type(),
                     ast_id, 0, 0, 0, outer, hydrogen_env->entry(), zone());
  }

  LEnvironment* outer =
      CreateEnvironment(hydrogen_env->outer(), argument_index_accumulator,
                        objects_to_materialize);

  int omitted_count = (hydrogen_env->frame_type() == JS_FUNCTION)
                          ? 0
                          : hydrogen_env->specials_count();

  int value_count = hydrogen_env->length() - omitted_count;
  LEnvironment* result =
      new(zone()) LEnvironment(hydrogen_env->closure(),
                               hydrogen_env->frame_type(),
                               ast_id,
                               hydrogen_env->parameter_count(),
                               argument_count_,
                               value_count,
                               outer,
                               hydrogen_env->entry(),
                               zone());
  int argument_index = *argument_index_accumulator;

  // Store the environment description into the environment
  // (with holes for nested objects).
  for (int i = 0; i < hydrogen_env->length(); ++i) {
    if (hydrogen_env->is_special_index(i) &&
        hydrogen_env->frame_type() != JS_FUNCTION) {
      continue;
    }
    LOperand* op;
    HValue* value = hydrogen_env->values()->at(i);
    CHECK(!value->IsPushArguments());  // Do not deopt outgoing arguments
    if (value->IsArgumentsObject() || value->IsCapturedObject()) {
      op = LEnvironment::materialization_marker();
    } else {
      op = UseAny(value);
    }
    result->AddValue(op,
                     value->representation(),
                     value->CheckFlag(HInstruction::kUint32));
  }

  // Recursively store the nested objects into the environment.
  for (int i = 0; i < hydrogen_env->length(); ++i) {
    if (hydrogen_env->is_special_index(i)) continue;

    HValue* value = hydrogen_env->values()->at(i);
    if (value->IsArgumentsObject() || value->IsCapturedObject()) {
      AddObjectToMaterialize(value, objects_to_materialize, result);
    }
  }

  if (hydrogen_env->frame_type() == JS_FUNCTION) {
    *argument_index_accumulator = argument_index;
  }

  return result;
}


// Add an object to the supplied environment and object materialization list.
//
// Notes:
//
// We are building three lists here:
//
// 1. In the result->object_mapping_ list (added to by the
//    LEnvironment::Add*Object methods), we store the lengths (number
//    of fields) of the captured objects in depth-first traversal order, or
//    in case of duplicated objects, we store the index to the duplicate object
//    (with a tag to differentiate between captured and duplicated objects).
//
// 2. The object fields are stored in the result->values_ list
//    (added to by the LEnvironment::AddValue method) sequentially as lists
//    of fields with holes for nested objects (the holes will be expanded
//    later by LCodegen::AddToTranslation according to the
//    LEnvironment::object_mapping_ list).
//
// 3. The auxiliary objects_to_materialize array stores the hydrogen values
//    in the same order as the result->object_mapping_ list. This is used
//    to detect duplicate values and calculate the corresponding object index.
void LChunkBuilderBase::AddObjectToMaterialize(HValue* value,
    ZoneList<HValue*>* objects_to_materialize, LEnvironment* result) {
  int object_index = objects_to_materialize->length();
  // Store the hydrogen value into the de-duplication array.
  objects_to_materialize->Add(value, zone());
  // Find out whether we are storing a duplicated value.
  int previously_materialized_object = -1;
  for (int prev = 0; prev < object_index; ++prev) {
    if (objects_to_materialize->at(prev) == value) {
      previously_materialized_object = prev;
      break;
    }
  }
  // Store the captured object length (or duplicated object index)
  // into the environment. For duplicated objects, we stop here.
  int length = value->OperandCount();
  bool is_arguments = value->IsArgumentsObject();
  if (previously_materialized_object >= 0) {
    result->AddDuplicateObject(previously_materialized_object);
    return;
  } else {
    result->AddNewObject(is_arguments ? length - 1 : length, is_arguments);
  }
  // Store the captured object's fields into the environment.
  for (int i = is_arguments ? 1 : 0; i < length; ++i) {
    LOperand* op;
    HValue* arg_value = value->OperandAt(i);
    if (arg_value->IsArgumentsObject() || arg_value->IsCapturedObject()) {
      // Insert a hole for nested objects.
      op = LEnvironment::materialization_marker();
    } else {
      DCHECK(!arg_value->IsPushArguments());
      // For ordinary values, tell the register allocator we need the value
      // to be alive here.
      op = UseAny(arg_value);
    }
    result->AddValue(op,
                     arg_value->representation(),
                     arg_value->CheckFlag(HInstruction::kUint32));
  }
  // Recursively store all the nested captured objects into the environment.
  for (int i = is_arguments ? 1 : 0; i < length; ++i) {
    HValue* arg_value = value->OperandAt(i);
    if (arg_value->IsArgumentsObject() || arg_value->IsCapturedObject()) {
      AddObjectToMaterialize(arg_value, objects_to_materialize, result);
    }
  }
}


LPhase::~LPhase() {
  if (ShouldProduceTraceOutput()) {
    isolate()->GetHTracer()->TraceLithium(name(), chunk_);
  }
}


}  // namespace internal
}  // namespace v8