// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/crankshaft/lithium.h"

#include "src/ast/scopes.h"

#if V8_TARGET_ARCH_IA32
#include "src/crankshaft/ia32/lithium-ia32.h"  // NOLINT
#include "src/crankshaft/ia32/lithium-codegen-ia32.h"  // NOLINT
#elif V8_TARGET_ARCH_X64
#include "src/crankshaft/x64/lithium-x64.h"  // NOLINT
#include "src/crankshaft/x64/lithium-codegen-x64.h"  // NOLINT
#elif V8_TARGET_ARCH_ARM
#include "src/crankshaft/arm/lithium-arm.h"  // NOLINT
#include "src/crankshaft/arm/lithium-codegen-arm.h"  // NOLINT
#elif V8_TARGET_ARCH_PPC
#include "src/crankshaft/ppc/lithium-ppc.h"  // NOLINT
#include "src/crankshaft/ppc/lithium-codegen-ppc.h"  // NOLINT
#elif V8_TARGET_ARCH_MIPS
#include "src/crankshaft/mips/lithium-mips.h"  // NOLINT
#include "src/crankshaft/mips/lithium-codegen-mips.h"  // NOLINT
#elif V8_TARGET_ARCH_ARM64
#include "src/crankshaft/arm64/lithium-arm64.h"  // NOLINT
#include "src/crankshaft/arm64/lithium-codegen-arm64.h"  // NOLINT
#elif V8_TARGET_ARCH_MIPS64
#include "src/crankshaft/mips64/lithium-mips64.h"  // NOLINT
#include "src/crankshaft/mips64/lithium-codegen-mips64.h"  // NOLINT
#elif V8_TARGET_ARCH_X87
#include "src/crankshaft/x87/lithium-x87.h"  // NOLINT
#include "src/crankshaft/x87/lithium-codegen-x87.h"  // NOLINT
#else
#error "Unknown architecture."
#endif

namespace v8 {
namespace internal {

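// Prints a human-readable description of this operand, including the
// allocation policy of unallocated operands and the register or stack slot
// assigned to allocated ones.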
void LOperand::PrintTo(StringStream* stream) {
  LUnallocated* unalloc = NULL;
  switch (kind()) {
    case INVALID:
      stream->Add("(0)");
      break;
    case UNALLOCATED:
      unalloc = LUnallocated::cast(this);
      stream->Add("v%d", unalloc->virtual_register());
      if (unalloc->basic_policy() == LUnallocated::FIXED_SLOT) {
        stream->Add("(=%dS)", unalloc->fixed_slot_index());
        break;
      }
      switch (unalloc->extended_policy()) {
        case LUnallocated::NONE:
          break;
        case LUnallocated::FIXED_REGISTER: {
          int reg_index = unalloc->fixed_register_index();
          if (reg_index < 0 || reg_index >= Register::kNumRegisters) {
            stream->Add("(=invalid_reg#%d)", reg_index);
          } else {
            const char* register_name =
                Register::from_code(reg_index).ToString();
            stream->Add("(=%s)", register_name);
          }
          break;
        }
        case LUnallocated::FIXED_DOUBLE_REGISTER: {
          int reg_index = unalloc->fixed_register_index();
          if (reg_index < 0 || reg_index >= DoubleRegister::kMaxNumRegisters) {
            stream->Add("(=invalid_double_reg#%d)", reg_index);
          } else {
            const char* double_register_name =
                DoubleRegister::from_code(reg_index).ToString();
            stream->Add("(=%s)", double_register_name);
          }
          break;
        }
        case LUnallocated::MUST_HAVE_REGISTER:
          stream->Add("(R)");
          break;
        case LUnallocated::MUST_HAVE_DOUBLE_REGISTER:
          stream->Add("(D)");
          break;
        case LUnallocated::WRITABLE_REGISTER:
          stream->Add("(WR)");
          break;
        case LUnallocated::SAME_AS_FIRST_INPUT:
          stream->Add("(1)");
          break;
        case LUnallocated::ANY:
          stream->Add("(-)");
          break;
      }
      break;
    case CONSTANT_OPERAND:
      stream->Add("[constant:%d]", index());
      break;
    case STACK_SLOT:
      stream->Add("[stack:%d]", index());
      break;
    case DOUBLE_STACK_SLOT:
      stream->Add("[double_stack:%d]", index());
      break;
    case REGISTER: {
      int reg_index = index();
      if (reg_index < 0 || reg_index >= Register::kNumRegisters) {
        stream->Add("(=invalid_reg#%d|R)", reg_index);
      } else {
        stream->Add("[%s|R]", Register::from_code(reg_index).ToString());
      }
      break;
    }
    case DOUBLE_REGISTER: {
      int reg_index = index();
      if (reg_index < 0 || reg_index >= DoubleRegister::kMaxNumRegisters) {
        stream->Add("(=invalid_double_reg#%d|R)", reg_index);
      } else {
        stream->Add("[%s|R]", DoubleRegister::from_code(reg_index).ToString());
      }
      break;
    }
  }
}


template<LOperand::Kind kOperandKind, int kNumCachedOperands>
LSubKindOperand<kOperandKind, kNumCachedOperands>*
LSubKindOperand<kOperandKind, kNumCachedOperands>::cache = NULL;

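// Operands with small indices are pre-allocated once per kind: SetUpCache()
// fills the cache with operands for indices [0, kNumCachedOperands) and
// TearDownCache() releases them again.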
132template<LOperand::Kind kOperandKind, int kNumCachedOperands>
133void LSubKindOperand<kOperandKind, kNumCachedOperands>::SetUpCache() {
134 if (cache) return;
135 cache = new LSubKindOperand[kNumCachedOperands];
136 for (int i = 0; i < kNumCachedOperands; i++) {
137 cache[i].ConvertTo(kOperandKind, i);
138 }
139}
140
141
142template<LOperand::Kind kOperandKind, int kNumCachedOperands>
143void LSubKindOperand<kOperandKind, kNumCachedOperands>::TearDownCache() {
144 delete[] cache;
145 cache = NULL;
146}
147
148
149void LOperand::SetUpCaches() {
150#define LITHIUM_OPERAND_SETUP(name, type, number) L##name::SetUpCache();
151 LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_SETUP)
152#undef LITHIUM_OPERAND_SETUP
153}
154
155
156void LOperand::TearDownCaches() {
157#define LITHIUM_OPERAND_TEARDOWN(name, type, number) L##name::TearDownCache();
158 LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_TEARDOWN)
159#undef LITHIUM_OPERAND_TEARDOWN
160}
161
162
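// A parallel move is redundant when every one of its component moves is
// redundant.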
bool LParallelMove::IsRedundant() const {
  for (int i = 0; i < move_operands_.length(); ++i) {
    if (!move_operands_[i].IsRedundant()) return false;
  }
  return true;
}


void LParallelMove::PrintDataTo(StringStream* stream) const {
  bool first = true;
  for (int i = 0; i < move_operands_.length(); ++i) {
    if (!move_operands_[i].IsEliminated()) {
      LOperand* source = move_operands_[i].source();
      LOperand* destination = move_operands_[i].destination();
      if (!first) stream->Add(" ");
      first = false;
      if (source->Equals(destination)) {
        destination->PrintTo(stream);
      } else {
        destination->PrintTo(stream);
        stream->Add(" = ");
        source->PrintTo(stream);
      }
      stream->Add(";");
    }
  }
}


void LEnvironment::PrintTo(StringStream* stream) {
  stream->Add("[id=%d|", ast_id().ToInt());
  if (deoptimization_index() != Safepoint::kNoDeoptimizationIndex) {
    stream->Add("deopt_id=%d|", deoptimization_index());
  }
  stream->Add("parameters=%d|", parameter_count());
  stream->Add("arguments_stack_height=%d|", arguments_stack_height());
  for (int i = 0; i < values_.length(); ++i) {
    if (i != 0) stream->Add(";");
    if (values_[i] == NULL) {
      stream->Add("[hole]");
    } else {
      values_[i]->PrintTo(stream);
    }
  }
  stream->Add("]");
}


void LPointerMap::RecordPointer(LOperand* op, Zone* zone) {
  // Do not record arguments as pointers.
  if (op->IsStackSlot() && op->index() < 0) return;
  DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
  pointer_operands_.Add(op, zone);
}


void LPointerMap::RemovePointer(LOperand* op) {
  // Do not record arguments as pointers.
  if (op->IsStackSlot() && op->index() < 0) return;
  DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
  for (int i = 0; i < pointer_operands_.length(); ++i) {
    if (pointer_operands_[i]->Equals(op)) {
      pointer_operands_.Remove(i);
      --i;
    }
  }
}


void LPointerMap::RecordUntagged(LOperand* op, Zone* zone) {
  // Do not record arguments as pointers.
  if (op->IsStackSlot() && op->index() < 0) return;
  DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
  untagged_operands_.Add(op, zone);
}


void LPointerMap::PrintTo(StringStream* stream) {
  stream->Add("{");
  for (int i = 0; i < pointer_operands_.length(); ++i) {
    if (i != 0) stream->Add(";");
    pointer_operands_[i]->PrintTo(stream);
  }
  stream->Add("}");
}

LChunk::LChunk(CompilationInfo* info, HGraph* graph)
    : base_frame_slots_(StandardFrameConstants::kFixedFrameSize / kPointerSize),
      current_frame_slots_(base_frame_slots_),
      info_(info),
      graph_(graph),
      instructions_(32, info->zone()),
      pointer_maps_(8, info->zone()),
      inlined_functions_(1, info->zone()),
      deprecation_dependencies_(32, info->zone()),
      stability_dependencies_(8, info->zone()) {}

LLabel* LChunk::GetLabel(int block_id) const {
  HBasicBlock* block = graph_->blocks()->at(block_id);
  int first_instruction = block->first_instruction_index();
  return LLabel::cast(instructions_[first_instruction]);
}


int LChunk::LookupDestination(int block_id) const {
  LLabel* cur = GetLabel(block_id);
  while (cur->replacement() != NULL) {
    cur = cur->replacement();
  }
  return cur->block_id();
}

Label* LChunk::GetAssemblyLabel(int block_id) const {
  LLabel* label = GetLabel(block_id);
  DCHECK(!label->HasReplacement());
  return label->label();
}

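// Finds blocks that consist of nothing but a label, redundant gaps and a
// goto, and marks their labels as replaced by the goto target's label, so
// such blocks can be skipped during code generation.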
void LChunk::MarkEmptyBlocks() {
  LPhase phase("L_Mark empty blocks", this);
  for (int i = 0; i < graph()->blocks()->length(); ++i) {
    HBasicBlock* block = graph()->blocks()->at(i);
    int first = block->first_instruction_index();
    int last = block->last_instruction_index();
    LInstruction* first_instr = instructions()->at(first);
    LInstruction* last_instr = instructions()->at(last);

    LLabel* label = LLabel::cast(first_instr);
    if (last_instr->IsGoto()) {
      LGoto* goto_instr = LGoto::cast(last_instr);
      if (label->IsRedundant() &&
          !label->is_loop_header()) {
        bool can_eliminate = true;
        for (int i = first + 1; i < last && can_eliminate; ++i) {
          LInstruction* cur = instructions()->at(i);
          if (cur->IsGap()) {
            LGap* gap = LGap::cast(cur);
            if (!gap->IsRedundant()) {
              can_eliminate = false;
            }
          } else {
            can_eliminate = false;
          }
        }
        if (can_eliminate) {
          label->set_replacement(GetLabel(goto_instr->block_id()));
        }
      }
    }
  }
}

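// Appends an instruction to the chunk, pairing it with an instruction gap:
// control instructions are preceded by their gap, all other instructions are
// followed by one. The instruction's position in the chunk is recorded on
// its pointer map, if it has one.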
void LChunk::AddInstruction(LInstruction* instr, HBasicBlock* block) {
  LInstructionGap* gap = new (zone()) LInstructionGap(block);
  gap->set_hydrogen_value(instr->hydrogen_value());
  int index = -1;
  if (instr->IsControl()) {
    instructions_.Add(gap, zone());
    index = instructions_.length();
    instructions_.Add(instr, zone());
  } else {
    index = instructions_.length();
    instructions_.Add(instr, zone());
    instructions_.Add(gap, zone());
  }
  if (instr->HasPointerMap()) {
    pointer_maps_.Add(instr->pointer_map(), zone());
    instr->pointer_map()->set_lithium_position(index);
  }
}


LConstantOperand* LChunk::DefineConstantOperand(HConstant* constant) {
  return LConstantOperand::Create(constant->id(), zone());
}

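// Maps a parameter index to a (negative) spill slot index; e.g. with two
// declared parameters the receiver (index 0) maps to slot -3 and the last
// parameter (index 2) maps to slot -1.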
int LChunk::GetParameterStackSlot(int index) const {
  // The receiver is at index 0, the first parameter at index 1, so we
  // shift all parameter indexes down by the number of parameters, and
  // make sure they end up negative so they are distinguishable from
  // spill slots.
  int result = index - info()->num_parameters() - 1;

  DCHECK(result < 0);
  return result;
}


// A parameter relative to ebp in the arguments stub.
int LChunk::ParameterAt(int index) {
  DCHECK(-1 <= index);  // -1 is the receiver.
  return (1 + info()->scope()->num_parameters() - index) *
      kPointerSize;
}


LGap* LChunk::GetGapAt(int index) const {
  return LGap::cast(instructions_[index]);
}


bool LChunk::IsGapAt(int index) const {
  return instructions_[index]->IsGap();
}


int LChunk::NearestGapPos(int index) const {
  while (!IsGapAt(index)) index--;
  return index;
}


void LChunk::AddGapMove(int index, LOperand* from, LOperand* to) {
  GetGapAt(index)->GetOrCreateParallelMove(
      LGap::START, zone())->AddMove(from, to, zone());
}


HConstant* LChunk::LookupConstant(LConstantOperand* operand) const {
  return HConstant::cast(graph_->LookupValue(operand->index()));
}


Representation LChunk::LookupLiteralRepresentation(
    LConstantOperand* operand) const {
  return graph_->LookupValue(operand->index())->representation();
}

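// Registers the optimized code object as dependent on the maps it assumes
// to remain non-deprecated or stable, so the code can be deoptimized if one
// of those maps changes.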
void LChunk::CommitDependencies(Handle<Code> code) const {
  if (!code->is_optimized_code()) return;
  HandleScope scope(isolate());

  for (Handle<Map> map : deprecation_dependencies_) {
    DCHECK(!map->is_deprecated());
    DCHECK(map->CanBeDeprecated());
    Map::AddDependentCode(map, DependentCode::kTransitionGroup, code);
  }

  for (Handle<Map> map : stability_dependencies_) {
    DCHECK(map->is_stable());
    DCHECK(map->CanTransition());
    Map::AddDependentCode(map, DependentCode::kPrototypeCheckGroup, code);
  }

  info_->dependencies()->Commit(code);
}

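// Builds the lithium chunk for a hydrogen graph and runs the register
// allocator over it. Returns NULL (after aborting the optimization) if there
// are too many virtual registers or if register allocation fails.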
LChunk* LChunk::NewChunk(HGraph* graph) {
  DisallowHandleAllocation no_handles;
  DisallowHeapAllocation no_gc;
  graph->DisallowAddingNewValues();
  int values = graph->GetMaximumValueID();
  CompilationInfo* info = graph->info();
  if (values > LUnallocated::kMaxVirtualRegisters) {
    info->AbortOptimization(kNotEnoughVirtualRegistersForValues);
    return NULL;
  }
  LAllocator allocator(values, graph);
  LChunkBuilder builder(info, graph, &allocator);
  LChunk* chunk = builder.Build();
  if (chunk == NULL) return NULL;

  if (!allocator.Allocate(chunk)) {
    info->AbortOptimization(kNotEnoughVirtualRegistersRegalloc);
    return NULL;
  }

  chunk->set_allocated_double_registers(
      allocator.assigned_double_registers());

  return chunk;
}

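// Generates machine code for this chunk and returns the resulting optimized
// Code object, or a null handle if code generation was aborted.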
442Handle<Code> LChunk::Codegen() {
443 MacroAssembler assembler(info()->isolate(), NULL, 0,
444 CodeObjectRequired::kYes);
445 LOG_CODE_EVENT(info()->isolate(),
446 CodeStartLinePosInfoRecordEvent(
447 assembler.positions_recorder()));
448 // Code serializer only takes unoptimized code.
449 DCHECK(!info()->will_serialize());
450 LCodeGen generator(this, &assembler, info());
451
452 MarkEmptyBlocks();
453
454 if (generator.GenerateCode()) {
455 generator.CheckEnvironmentUsage();
456 CodeGenerator::MakeCodePrologue(info(), "optimized");
457 Handle<Code> code = CodeGenerator::MakeCodeEpilogue(&assembler, info());
458 generator.FinishCode(code);
459 CommitDependencies(code);
460 code->set_is_crankshafted(true);
461 void* jit_handler_data =
462 assembler.positions_recorder()->DetachJITHandlerData();
463 LOG_CODE_EVENT(info()->isolate(),
464 CodeEndLinePosInfoRecordEvent(*code, jit_handler_data));
465
466 CodeGenerator::PrintCode(code, info());
467 DCHECK(!(info()->isolate()->serializer_enabled() &&
468 info()->GetMustNotHaveEagerFrame() &&
469 generator.NeedsEagerFrame()));
470 return code;
471 }
472 assembler.AbortedCodeGeneration();
473 return Handle<Code>::null();
474}
475
476
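// Records the double registers assigned by the allocator and, when caller
// doubles have to be saved, reserves one frame slot per allocated double
// register (two slots if a double is twice the pointer size).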
void LChunk::set_allocated_double_registers(BitVector* allocated_registers) {
  allocated_double_registers_ = allocated_registers;
  BitVector* doubles = allocated_double_registers();
  BitVector::Iterator iterator(doubles);
  while (!iterator.Done()) {
    if (info()->saves_caller_doubles()) {
      if (kDoubleSize == kPointerSize * 2) {
        current_frame_slots_ += 2;
      } else {
        current_frame_slots_++;
      }
    }
    iterator.Advance();
  }
}


void LChunkBuilderBase::Abort(BailoutReason reason) {
  info()->AbortOptimization(reason);
  status_ = ABORTED;
}


void LChunkBuilderBase::Retry(BailoutReason reason) {
  info()->RetryOptimization(reason);
  status_ = ABORTED;
}

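// Recursively translates a hydrogen environment (including its outer
// environments) into a lithium environment used for deoptimization. Plain
// values are registered with the register allocator via UseAny; arguments
// and captured objects get materialization markers and are collected in
// objects_to_materialize.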
LEnvironment* LChunkBuilderBase::CreateEnvironment(
    HEnvironment* hydrogen_env, int* argument_index_accumulator,
    ZoneList<HValue*>* objects_to_materialize) {
  if (hydrogen_env == NULL) return NULL;

  LEnvironment* outer =
      CreateEnvironment(hydrogen_env->outer(), argument_index_accumulator,
                        objects_to_materialize);
  BailoutId ast_id = hydrogen_env->ast_id();
  DCHECK(!ast_id.IsNone() ||
         hydrogen_env->frame_type() != JS_FUNCTION);

  int omitted_count = (hydrogen_env->frame_type() == JS_FUNCTION)
                          ? 0
                          : hydrogen_env->specials_count();

  int value_count = hydrogen_env->length() - omitted_count;
  LEnvironment* result =
      new(zone()) LEnvironment(hydrogen_env->closure(),
                               hydrogen_env->frame_type(),
                               ast_id,
                               hydrogen_env->parameter_count(),
                               argument_count_,
                               value_count,
                               outer,
                               hydrogen_env->entry(),
                               zone());
  int argument_index = *argument_index_accumulator;

  // Store the environment description into the environment
  // (with holes for nested objects)
  for (int i = 0; i < hydrogen_env->length(); ++i) {
    if (hydrogen_env->is_special_index(i) &&
        hydrogen_env->frame_type() != JS_FUNCTION) {
      continue;
    }
    LOperand* op;
    HValue* value = hydrogen_env->values()->at(i);
    CHECK(!value->IsPushArguments());  // Do not deopt outgoing arguments
    if (value->IsArgumentsObject() || value->IsCapturedObject()) {
      op = LEnvironment::materialization_marker();
    } else {
      op = UseAny(value);
    }
    result->AddValue(op,
                     value->representation(),
                     value->CheckFlag(HInstruction::kUint32));
  }

  // Recursively store the nested objects into the environment
  for (int i = 0; i < hydrogen_env->length(); ++i) {
    if (hydrogen_env->is_special_index(i)) continue;

    HValue* value = hydrogen_env->values()->at(i);
    if (value->IsArgumentsObject() || value->IsCapturedObject()) {
      AddObjectToMaterialize(value, objects_to_materialize, result);
    }
  }

  if (hydrogen_env->frame_type() == JS_FUNCTION) {
    *argument_index_accumulator = argument_index;
  }

  return result;
}


// Add an object to the supplied environment and object materialization list.
//
// Notes:
//
// We are building three lists here:
//
// 1. In the result->object_mapping_ list (added to by the
//    LEnvironment::Add*Object methods), we store the lengths (number
//    of fields) of the captured objects in depth-first traversal order, or
//    in case of duplicated objects, we store the index to the duplicate object
//    (with a tag to differentiate between captured and duplicated objects).
//
// 2. The object fields are stored in the result->values_ list
//    (added to by the LEnvironment.AddValue method) sequentially as lists
//    of fields with holes for nested objects (the holes will be expanded
//    later by LCodegen::AddToTranslation according to the
//    LEnvironment.object_mapping_ list).
//
// 3. The auxiliary objects_to_materialize array stores the hydrogen values
//    in the same order as the result->object_mapping_ list. This is used
//    to detect duplicate values and calculate the corresponding object index.
void LChunkBuilderBase::AddObjectToMaterialize(HValue* value,
    ZoneList<HValue*>* objects_to_materialize, LEnvironment* result) {
  int object_index = objects_to_materialize->length();
  // Store the hydrogen value into the de-duplication array
  objects_to_materialize->Add(value, zone());
  // Find out whether we are storing a duplicated value
  int previously_materialized_object = -1;
  for (int prev = 0; prev < object_index; ++prev) {
    if (objects_to_materialize->at(prev) == value) {
      previously_materialized_object = prev;
      break;
    }
  }
  // Store the captured object length (or duplicated object index)
  // into the environment. For duplicated objects, we stop here.
  int length = value->OperandCount();
  bool is_arguments = value->IsArgumentsObject();
  if (previously_materialized_object >= 0) {
    result->AddDuplicateObject(previously_materialized_object);
    return;
  } else {
    result->AddNewObject(is_arguments ? length - 1 : length, is_arguments);
  }
  // Store the captured object's fields into the environment
  for (int i = is_arguments ? 1 : 0; i < length; ++i) {
    LOperand* op;
    HValue* arg_value = value->OperandAt(i);
    if (arg_value->IsArgumentsObject() || arg_value->IsCapturedObject()) {
      // Insert a hole for nested objects
      op = LEnvironment::materialization_marker();
    } else {
      DCHECK(!arg_value->IsPushArguments());
      // For ordinary values, tell the register allocator we need the value
      // to be alive here
      op = UseAny(arg_value);
    }
    result->AddValue(op,
                     arg_value->representation(),
                     arg_value->CheckFlag(HInstruction::kUint32));
  }
  // Recursively store all the nested captured objects into the environment
  for (int i = is_arguments ? 1 : 0; i < length; ++i) {
    HValue* arg_value = value->OperandAt(i);
    if (arg_value->IsArgumentsObject() || arg_value->IsCapturedObject()) {
      AddObjectToMaterialize(arg_value, objects_to_materialize, result);
    }
  }
}


LPhase::~LPhase() {
  if (ShouldProduceTraceOutput()) {
    isolate()->GetHTracer()->TraceLithium(name(), chunk_);
  }
}


}  // namespace internal
}  // namespace v8