// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#include "src/bailout-reason.h"
#include "src/code-stubs.h"
#include "src/field-index.h"
#include "src/hydrogen.h"
#include "src/ic/ic.h"
#include "src/lithium.h"

namespace v8 {
namespace internal {


static LChunk* OptimizeGraph(HGraph* graph) {
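  // The scopes below assert that optimizing the graph neither allocates on
  // the V8 heap nor creates or dereferences handles.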
  DisallowHeapAllocation no_allocation;
  DisallowHandleAllocation no_handles;
  DisallowHandleDereference no_deref;

  DCHECK(graph != NULL);
  BailoutReason bailout_reason = kNoReason;
  if (!graph->Optimize(&bailout_reason)) {
    FATAL(GetBailoutReason(bailout_reason));
  }
  LChunk* chunk = LChunk::NewChunk(graph);
  if (chunk == NULL) {
    FATAL(GetBailoutReason(graph->info()->bailout_reason()));
  }
  return chunk;
}


class CodeStubGraphBuilderBase : public HGraphBuilder {
 public:
  explicit CodeStubGraphBuilderBase(CompilationInfoWithZone* info)
      : HGraphBuilder(info),
        arguments_length_(NULL),
        info_(info),
        descriptor_(info->code_stub()),
        context_(NULL) {
    int parameter_count = descriptor_.GetEnvironmentParameterCount();
    parameters_.Reset(new HParameter*[parameter_count]);
  }
  virtual bool BuildGraph();

 protected:
  virtual HValue* BuildCodeStub() = 0;
  HParameter* GetParameter(int parameter) {
    DCHECK(parameter < descriptor_.GetEnvironmentParameterCount());
    return parameters_[parameter];
  }
  HValue* GetArgumentsLength() {
    // This is initialized in BuildGraph()
    DCHECK(arguments_length_ != NULL);
    return arguments_length_;
  }
  CompilationInfo* info() { return info_; }
  HydrogenCodeStub* stub() { return info_->code_stub(); }
  HContext* context() { return context_; }
  Isolate* isolate() { return info_->isolate(); }

  HLoadNamedField* BuildLoadNamedField(HValue* object,
                                       FieldIndex index);
  void BuildStoreNamedField(HValue* object, HValue* value, FieldIndex index,
                            Representation representation,
                            bool transition_to_field);

  enum ArgumentClass {
    NONE,
    SINGLE,
    MULTIPLE
  };

  HValue* UnmappedCase(HValue* elements, HValue* key);

  HValue* BuildArrayConstructor(ElementsKind kind,
                                AllocationSiteOverrideMode override_mode,
                                ArgumentClass argument_class);
  HValue* BuildInternalArrayConstructor(ElementsKind kind,
                                        ArgumentClass argument_class);

  // BuildCheckAndInstallOptimizedCode emits code to install the optimized
  // function found in the optimized code map at map_index in js_function, if
  // the function at map_index matches the given native_context. Builder is
  // left in the "Then()" state after the install.
  void BuildCheckAndInstallOptimizedCode(HValue* js_function,
                                         HValue* native_context,
                                         IfBuilder* builder,
                                         HValue* optimized_map,
                                         HValue* map_index);
  void BuildInstallCode(HValue* js_function, HValue* shared_info);

  HInstruction* LoadFromOptimizedCodeMap(HValue* optimized_map,
                                         HValue* iterator,
                                         int field_offset);
  void BuildInstallFromOptimizedCodeMap(HValue* js_function,
                                        HValue* shared_info,
                                        HValue* native_context);

  // Tail calls handler found at array[map_index + 1].
  void TailCallHandler(HValue* receiver, HValue* name, HValue* array,
                       HValue* map_index, HValue* slot, HValue* vector);

  // Tail calls handler_code.
  void TailCallHandler(HValue* receiver, HValue* name, HValue* slot,
                       HValue* vector, HValue* handler_code);

  void TailCallMiss(HValue* receiver, HValue* name, HValue* slot,
                    HValue* vector, bool keyed_load);

  // Handle MONOMORPHIC and POLYMORPHIC LoadIC and KeyedLoadIC cases.
  void HandleArrayCases(HValue* array, HValue* receiver, HValue* name,
                        HValue* slot, HValue* vector, bool keyed_load);

 private:
  HValue* BuildArraySingleArgumentConstructor(JSArrayBuilder* builder);
  HValue* BuildArrayNArgumentsConstructor(JSArrayBuilder* builder,
                                          ElementsKind kind);

  SmartArrayPointer<HParameter*> parameters_;
  HValue* arguments_length_;
  CompilationInfoWithZone* info_;
  CodeStubDescriptor descriptor_;
  HContext* context_;
};


bool CodeStubGraphBuilderBase::BuildGraph() {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  if (FLAG_trace_hydrogen_stubs) {
    const char* name = CodeStub::MajorName(stub()->MajorKey(), false);
    PrintF("-----------------------------------------------------------\n");
    PrintF("Compiling stub %s using hydrogen\n", name);
    isolate()->GetHTracer()->TraceCompilation(info());
  }

  int param_count = descriptor_.GetEnvironmentParameterCount();
  HEnvironment* start_environment = graph()->start_environment();
  HBasicBlock* next_block = CreateBasicBlock(start_environment);
  Goto(next_block);
  next_block->SetJoinId(BailoutId::StubEntry());
  set_current_block(next_block);

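  // Bind each register parameter into the start environment; the parameter
  // the descriptor flags as the count register carries the dynamic argument
  // count and doubles as arguments_length_.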
  bool runtime_stack_params = descriptor_.stack_parameter_count().is_valid();
  HInstruction* stack_parameter_count = NULL;
  for (int i = 0; i < param_count; ++i) {
    Representation r = descriptor_.GetEnvironmentParameterRepresentation(i);
    HParameter* param = Add<HParameter>(i,
                                        HParameter::REGISTER_PARAMETER, r);
    start_environment->Bind(i, param);
    parameters_[i] = param;
    if (descriptor_.IsEnvironmentParameterCountRegister(i)) {
      param->set_type(HType::Smi());
      stack_parameter_count = param;
      arguments_length_ = stack_parameter_count;
    }
  }

  DCHECK(!runtime_stack_params || arguments_length_ != NULL);
  if (!runtime_stack_params) {
    stack_parameter_count = graph()->GetConstantMinus1();
    arguments_length_ = graph()->GetConstant0();
  }

  context_ = Add<HContext>();
  start_environment->BindContext(context_);

  Add<HSimulate>(BailoutId::StubEntry());

  NoObservableSideEffectsScope no_effects(this);

  HValue* return_value = BuildCodeStub();

  // We might have extra expressions to pop from the stack in addition to the
  // arguments above.
  HInstruction* stack_pop_count = stack_parameter_count;
  if (descriptor_.function_mode() == JS_FUNCTION_STUB_MODE) {
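    // A hedged note: when only a dynamic count is available, the stub pops
    // stack_parameter_count + 1 slots; the extra slot is presumably the
    // receiver, which JS-function-mode stubs must remove as well.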
    if (!stack_parameter_count->IsConstant() &&
        descriptor_.hint_stack_parameter_count() < 0) {
      HInstruction* constant_one = graph()->GetConstant1();
      stack_pop_count = AddUncasted<HAdd>(stack_parameter_count, constant_one);
      stack_pop_count->ClearFlag(HValue::kCanOverflow);
      // TODO(mvstanton): verify that stack_parameter_count+1 really fits in a
      // smi.
    } else {
      int count = descriptor_.hint_stack_parameter_count();
      stack_pop_count = Add<HConstant>(count);
    }
  }

  if (current_block() != NULL) {
    HReturn* hreturn_instruction = New<HReturn>(return_value,
                                                stack_pop_count);
    FinishCurrentBlock(hreturn_instruction);
  }
  return true;
}


template <class Stub>
class CodeStubGraphBuilder: public CodeStubGraphBuilderBase {
 public:
  explicit CodeStubGraphBuilder(CompilationInfoWithZone* info)
      : CodeStubGraphBuilderBase(info) {}

 protected:
  virtual HValue* BuildCodeStub() {
    if (casted_stub()->IsUninitialized()) {
      return BuildCodeUninitializedStub();
    } else {
      return BuildCodeInitializedStub();
    }
  }

  virtual HValue* BuildCodeInitializedStub() {
    UNIMPLEMENTED();
    return NULL;
  }

  virtual HValue* BuildCodeUninitializedStub() {
    // Force a deopt that falls back to the runtime.
    HValue* undefined = graph()->GetConstantUndefined();
    IfBuilder builder(this);
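    // undefined == undefined always holds, so the IfNot condition is never
    // taken and control always reaches ElseDeopt below.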
    builder.IfNot<HCompareObjectEqAndBranch, HValue*>(undefined, undefined);
    builder.Then();
    builder.ElseDeopt("Forced deopt to runtime");
    return undefined;
  }

  Stub* casted_stub() { return static_cast<Stub*>(stub()); }
};


Handle<Code> HydrogenCodeStub::GenerateLightweightMissCode(
    ExternalReference miss) {
  Factory* factory = isolate()->factory();

  // Generate the new code.
  MacroAssembler masm(isolate(), NULL, 256);

  {
    // Update the static counter each time a new code stub is generated.
    isolate()->counters()->code_stubs()->Increment();

    // Generate the code for the stub.
    masm.set_generating_stub(true);
    // TODO(yangguo): remove this once we can serialize IC stubs.
    masm.enable_serializer();
    NoCurrentFrameScope scope(&masm);
    GenerateLightweightMiss(&masm, miss);
  }

  // Create the code object.
  CodeDesc desc;
  masm.GetCode(&desc);

  // Copy the generated code into a heap object.
  Code::Flags flags = Code::ComputeFlags(
      GetCodeKind(),
      GetICState(),
      GetExtraICState(),
      GetStubType());
  Handle<Code> new_object = factory->NewCode(
      desc, flags, masm.CodeObject(), NeedsImmovableCode());
  return new_object;
}


template <class Stub>
static Handle<Code> DoGenerateCode(Stub* stub) {
  Isolate* isolate = stub->isolate();
  CodeStubDescriptor descriptor(stub);

  // If we are uninitialized we can use a light-weight stub to enter
  // the runtime that is significantly faster than using the standard
  // stub-failure deopt mechanism.
  if (stub->IsUninitialized() && descriptor.has_miss_handler()) {
    DCHECK(!descriptor.stack_parameter_count().is_valid());
    return stub->GenerateLightweightMissCode(descriptor.miss_handler());
  }
  base::ElapsedTimer timer;
  if (FLAG_profile_hydrogen_code_stub_compilation) {
    timer.Start();
  }
  CompilationInfoWithZone info(stub, isolate);
  CodeStubGraphBuilder<Stub> builder(&info);
  LChunk* chunk = OptimizeGraph(builder.CreateGraph());
  Handle<Code> code = chunk->Codegen();
  if (FLAG_profile_hydrogen_code_stub_compilation) {
    OFStream os(stdout);
    os << "[Lazy compilation of " << stub << " took "
       << timer.Elapsed().InMillisecondsF() << " ms]" << std::endl;
  }
  return code;
}


template <>
HValue* CodeStubGraphBuilder<NumberToStringStub>::BuildCodeStub() {
  info()->MarkAsSavesCallerDoubles();
  HValue* number = GetParameter(NumberToStringStub::kNumber);
  return BuildNumberToString(number, Type::Number(zone()));
}


Handle<Code> NumberToStringStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<FastCloneShallowArrayStub>::BuildCodeStub() {
  Factory* factory = isolate()->factory();
  HValue* undefined = graph()->GetConstantUndefined();
  AllocationSiteMode alloc_site_mode = casted_stub()->allocation_site_mode();

  // This stub is very performance sensitive; the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();

  HInstruction* allocation_site =
      Add<HLoadKeyed>(GetParameter(0), GetParameter(1), nullptr, FAST_ELEMENTS);
  IfBuilder checker(this);
  checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
                                                    undefined);
  checker.Then();

  HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
      AllocationSite::kTransitionInfoOffset);
  HInstruction* boilerplate =
      Add<HLoadNamedField>(allocation_site, nullptr, access);
  HValue* elements = AddLoadElements(boilerplate);
  HValue* capacity = AddLoadFixedArrayLength(elements);
  IfBuilder zero_capacity(this);
  zero_capacity.If<HCompareNumericAndBranch>(capacity, graph()->GetConstant0(),
                                             Token::EQ);
  zero_capacity.Then();
  Push(BuildCloneShallowArrayEmpty(boilerplate,
                                   allocation_site,
                                   alloc_site_mode));
  zero_capacity.Else();
  IfBuilder if_fixed_cow(this);
  if_fixed_cow.If<HCompareMap>(elements, factory->fixed_cow_array_map());
  if_fixed_cow.Then();
  Push(BuildCloneShallowArrayCow(boilerplate,
                                 allocation_site,
                                 alloc_site_mode,
                                 FAST_ELEMENTS));
  if_fixed_cow.Else();
  IfBuilder if_fixed(this);
  if_fixed.If<HCompareMap>(elements, factory->fixed_array_map());
  if_fixed.Then();
  Push(BuildCloneShallowArrayNonEmpty(boilerplate,
                                      allocation_site,
                                      alloc_site_mode,
                                      FAST_ELEMENTS));

  if_fixed.Else();
  Push(BuildCloneShallowArrayNonEmpty(boilerplate,
                                      allocation_site,
                                      alloc_site_mode,
                                      FAST_DOUBLE_ELEMENTS));
  if_fixed.End();
  if_fixed_cow.End();
  zero_capacity.End();

  checker.ElseDeopt("Uninitialized boilerplate literals");
  checker.End();

  return environment()->Pop();
}


Handle<Code> FastCloneShallowArrayStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<FastCloneShallowObjectStub>::BuildCodeStub() {
  HValue* undefined = graph()->GetConstantUndefined();

  HInstruction* allocation_site =
      Add<HLoadKeyed>(GetParameter(0), GetParameter(1), nullptr, FAST_ELEMENTS);

  IfBuilder checker(this);
  checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
                                                    undefined);
  checker.And();

  HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
      AllocationSite::kTransitionInfoOffset);
  HInstruction* boilerplate =
      Add<HLoadNamedField>(allocation_site, nullptr, access);

  int length = casted_stub()->length();
  if (length == 0) {
    // Empty objects have some slack added to them.
    length = JSObject::kInitialGlobalObjectUnusedPropertiesCount;
  }
  int size = JSObject::kHeaderSize + length * kPointerSize;
  int object_size = size;
  if (FLAG_allocation_site_pretenuring) {
    size += AllocationMemento::kSize;
  }

  HValue* boilerplate_map =
      Add<HLoadNamedField>(boilerplate, nullptr, HObjectAccess::ForMap());
  HValue* boilerplate_size = Add<HLoadNamedField>(
      boilerplate_map, nullptr, HObjectAccess::ForMapInstanceSize());
  HValue* size_in_words = Add<HConstant>(object_size >> kPointerSizeLog2);
  checker.If<HCompareNumericAndBranch>(boilerplate_size,
                                       size_in_words, Token::EQ);
  checker.Then();

  HValue* size_in_bytes = Add<HConstant>(size);

  HInstruction* object = Add<HAllocate>(size_in_bytes, HType::JSObject(),
                                        NOT_TENURED, JS_OBJECT_TYPE);

  for (int i = 0; i < object_size; i += kPointerSize) {
    HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(i);
    Add<HStoreNamedField>(object, access,
                          Add<HLoadNamedField>(boilerplate, nullptr, access));
  }

  DCHECK(FLAG_allocation_site_pretenuring || (size == object_size));
  if (FLAG_allocation_site_pretenuring) {
    BuildCreateAllocationMemento(
        object, Add<HConstant>(object_size), allocation_site);
  }

  environment()->Push(object);
  checker.ElseDeopt("Uninitialized boilerplate in fast clone");
  checker.End();

  return environment()->Pop();
}


Handle<Code> FastCloneShallowObjectStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<CreateAllocationSiteStub>::BuildCodeStub() {
  HValue* size = Add<HConstant>(AllocationSite::kSize);
  HInstruction* object = Add<HAllocate>(size, HType::JSObject(), TENURED,
                                        JS_OBJECT_TYPE);

  // Store the map
  Handle<Map> allocation_site_map = isolate()->factory()->allocation_site_map();
  AddStoreMapConstant(object, allocation_site_map);

  // Store the payload (smi elements kind)
  HValue* initial_elements_kind = Add<HConstant>(GetInitialFastElementsKind());
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kTransitionInfoOffset),
                        initial_elements_kind);

  // Unlike literals, constructed arrays don't have nested sites
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kNestedSiteOffset),
                        graph()->GetConstant0());

  // Pretenuring calculation field.
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kPretenureDataOffset),
                        graph()->GetConstant0());

  // Pretenuring memento creation count field.
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kPretenureCreateCountOffset),
                        graph()->GetConstant0());

  // Store an empty fixed array for the code dependency.
  HConstant* empty_fixed_array =
      Add<HConstant>(isolate()->factory()->empty_fixed_array());
  Add<HStoreNamedField>(
      object,
      HObjectAccess::ForAllocationSiteOffset(
          AllocationSite::kDependentCodeOffset),
      empty_fixed_array);

  // Link the object to the allocation site list
  HValue* site_list = Add<HConstant>(
      ExternalReference::allocation_sites_list_address(isolate()));
  HValue* site = Add<HLoadNamedField>(site_list, nullptr,
                                      HObjectAccess::ForAllocationSiteList());
  // TODO(mvstanton): This is a store to a weak pointer, which we may want to
  // mark as such in order to skip the write barrier, once we have a unified
  // system for weakness. For now we decided to keep it like this because having
  // an initial write barrier backed store makes this pointer strong until the
  // next GC, and allocation sites are designed to survive several GCs anyway.
  Add<HStoreNamedField>(
      object,
      HObjectAccess::ForAllocationSiteOffset(AllocationSite::kWeakNextOffset),
      site);
  Add<HStoreNamedField>(site_list, HObjectAccess::ForAllocationSiteList(),
                        object);

  HInstruction* feedback_vector = GetParameter(0);
  HInstruction* slot = GetParameter(1);
  Add<HStoreKeyed>(feedback_vector, slot, object, FAST_ELEMENTS,
                   INITIALIZING_STORE);
  return feedback_vector;
}


Handle<Code> CreateAllocationSiteStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<LoadScriptContextFieldStub>::BuildCodeStub() {
  int context_index = casted_stub()->context_index();
  int slot_index = casted_stub()->slot_index();

  HValue* script_context = BuildGetScriptContext(context_index);
  return Add<HLoadNamedField>(script_context, nullptr,
                              HObjectAccess::ForContextSlot(slot_index));
}


Handle<Code> LoadScriptContextFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<StoreScriptContextFieldStub>::BuildCodeStub() {
  int context_index = casted_stub()->context_index();
  int slot_index = casted_stub()->slot_index();

  HValue* script_context = BuildGetScriptContext(context_index);
  Add<HStoreNamedField>(script_context,
                        HObjectAccess::ForContextSlot(slot_index),
                        GetParameter(2), STORE_TO_INITIALIZED_ENTRY);
  return GetParameter(2);
}


Handle<Code> StoreScriptContextFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<LoadFastElementStub>::BuildCodeStub() {
  HInstruction* load = BuildUncheckedMonomorphicElementAccess(
      GetParameter(LoadDescriptor::kReceiverIndex),
      GetParameter(LoadDescriptor::kNameIndex), NULL,
      casted_stub()->is_js_array(), casted_stub()->elements_kind(), LOAD,
      NEVER_RETURN_HOLE, STANDARD_STORE);
  return load;
}


Handle<Code> LoadFastElementStub::GenerateCode() {
  return DoGenerateCode(this);
}


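// Loads the field at |index| from |object|, transparently handling double
// fields stored either unboxed in-object (FLAG_unbox_double_fields) or boxed
// in a HeapNumber.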
HLoadNamedField* CodeStubGraphBuilderBase::BuildLoadNamedField(
    HValue* object, FieldIndex index) {
  Representation representation = index.is_double()
      ? Representation::Double()
      : Representation::Tagged();
  int offset = index.offset();
  HObjectAccess access = index.is_inobject()
      ? HObjectAccess::ForObservableJSObjectOffset(offset, representation)
      : HObjectAccess::ForBackingStoreOffset(offset, representation);
  if (index.is_double() &&
      (!FLAG_unbox_double_fields || !index.is_inobject())) {
    // Load the heap number.
    object = Add<HLoadNamedField>(
        object, nullptr, access.WithRepresentation(Representation::Tagged()));
    // Load the double value from it.
    access = HObjectAccess::ForHeapNumberValue();
  }
  return Add<HLoadNamedField>(object, nullptr, access);
}


template<>
HValue* CodeStubGraphBuilder<LoadFieldStub>::BuildCodeStub() {
  return BuildLoadNamedField(GetParameter(0), casted_stub()->index());
}


Handle<Code> LoadFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<LoadConstantStub>::BuildCodeStub() {
  HValue* map = AddLoadMap(GetParameter(0), NULL);
  HObjectAccess descriptors_access = HObjectAccess::ForObservableJSObjectOffset(
      Map::kDescriptorsOffset, Representation::Tagged());
  HValue* descriptors = Add<HLoadNamedField>(map, nullptr, descriptors_access);
  HObjectAccess value_access = HObjectAccess::ForObservableJSObjectOffset(
      DescriptorArray::GetValueOffset(casted_stub()->constant_index()));
  return Add<HLoadNamedField>(descriptors, nullptr, value_access);
}


Handle<Code> LoadConstantStub::GenerateCode() { return DoGenerateCode(this); }


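// Looks |key| up in the unmapped (regular) arguments backing store held at
// elements[1], deopting when the key is outside that store's length. Used by
// KeyedLoadSloppyArgumentsStub below.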
HValue* CodeStubGraphBuilderBase::UnmappedCase(HValue* elements, HValue* key) {
  HValue* result;
  HInstruction* backing_store =
      Add<HLoadKeyed>(elements, graph()->GetConstant1(), nullptr, FAST_ELEMENTS,
                      ALLOW_RETURN_HOLE);
  Add<HCheckMaps>(backing_store, isolate()->factory()->fixed_array_map());
  HValue* backing_store_length = Add<HLoadNamedField>(
      backing_store, nullptr, HObjectAccess::ForFixedArrayLength());
  IfBuilder in_unmapped_range(this);
  in_unmapped_range.If<HCompareNumericAndBranch>(key, backing_store_length,
                                                 Token::LT);
  in_unmapped_range.Then();
  {
    result = Add<HLoadKeyed>(backing_store, key, nullptr, FAST_HOLEY_ELEMENTS,
                             NEVER_RETURN_HOLE);
  }
  in_unmapped_range.ElseDeopt("Outside of range");
  in_unmapped_range.End();
  return result;
}


template <>
HValue* CodeStubGraphBuilder<KeyedLoadSloppyArgumentsStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
  HValue* key = GetParameter(LoadDescriptor::kNameIndex);

  // Mapped arguments are actual arguments. Unmapped arguments are values added
  // to the arguments object after it was created for the call. Mapped arguments
  // are stored in the context at indexes given by elements[key + 2]. Unmapped
  // arguments are stored as regular indexed properties in the arguments array,
  // held at elements[1]. See NewSloppyArguments() in runtime.cc for a detailed
  // look at argument object construction.
  //
  // The sloppy arguments elements array has a special format:
  //
  // 0: context
  // 1: unmapped arguments array
  // 2: mapped_index0,
  // 3: mapped_index1,
  // ...
  //
  // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments).
  // If key + 2 >= elements.length then attempt to look in the unmapped
  // arguments array (given by elements[1]) and return the value at key, missing
  // to the runtime if the unmapped arguments array is not a fixed array or if
  // key >= unmapped_arguments_array.length.
  //
  // Otherwise, t = elements[key + 2]. If t is the hole, then look up the value
  // in the unmapped arguments array, as described above. Otherwise, t is a Smi
  // index into the context array given at elements[0]. Return the value at
  // context[t].
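  //
  // A hedged worked example: for f(a, b) called as f(1, 2, 3), elements might
  // look like
  //   [ context, unmapped arguments (length 3), Smi(index_of_a), Smi(index_of_b) ]
  // so key 0 loads context[elements[2]], while key 2 (or any entry holding
  // the hole) falls through to the unmapped arguments array.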

  key = AddUncasted<HForceRepresentation>(key, Representation::Smi());
  IfBuilder positive_smi(this);
  positive_smi.If<HCompareNumericAndBranch>(key, graph()->GetConstant0(),
                                            Token::LT);
  positive_smi.ThenDeopt("key is negative");
  positive_smi.End();

  HValue* constant_two = Add<HConstant>(2);
  HValue* elements = AddLoadElements(receiver, nullptr);
  HValue* elements_length = Add<HLoadNamedField>(
      elements, nullptr, HObjectAccess::ForFixedArrayLength());
  HValue* adjusted_length = AddUncasted<HSub>(elements_length, constant_two);
  IfBuilder in_range(this);
  in_range.If<HCompareNumericAndBranch>(key, adjusted_length, Token::LT);
  in_range.Then();
  {
    HValue* index = AddUncasted<HAdd>(key, constant_two);
    HInstruction* mapped_index = Add<HLoadKeyed>(
        elements, index, nullptr, FAST_HOLEY_ELEMENTS, ALLOW_RETURN_HOLE);

    IfBuilder is_valid(this);
    is_valid.IfNot<HCompareObjectEqAndBranch>(mapped_index,
                                              graph()->GetConstantHole());
    is_valid.Then();
    {
      // TODO(mvstanton): I'd like to assert from this point, that if the
      // mapped_index is not the hole that it is indeed, a smi. An unnecessary
      // smi check is being emitted.
      HValue* the_context = Add<HLoadKeyed>(elements, graph()->GetConstant0(),
                                            nullptr, FAST_ELEMENTS);
      DCHECK(Context::kHeaderSize == FixedArray::kHeaderSize);
      HValue* result = Add<HLoadKeyed>(the_context, mapped_index, nullptr,
                                       FAST_ELEMENTS, ALLOW_RETURN_HOLE);
      environment()->Push(result);
    }
    is_valid.Else();
    {
      HValue* result = UnmappedCase(elements, key);
      environment()->Push(result);
    }
    is_valid.End();
  }
  in_range.Else();
  {
    HValue* result = UnmappedCase(elements, key);
    environment()->Push(result);
  }
  in_range.End();

  return environment()->Pop();
}


Handle<Code> KeyedLoadSloppyArgumentsStub::GenerateCode() {
  return DoGenerateCode(this);
}


void CodeStubGraphBuilderBase::BuildStoreNamedField(
    HValue* object, HValue* value, FieldIndex index,
    Representation representation, bool transition_to_field) {
  DCHECK(!index.is_double() || representation.IsDouble());
  int offset = index.offset();
  HObjectAccess access =
      index.is_inobject()
          ? HObjectAccess::ForObservableJSObjectOffset(offset, representation)
          : HObjectAccess::ForBackingStoreOffset(offset, representation);

  if (representation.IsDouble()) {
    if (!FLAG_unbox_double_fields || !index.is_inobject()) {
      HObjectAccess heap_number_access =
          access.WithRepresentation(Representation::Tagged());
      if (transition_to_field) {
        // The store requires a mutable HeapNumber to be allocated.
        NoObservableSideEffectsScope no_side_effects(this);
        HInstruction* heap_number_size = Add<HConstant>(HeapNumber::kSize);

        // TODO(hpayer): Allocation site pretenuring support.
        HInstruction* heap_number =
            Add<HAllocate>(heap_number_size, HType::HeapObject(), NOT_TENURED,
                           MUTABLE_HEAP_NUMBER_TYPE);
        AddStoreMapConstant(heap_number,
                            isolate()->factory()->mutable_heap_number_map());
        Add<HStoreNamedField>(heap_number, HObjectAccess::ForHeapNumberValue(),
                              value);
        // Store the new mutable heap number into the object.
        access = heap_number_access;
        value = heap_number;
      } else {
        // Load the heap number.
        object = Add<HLoadNamedField>(object, nullptr, heap_number_access);
        // Store the double value into it.
        access = HObjectAccess::ForHeapNumberValue();
      }
    }
  } else if (representation.IsHeapObject()) {
    BuildCheckHeapObject(value);
  }

  Add<HStoreNamedField>(object, access, value, INITIALIZING_STORE);
}


template <>
HValue* CodeStubGraphBuilder<StoreFieldStub>::BuildCodeStub() {
  BuildStoreNamedField(GetParameter(0), GetParameter(2), casted_stub()->index(),
                       casted_stub()->representation(), false);
  return GetParameter(2);
}


Handle<Code> StoreFieldStub::GenerateCode() { return DoGenerateCode(this); }


template <>
HValue* CodeStubGraphBuilder<StoreTransitionStub>::BuildCodeStub() {
  HValue* object = GetParameter(StoreTransitionDescriptor::kReceiverIndex);

  switch (casted_stub()->store_mode()) {
    case StoreTransitionStub::ExtendStorageAndStoreMapAndValue: {
      HValue* properties = Add<HLoadNamedField>(
          object, nullptr, HObjectAccess::ForPropertiesPointer());
      HValue* length = AddLoadFixedArrayLength(properties);
      HValue* delta =
          Add<HConstant>(static_cast<int32_t>(JSObject::kFieldsAdded));
      HValue* new_capacity = AddUncasted<HAdd>(length, delta);

      // Grow properties array.
      ElementsKind kind = FAST_ELEMENTS;
      Add<HBoundsCheck>(new_capacity,
                        Add<HConstant>((Page::kMaxRegularHeapObjectSize -
                                        FixedArray::kHeaderSize) >>
                                       ElementsKindToShiftSize(kind)));

      // Reuse this code for properties backing store allocation.
      HValue* new_properties =
          BuildAllocateAndInitializeArray(kind, new_capacity);

      BuildCopyProperties(properties, new_properties, length, new_capacity);

      Add<HStoreNamedField>(object, HObjectAccess::ForPropertiesPointer(),
                            new_properties);
    }
    // Fall through.
    case StoreTransitionStub::StoreMapAndValue:
      // Store the new value into the "extended" object.
      BuildStoreNamedField(
          object, GetParameter(StoreTransitionDescriptor::kValueIndex),
          casted_stub()->index(), casted_stub()->representation(), true);
    // Fall through.

    case StoreTransitionStub::StoreMapOnly:
      // And finally update the map.
      Add<HStoreNamedField>(object, HObjectAccess::ForMap(),
                            GetParameter(StoreTransitionDescriptor::kMapIndex));
      break;
  }
  return GetParameter(StoreTransitionDescriptor::kValueIndex);
}


Handle<Code> StoreTransitionStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<StringLengthStub>::BuildCodeStub() {
  HValue* string = BuildLoadNamedField(GetParameter(0),
      FieldIndex::ForInObjectOffset(JSValue::kValueOffset));
  return BuildLoadNamedField(string,
      FieldIndex::ForInObjectOffset(String::kLengthOffset));
}


Handle<Code> StringLengthStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<StoreFastElementStub>::BuildCodeStub() {
  BuildUncheckedMonomorphicElementAccess(
      GetParameter(StoreDescriptor::kReceiverIndex),
      GetParameter(StoreDescriptor::kNameIndex),
      GetParameter(StoreDescriptor::kValueIndex), casted_stub()->is_js_array(),
      casted_stub()->elements_kind(), STORE, NEVER_RETURN_HOLE,
      casted_stub()->store_mode());

  return GetParameter(2);
}


Handle<Code> StoreFastElementStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<TransitionElementsKindStub>::BuildCodeStub() {
  info()->MarkAsSavesCallerDoubles();

  BuildTransitionElementsKind(GetParameter(0),
                              GetParameter(1),
                              casted_stub()->from_kind(),
                              casted_stub()->to_kind(),
                              casted_stub()->is_js_array());

  return GetParameter(0);
}


Handle<Code> TransitionElementsKindStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<AllocateHeapNumberStub>::BuildCodeStub() {
  HValue* result =
      Add<HAllocate>(Add<HConstant>(HeapNumber::kSize), HType::HeapNumber(),
                     NOT_TENURED, HEAP_NUMBER_TYPE);
  AddStoreMapConstant(result, isolate()->factory()->heap_number_map());
  return result;
}


Handle<Code> AllocateHeapNumberStub::GenerateCode() {
  return DoGenerateCode(this);
}


HValue* CodeStubGraphBuilderBase::BuildArrayConstructor(
    ElementsKind kind,
    AllocationSiteOverrideMode override_mode,
    ArgumentClass argument_class) {
  HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor);
  HValue* alloc_site = GetParameter(ArrayConstructorStubBase::kAllocationSite);
  JSArrayBuilder array_builder(this, kind, alloc_site, constructor,
                               override_mode);
  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      // This stub is very performance sensitive; the generated code must be
      // tuned so that it doesn't build an eager frame.
      info()->MarkMustNotHaveEagerFrame();
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }

  return result;
}


HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor(
    ElementsKind kind, ArgumentClass argument_class) {
  HValue* constructor = GetParameter(
      InternalArrayConstructorStubBase::kConstructor);
  JSArrayBuilder array_builder(this, kind, constructor);

  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      // This stub is very performance sensitive; the generated code must be
      // tuned so that it doesn't build an eager frame.
      info()->MarkMustNotHaveEagerFrame();
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }
  return result;
}


HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor(
    JSArrayBuilder* array_builder) {
  // Smi check and range check on the input arg.
  HValue* constant_one = graph()->GetConstant1();
  HValue* constant_zero = graph()->GetConstant0();

  HInstruction* elements = Add<HArgumentsElements>(false);
  HInstruction* argument = Add<HAccessArgumentsAt>(
      elements, constant_one, constant_zero);

  return BuildAllocateArrayFromLength(array_builder, argument);
}


HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor(
    JSArrayBuilder* array_builder, ElementsKind kind) {
  // Insert a bounds check because the number of arguments might exceed
  // the kInitialMaxFastElementArray limit. This cannot happen for code
  // that was parsed, but calling via Array.apply(thisArg, [...]) might
  // trigger it.
  HValue* length = GetArgumentsLength();
  HConstant* max_alloc_length =
      Add<HConstant>(JSObject::kInitialMaxFastElementArray);
  HValue* checked_length = Add<HBoundsCheck>(length, max_alloc_length);

  // We need to fill with the hole if it's a smi array in the multi-argument
  // case because we might have to bail out while copying arguments into
  // the array because they aren't compatible with a smi array.
  // If it's a double array, no problem, and if it's fast then no
  // problem either because doubles are boxed.
  //
  // TODO(mvstanton): consider an instruction to memset fill the array
  // with zero in this case instead.
  JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind)
      ? JSArrayBuilder::FILL_WITH_HOLE
      : JSArrayBuilder::DONT_FILL_WITH_HOLE;
  HValue* new_object = array_builder->AllocateArray(checked_length,
                                                    max_alloc_length,
                                                    checked_length,
                                                    fill_mode);
  HValue* elements = array_builder->GetElementsLocation();
  DCHECK(elements != NULL);

  // Now populate the elements correctly.
  LoopBuilder builder(this,
                      context(),
                      LoopBuilder::kPostIncrement);
  HValue* start = graph()->GetConstant0();
  HValue* key = builder.BeginBody(start, checked_length, Token::LT);
  HInstruction* argument_elements = Add<HArgumentsElements>(false);
  HInstruction* argument = Add<HAccessArgumentsAt>(
      argument_elements, checked_length, key);

  Add<HStoreKeyed>(elements, key, argument, kind);
  builder.EndBody();
  return new_object;
}


template <>
HValue* CodeStubGraphBuilder<ArrayNoArgumentConstructorStub>::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, NONE);
}


Handle<Code> ArrayNoArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<ArraySingleArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, SINGLE);
}


Handle<Code> ArraySingleArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<ArrayNArgumentsConstructorStub>::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, MULTIPLE);
}


Handle<Code> ArrayNArgumentsConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArrayNoArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, NONE);
}


Handle<Code> InternalArrayNoArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArraySingleArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, SINGLE);
}


Handle<Code> InternalArraySingleArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArrayNArgumentsConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, MULTIPLE);
}


Handle<Code> InternalArrayNArgumentsConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<CompareNilICStub>::BuildCodeInitializedStub() {
  Isolate* isolate = graph()->isolate();
  CompareNilICStub* stub = casted_stub();
  HIfContinuation continuation;
  Handle<Map> sentinel_map(isolate->heap()->meta_map());
  Type* type = stub->GetType(zone(), sentinel_map);
  BuildCompareNil(GetParameter(0), type, &continuation, kEmbedMapsViaWeakCells);
  IfBuilder if_nil(this, &continuation);
  if_nil.Then();
  if (continuation.IsFalseReachable()) {
    if_nil.Else();
    if_nil.Return(graph()->GetConstant0());
  }
  if_nil.End();
  return continuation.IsTrueReachable()
      ? graph()->GetConstant1()
      : graph()->GetConstantUndefined();
}


Handle<Code> CompareNilICStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<BinaryOpICStub>::BuildCodeInitializedStub() {
  BinaryOpICState state = casted_stub()->state();

  HValue* left = GetParameter(BinaryOpICStub::kLeft);
  HValue* right = GetParameter(BinaryOpICStub::kRight);

  Type* left_type = state.GetLeftType(zone());
  Type* right_type = state.GetRightType(zone());
  Type* result_type = state.GetResultType(zone());

  DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) &&
         (state.HasSideEffects() || !result_type->Is(Type::None())));

  HValue* result = NULL;
  HAllocationMode allocation_mode(NOT_TENURED);
  if (state.op() == Token::ADD &&
      (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) &&
      !left_type->Is(Type::String()) && !right_type->Is(Type::String())) {
    // For the generic add stub a fast case for string addition is performance
    // critical.
    if (left_type->Maybe(Type::String())) {
      IfBuilder if_leftisstring(this);
      if_leftisstring.If<HIsStringAndBranch>(left);
      if_leftisstring.Then();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right,
            Type::String(zone()), right_type,
            result_type, state.fixed_right_arg(),
            allocation_mode));
      }
      if_leftisstring.Else();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right,
            left_type, right_type, result_type,
            state.fixed_right_arg(), allocation_mode));
      }
      if_leftisstring.End();
      result = Pop();
    } else {
      IfBuilder if_rightisstring(this);
      if_rightisstring.If<HIsStringAndBranch>(right);
      if_rightisstring.Then();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right,
            left_type, Type::String(zone()),
            result_type, state.fixed_right_arg(),
            allocation_mode));
      }
      if_rightisstring.Else();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right,
            left_type, right_type, result_type,
            state.fixed_right_arg(), allocation_mode));
      }
      if_rightisstring.End();
      result = Pop();
    }
  } else {
    result = BuildBinaryOperation(
        state.op(), left, right,
        left_type, right_type, result_type,
        state.fixed_right_arg(), allocation_mode);
  }

  // If we encounter a generic argument, the number conversion is
  // observable, thus we cannot afford to bail out after the fact.
  if (!state.HasSideEffects()) {
    result = EnforceNumberType(result, result_type);
  }

  // Reuse the double box of one of the operands if we are allowed to (i.e.
  // chained binops).
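  // A hedged example: in (a + b) + c, the inner result is a freshly allocated
  // HeapNumber that nothing else can observe, so its box can be overwritten
  // in place with the outer result instead of allocating again.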
  if (state.CanReuseDoubleBox()) {
    HValue* operand = (state.mode() == OVERWRITE_LEFT) ? left : right;
    IfBuilder if_heap_number(this);
    if_heap_number.If<HHasInstanceTypeAndBranch>(operand, HEAP_NUMBER_TYPE);
    if_heap_number.Then();
    Add<HStoreNamedField>(operand, HObjectAccess::ForHeapNumberValue(), result);
    Push(operand);
    if_heap_number.Else();
    Push(result);
    if_heap_number.End();
    result = Pop();
  }

  return result;
}


Handle<Code> BinaryOpICStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<BinaryOpWithAllocationSiteStub>::BuildCodeStub() {
  BinaryOpICState state = casted_stub()->state();

  HValue* allocation_site = GetParameter(
      BinaryOpWithAllocationSiteStub::kAllocationSite);
  HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft);
  HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight);

  Type* left_type = state.GetLeftType(zone());
  Type* right_type = state.GetRightType(zone());
  Type* result_type = state.GetResultType(zone());
  HAllocationMode allocation_mode(allocation_site);

  return BuildBinaryOperation(state.op(), left, right,
                              left_type, right_type, result_type,
                              state.fixed_right_arg(), allocation_mode);
}


Handle<Code> BinaryOpWithAllocationSiteStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<StringAddStub>::BuildCodeInitializedStub() {
  StringAddStub* stub = casted_stub();
  StringAddFlags flags = stub->flags();
  PretenureFlag pretenure_flag = stub->pretenure_flag();

  HValue* left = GetParameter(StringAddStub::kLeft);
  HValue* right = GetParameter(StringAddStub::kRight);

  // Make sure that both arguments are strings if not known in advance.
  if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) {
    left = BuildCheckString(left);
  }
  if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) {
    right = BuildCheckString(right);
  }

  return BuildStringAdd(left, right, HAllocationMode(pretenure_flag));
}


Handle<Code> StringAddStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<ToBooleanStub>::BuildCodeInitializedStub() {
  ToBooleanStub* stub = casted_stub();
  HValue* true_value = NULL;
  HValue* false_value = NULL;

  switch (stub->mode()) {
    case ToBooleanStub::RESULT_AS_SMI:
      true_value = graph()->GetConstant1();
      false_value = graph()->GetConstant0();
      break;
    case ToBooleanStub::RESULT_AS_ODDBALL:
      true_value = graph()->GetConstantTrue();
      false_value = graph()->GetConstantFalse();
      break;
    case ToBooleanStub::RESULT_AS_INVERSE_ODDBALL:
      true_value = graph()->GetConstantFalse();
      false_value = graph()->GetConstantTrue();
      break;
  }

  IfBuilder if_true(this);
  if_true.If<HBranch>(GetParameter(0), stub->types());
  if_true.Then();
  if_true.Return(true_value);
  if_true.Else();
  if_true.End();
  return false_value;
}


Handle<Code> ToBooleanStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<StoreGlobalStub>::BuildCodeInitializedStub() {
  StoreGlobalStub* stub = casted_stub();
  Handle<Object> placeholder_value(Smi::FromInt(0), isolate());
  Handle<PropertyCell> placeholder_cell =
      isolate()->factory()->NewPropertyCell(placeholder_value);

  HParameter* value = GetParameter(StoreDescriptor::kValueIndex);

  if (stub->check_global()) {
    // Check that the map of the global has not changed: use a placeholder map
    // that will be replaced later with the global object's map.
    Handle<Map> placeholder_map = isolate()->factory()->meta_map();
    HValue* global = Add<HConstant>(
        StoreGlobalStub::global_placeholder(isolate()));
    Add<HCheckMaps>(global, placeholder_map);
  }

  HValue* cell = Add<HConstant>(placeholder_cell);
  HObjectAccess access(HObjectAccess::ForCellPayload(isolate()));
  HValue* cell_contents = Add<HLoadNamedField>(cell, nullptr, access);

  if (stub->is_constant()) {
    IfBuilder builder(this);
    builder.If<HCompareObjectEqAndBranch>(cell_contents, value);
    builder.Then();
    builder.ElseDeopt("Unexpected cell contents in constant global store");
    builder.End();
  } else {
    // Load the payload of the global parameter cell. A hole indicates that the
    // property has been deleted and that the store must be handled by the
    // runtime.
    IfBuilder builder(this);
    HValue* hole_value = graph()->GetConstantHole();
    builder.If<HCompareObjectEqAndBranch>(cell_contents, hole_value);
    builder.Then();
    builder.Deopt("Unexpected cell contents in global store");
    builder.Else();
    Add<HStoreNamedField>(cell, access, value);
    builder.End();
  }

  return value;
}


Handle<Code> StoreGlobalStub::GenerateCode() {
  return DoGenerateCode(this);
}


template<>
HValue* CodeStubGraphBuilder<ElementsTransitionAndStoreStub>::BuildCodeStub() {
  HValue* value = GetParameter(ElementsTransitionAndStoreStub::kValueIndex);
  HValue* map = GetParameter(ElementsTransitionAndStoreStub::kMapIndex);
  HValue* key = GetParameter(ElementsTransitionAndStoreStub::kKeyIndex);
  HValue* object = GetParameter(ElementsTransitionAndStoreStub::kObjectIndex);

  if (FLAG_trace_elements_transitions) {
    // Tracing elements transitions is the job of the runtime.
    Add<HDeoptimize>("Tracing elements transitions", Deoptimizer::EAGER);
  } else {
    info()->MarkAsSavesCallerDoubles();

    BuildTransitionElementsKind(object, map,
                                casted_stub()->from_kind(),
                                casted_stub()->to_kind(),
                                casted_stub()->is_jsarray());

    BuildUncheckedMonomorphicElementAccess(object, key, value,
                                           casted_stub()->is_jsarray(),
                                           casted_stub()->to_kind(),
                                           STORE, ALLOW_RETURN_HOLE,
                                           casted_stub()->store_mode());
  }

  return value;
}


Handle<Code> ElementsTransitionAndStoreStub::GenerateCode() {
  return DoGenerateCode(this);
}


void CodeStubGraphBuilderBase::BuildCheckAndInstallOptimizedCode(
    HValue* js_function,
    HValue* native_context,
    IfBuilder* builder,
    HValue* optimized_map,
    HValue* map_index) {
  HValue* osr_ast_id_none = Add<HConstant>(BailoutId::None().ToInt());
  HValue* context_slot = LoadFromOptimizedCodeMap(
      optimized_map, map_index, SharedFunctionInfo::kContextOffset);
  HValue* osr_ast_slot = LoadFromOptimizedCodeMap(
      optimized_map, map_index, SharedFunctionInfo::kOsrAstIdOffset);
  builder->If<HCompareObjectEqAndBranch>(native_context,
                                         context_slot);
  builder->AndIf<HCompareObjectEqAndBranch>(osr_ast_slot, osr_ast_id_none);
  builder->Then();
  HValue* code_object = LoadFromOptimizedCodeMap(optimized_map,
      map_index, SharedFunctionInfo::kCachedCodeOffset);
  // and the literals
  HValue* literals = LoadFromOptimizedCodeMap(optimized_map,
      map_index, SharedFunctionInfo::kLiteralsOffset);

  Counters* counters = isolate()->counters();
  AddIncrementCounter(counters->fast_new_closure_install_optimized());

  // TODO(fschneider): Idea: store proper code pointers in the optimized code
  // map and either unmangle them on marking or do nothing as the whole map is
  // discarded on major GC anyway.
  Add<HStoreCodeEntry>(js_function, code_object);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(),
                        literals);

  // Now link a function into a list of optimized functions.
  HValue* optimized_functions_list = Add<HLoadNamedField>(
      native_context, nullptr,
      HObjectAccess::ForContextSlot(Context::OPTIMIZED_FUNCTIONS_LIST));
  Add<HStoreNamedField>(js_function,
                        HObjectAccess::ForNextFunctionLinkPointer(),
                        optimized_functions_list);

  // This store is the only one that should have a write barrier.
  Add<HStoreNamedField>(native_context,
      HObjectAccess::ForContextSlot(Context::OPTIMIZED_FUNCTIONS_LIST),
      js_function);

  // The builder continues in the "then" after this function.
}


void CodeStubGraphBuilderBase::BuildInstallCode(HValue* js_function,
                                                HValue* shared_info) {
  Add<HStoreNamedField>(js_function,
                        HObjectAccess::ForNextFunctionLinkPointer(),
                        graph()->GetConstantUndefined());
  HValue* code_object = Add<HLoadNamedField>(shared_info, nullptr,
                                             HObjectAccess::ForCodeOffset());
  Add<HStoreCodeEntry>(js_function, code_object);
}


HInstruction* CodeStubGraphBuilderBase::LoadFromOptimizedCodeMap(
    HValue* optimized_map,
    HValue* iterator,
    int field_offset) {
  // By making sure to express these loads in the form [<hvalue> + constant]
  // the keyed load can be hoisted.
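  // For example, field_offset == 2 expresses the access as
  // optimized_map[iterator + 2]. Keeping the constant as a separate addend
  // (rather than folding it into a new iterator) is what enables the hoisting
  // mentioned above.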
  DCHECK(field_offset >= 0 && field_offset < SharedFunctionInfo::kEntryLength);
  HValue* field_slot = iterator;
  if (field_offset > 0) {
    HValue* field_offset_value = Add<HConstant>(field_offset);
    field_slot = AddUncasted<HAdd>(iterator, field_offset_value);
  }
  HInstruction* field_entry =
      Add<HLoadKeyed>(optimized_map, field_slot, nullptr, FAST_ELEMENTS);
  return field_entry;
}


void CodeStubGraphBuilderBase::BuildInstallFromOptimizedCodeMap(
    HValue* js_function,
    HValue* shared_info,
    HValue* native_context) {
  Counters* counters = isolate()->counters();
  IfBuilder is_optimized(this);
  HInstruction* optimized_map = Add<HLoadNamedField>(
      shared_info, nullptr, HObjectAccess::ForOptimizedCodeMap());
  HValue* null_constant = Add<HConstant>(0);
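  // A hedged note: a never-populated optimized code map appears to be stored
  // as Smi zero, hence the comparison against the constant 0 (despite the
  // name null_constant).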
  is_optimized.If<HCompareObjectEqAndBranch>(optimized_map, null_constant);
  is_optimized.Then();
  {
    BuildInstallCode(js_function, shared_info);
  }
  is_optimized.Else();
  {
    AddIncrementCounter(counters->fast_new_closure_try_optimized());
    // optimized_map points to a fixed array of entries, each holding
    // (native context, optimized code, literals, osr ast id).
    // The map must never be empty, so check the first entry.
1492 HValue* first_entry_index =
1493 Add<HConstant>(SharedFunctionInfo::kEntriesStart);
1494 IfBuilder already_in(this);
1495 BuildCheckAndInstallOptimizedCode(js_function, native_context, &already_in,
1496 optimized_map, first_entry_index);
1497 already_in.Else();
1498 {
1499 // Iterate through the rest of map backwards. Do not double check first
1500 // entry. After the loop, if no matching optimized code was found,
1501 // install unoptimized code.
1502 // for(i = map.length() - SharedFunctionInfo::kEntryLength;
1503 // i > SharedFunctionInfo::kEntriesStart;
1504 // i -= SharedFunctionInfo::kEntryLength) { .. }
      HValue* shared_function_entry_length =
          Add<HConstant>(SharedFunctionInfo::kEntryLength);
      LoopBuilder loop_builder(this,
                               context(),
                               LoopBuilder::kPostDecrement,
                               shared_function_entry_length);
      HValue* array_length = Add<HLoadNamedField>(
          optimized_map, nullptr, HObjectAccess::ForFixedArrayLength());
      HValue* start_pos = AddUncasted<HSub>(array_length,
                                            shared_function_entry_length);
      HValue* slot_iterator = loop_builder.BeginBody(start_pos,
                                                     first_entry_index,
                                                     Token::GT);
      {
        IfBuilder done_check(this);
        BuildCheckAndInstallOptimizedCode(js_function, native_context,
                                          &done_check,
                                          optimized_map,
                                          slot_iterator);
        // Fall out of the loop.
        loop_builder.Break();
      }
      loop_builder.EndBody();

      // If slot_iterator equals the first entry index, then we failed to
      // find and install optimized code.
      IfBuilder no_optimized_code_check(this);
      no_optimized_code_check.If<HCompareNumericAndBranch>(
          slot_iterator, first_entry_index, Token::EQ);
      no_optimized_code_check.Then();
      {
        // Store the unoptimized code.
        BuildInstallCode(js_function, shared_info);
      }
    }
  }
}


template<>
HValue* CodeStubGraphBuilder<FastNewClosureStub>::BuildCodeStub() {
  Counters* counters = isolate()->counters();
  Factory* factory = isolate()->factory();
  HInstruction* empty_fixed_array =
      Add<HConstant>(factory->empty_fixed_array());
  HValue* shared_info = GetParameter(0);

  AddIncrementCounter(counters->fast_new_closure_total());

  // Create a new closure from the given function info in new space
  HValue* size = Add<HConstant>(JSFunction::kSize);
  HInstruction* js_function =
      Add<HAllocate>(size, HType::JSObject(), NOT_TENURED, JS_FUNCTION_TYPE);

  int map_index = Context::FunctionMapIndex(casted_stub()->strict_mode(),
                                            casted_stub()->kind());

  // Compute the function map in the current native context and set that
  // as the map of the allocated object.
  HInstruction* native_context = BuildGetNativeContext();
  HInstruction* map_slot_value = Add<HLoadNamedField>(
      native_context, nullptr, HObjectAccess::ForContextSlot(map_index));
  Add<HStoreNamedField>(js_function, HObjectAccess::ForMap(), map_slot_value);

  // Initialize the rest of the function.
  Add<HStoreNamedField>(js_function, HObjectAccess::ForPropertiesPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForElementsPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForPrototypeOrInitialMap(),
                        graph()->GetConstantHole());
  Add<HStoreNamedField>(
      js_function, HObjectAccess::ForSharedFunctionInfoPointer(), shared_info);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForFunctionContextPointer(),
                        context());

  // Initialize the code pointer in the function to be the one
  // found in the shared function info object.
  // But first check if there is an optimized version for our context.
  if (FLAG_cache_optimized_code) {
    BuildInstallFromOptimizedCodeMap(js_function, shared_info, native_context);
  } else {
    BuildInstallCode(js_function, shared_info);
  }

  return js_function;
}


Handle<Code> FastNewClosureStub::GenerateCode() {
  return DoGenerateCode(this);
}


template<>
HValue* CodeStubGraphBuilder<FastNewContextStub>::BuildCodeStub() {
  int length = casted_stub()->slots() + Context::MIN_CONTEXT_SLOTS;

  // Get the function.
  HParameter* function = GetParameter(FastNewContextStub::kFunction);

  // Allocate the context in new space.
  HAllocate* function_context = Add<HAllocate>(
      Add<HConstant>(length * kPointerSize + FixedArray::kHeaderSize),
      HType::HeapObject(), NOT_TENURED, FIXED_ARRAY_TYPE);

  // Set up the object header.
  AddStoreMapConstant(function_context,
                      isolate()->factory()->function_context_map());
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForFixedArrayLength(),
                        Add<HConstant>(length));

  // Set up the fixed slots.
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::CLOSURE_INDEX),
                        function);
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX),
                        context());
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::EXTENSION_INDEX),
                        graph()->GetConstant0());

  // Copy the global object from the previous context.
  HValue* global_object = Add<HLoadNamedField>(
      context(), nullptr,
      HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(
                            Context::GLOBAL_OBJECT_INDEX),
                        global_object);

  // Initialize the rest of the slots to undefined.
  for (int i = Context::MIN_CONTEXT_SLOTS; i < length; ++i) {
    Add<HStoreNamedField>(function_context,
                          HObjectAccess::ForContextSlot(i),
                          graph()->GetConstantUndefined());
  }
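
  // Recap (descriptive only): the result is a FixedArray-shaped object of
  // |length| slots with the function context map; CLOSURE, PREVIOUS and
  // EXTENSION are filled in, GLOBAL_OBJECT is copied from the previous
  // context, and all remaining slots start out undefined.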

  return function_context;
}


Handle<Code> FastNewContextStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<LoadDictionaryElementStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
  HValue* key = GetParameter(LoadDescriptor::kNameIndex);

  Add<HCheckSmi>(key);

  HValue* elements = AddLoadElements(receiver);

  HValue* hash = BuildElementIndexHash(key);

  return BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash);
}


Handle<Code> LoadDictionaryElementStub::GenerateCode() {
  return DoGenerateCode(this);
}


template<>
HValue* CodeStubGraphBuilder<RegExpConstructResultStub>::BuildCodeStub() {
  // Determine the parameters.
  HValue* length = GetParameter(RegExpConstructResultStub::kLength);
  HValue* index = GetParameter(RegExpConstructResultStub::kIndex);
  HValue* input = GetParameter(RegExpConstructResultStub::kInput);

  info()->MarkMustNotHaveEagerFrame();

  return BuildRegExpConstructResult(length, index, input);
}


Handle<Code> RegExpConstructResultStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
class CodeStubGraphBuilder<KeyedLoadGenericStub>
    : public CodeStubGraphBuilderBase {
 public:
  explicit CodeStubGraphBuilder(CompilationInfoWithZone* info)
      : CodeStubGraphBuilderBase(info) {}

 protected:
  virtual HValue* BuildCodeStub();

  void BuildElementsKindLimitCheck(HGraphBuilder::IfBuilder* if_builder,
                                   HValue* bit_field2,
                                   ElementsKind kind);

  void BuildFastElementLoad(HGraphBuilder::IfBuilder* if_builder,
                            HValue* receiver,
                            HValue* key,
                            HValue* instance_type,
                            HValue* bit_field2,
                            ElementsKind kind);

  void BuildExternalElementLoad(HGraphBuilder::IfBuilder* if_builder,
                                HValue* receiver,
                                HValue* key,
                                HValue* instance_type,
                                HValue* bit_field2,
                                ElementsKind kind);

  KeyedLoadGenericStub* casted_stub() {
    return static_cast<KeyedLoadGenericStub*>(stub());
  }
};


void CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildElementsKindLimitCheck(
    HGraphBuilder::IfBuilder* if_builder, HValue* bit_field2,
    ElementsKind kind) {
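  // Descriptive note: ElementsKind values are ordered and stored in
  // Map::ElementsKindBits, so a single numeric comparison of bit_field2
  // against the encoding of |kind + 1| accepts every kind up to and
  // including |kind|; chaining these checks in successive Else() branches
  // dispatches over increasing ranges of kinds.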
  ElementsKind next_kind = static_cast<ElementsKind>(kind + 1);
  HValue* kind_limit = Add<HConstant>(
      static_cast<int>(Map::ElementsKindBits::encode(next_kind)));

  if_builder->If<HCompareNumericAndBranch>(bit_field2, kind_limit, Token::LT);
  if_builder->Then();
}


void CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildFastElementLoad(
    HGraphBuilder::IfBuilder* if_builder, HValue* receiver, HValue* key,
    HValue* instance_type, HValue* bit_field2, ElementsKind kind) {
  DCHECK(!IsExternalArrayElementsKind(kind));

  BuildElementsKindLimitCheck(if_builder, bit_field2, kind);

  IfBuilder js_array_check(this);
  js_array_check.If<HCompareNumericAndBranch>(
      instance_type, Add<HConstant>(JS_ARRAY_TYPE), Token::EQ);
  js_array_check.Then();
  Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
                                              true, kind,
                                              LOAD, NEVER_RETURN_HOLE,
                                              STANDARD_STORE));
  js_array_check.Else();
  Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
                                              false, kind,
                                              LOAD, NEVER_RETURN_HOLE,
                                              STANDARD_STORE));
  js_array_check.End();
}


void CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildExternalElementLoad(
    HGraphBuilder::IfBuilder* if_builder, HValue* receiver, HValue* key,
    HValue* instance_type, HValue* bit_field2, ElementsKind kind) {
  DCHECK(IsExternalArrayElementsKind(kind));

  BuildElementsKindLimitCheck(if_builder, bit_field2, kind);

  Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
                                              false, kind,
                                              LOAD, NEVER_RETURN_HOLE,
                                              STANDARD_STORE));
}


HValue* CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
  HValue* key = GetParameter(LoadDescriptor::kNameIndex);

  // Split into a smi/integer case and unique string case.
  HIfContinuation index_name_split_continuation(graph()->CreateBasicBlock(),
                                                graph()->CreateBasicBlock());

  BuildKeyedIndexCheck(key, &index_name_split_continuation);

  IfBuilder index_name_split(this, &index_name_split_continuation);
  index_name_split.Then();
  {
    // Key is an index (number)
    key = Pop();

    int bit_field_mask = (1 << Map::kIsAccessCheckNeeded) |
        (1 << Map::kHasIndexedInterceptor);
    BuildJSObjectCheck(receiver, bit_field_mask);

    HValue* map =
        Add<HLoadNamedField>(receiver, nullptr, HObjectAccess::ForMap());

    HValue* instance_type =
        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapInstanceType());

    HValue* bit_field2 =
        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField2());

    IfBuilder kind_if(this);
    BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                         FAST_HOLEY_ELEMENTS);

    kind_if.Else();
    {
      BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                           FAST_HOLEY_DOUBLE_ELEMENTS);
    }
    kind_if.Else();

    // The DICTIONARY_ELEMENTS check generates a "kind_if.Then"
    BuildElementsKindLimitCheck(&kind_if, bit_field2, DICTIONARY_ELEMENTS);
    {
      HValue* elements = AddLoadElements(receiver);

      HValue* hash = BuildElementIndexHash(key);

      Push(BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash));
    }
    kind_if.Else();

    // The SLOPPY_ARGUMENTS_ELEMENTS check generates a "kind_if.Then"
    BuildElementsKindLimitCheck(&kind_if, bit_field2,
                                SLOPPY_ARGUMENTS_ELEMENTS);
    // Non-strict elements are not handled.
    Add<HDeoptimize>("non-strict elements in KeyedLoadGenericStub",
                     Deoptimizer::EAGER);
    Push(graph()->GetConstant0());

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_INT8_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_UINT8_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_INT16_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_UINT16_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_INT32_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_UINT32_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_FLOAT32_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_FLOAT64_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_UINT8_CLAMPED_ELEMENTS);

    kind_if.ElseDeopt("ElementsKind unhandled in KeyedLoadGenericStub");

    kind_if.End();
  }
  index_name_split.Else();
  {
    // Key is a unique string.
    key = Pop();

    int bit_field_mask = (1 << Map::kIsAccessCheckNeeded) |
        (1 << Map::kHasNamedInterceptor);
    BuildJSObjectCheck(receiver, bit_field_mask);

    HIfContinuation continuation;
    BuildTestForDictionaryProperties(receiver, &continuation);
    IfBuilder if_dict_properties(this, &continuation);
    if_dict_properties.Then();
    {
      // Key is string, properties are dictionary mode
      BuildNonGlobalObjectCheck(receiver);

      HValue* properties = Add<HLoadNamedField>(
          receiver, nullptr, HObjectAccess::ForPropertiesPointer());

      HValue* hash =
          Add<HLoadNamedField>(key, nullptr, HObjectAccess::ForNameHashField());

      hash = AddUncasted<HShr>(hash, Add<HConstant>(Name::kHashShift));

      HValue* value = BuildUncheckedDictionaryElementLoad(receiver,
                                                          properties,
                                                          key,
                                                          hash);
      Push(value);
    }
    if_dict_properties.Else();
    {
      // Key is string, properties are fast mode
      HValue* hash = BuildKeyedLookupCacheHash(receiver, key);

      ExternalReference cache_keys_ref =
          ExternalReference::keyed_lookup_cache_keys(isolate());
      HValue* cache_keys = Add<HConstant>(cache_keys_ref);

      HValue* map =
          Add<HLoadNamedField>(receiver, nullptr, HObjectAccess::ForMap());
      HValue* base_index = AddUncasted<HMul>(hash, Add<HConstant>(2));
      base_index->ClearFlag(HValue::kCanOverflow);
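
      // Probe scheme (descriptive sketch): cache_keys stores (map, key)
      // pairs flattened into one array, so bucket |hash| begins at index
      // 2 * hash. Each of the kEntriesPerBucket probes compares both fields
      // and, on a hit, reads the field offset from the parallel
      // cache_field_offsets table at index hash + probe.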

      HIfContinuation inline_or_runtime_continuation(
          graph()->CreateBasicBlock(), graph()->CreateBasicBlock());
      {
        IfBuilder lookup_ifs[KeyedLookupCache::kEntriesPerBucket];
        for (int probe = 0; probe < KeyedLookupCache::kEntriesPerBucket;
             ++probe) {
          IfBuilder* lookup_if = &lookup_ifs[probe];
          lookup_if->Initialize(this);
          int probe_base = probe * KeyedLookupCache::kEntryLength;
          HValue* map_index = AddUncasted<HAdd>(
              base_index,
              Add<HConstant>(probe_base + KeyedLookupCache::kMapIndex));
          map_index->ClearFlag(HValue::kCanOverflow);
          HValue* key_index = AddUncasted<HAdd>(
              base_index,
              Add<HConstant>(probe_base + KeyedLookupCache::kKeyIndex));
          key_index->ClearFlag(HValue::kCanOverflow);
          HValue* map_to_check =
              Add<HLoadKeyed>(cache_keys, map_index, nullptr, FAST_ELEMENTS,
                              NEVER_RETURN_HOLE, 0);
          lookup_if->If<HCompareObjectEqAndBranch>(map_to_check, map);
          lookup_if->And();
          HValue* key_to_check =
              Add<HLoadKeyed>(cache_keys, key_index, nullptr, FAST_ELEMENTS,
                              NEVER_RETURN_HOLE, 0);
          lookup_if->If<HCompareObjectEqAndBranch>(key_to_check, key);
          lookup_if->Then();
          {
            ExternalReference cache_field_offsets_ref =
                ExternalReference::keyed_lookup_cache_field_offsets(isolate());
            HValue* cache_field_offsets =
                Add<HConstant>(cache_field_offsets_ref);
            HValue* index = AddUncasted<HAdd>(hash, Add<HConstant>(probe));
            index->ClearFlag(HValue::kCanOverflow);
            HValue* property_index =
                Add<HLoadKeyed>(cache_field_offsets, index, nullptr,
                                EXTERNAL_INT32_ELEMENTS, NEVER_RETURN_HOLE, 0);
            Push(property_index);
          }
          lookup_if->Else();
        }
        for (int i = 0; i < KeyedLookupCache::kEntriesPerBucket; ++i) {
          lookup_ifs[i].JoinContinuation(&inline_or_runtime_continuation);
        }
      }

      IfBuilder inline_or_runtime(this, &inline_or_runtime_continuation);
      inline_or_runtime.Then();
      {
        // Found a cached index, load property inline.
        Push(Add<HLoadFieldByIndex>(receiver, Pop()));
      }
      inline_or_runtime.Else();
      {
        // KeyedLookupCache miss; call runtime.
        Add<HPushArguments>(receiver, key);
        Push(Add<HCallRuntime>(
            isolate()->factory()->empty_string(),
            Runtime::FunctionForId(Runtime::kKeyedGetProperty), 2));
      }
      inline_or_runtime.End();
    }
    if_dict_properties.End();
  }
  index_name_split.End();

  return Pop();
}


Handle<Code> KeyedLoadGenericStub::GenerateCode() {
  return DoGenerateCode(this);
}


void CodeStubGraphBuilderBase::TailCallHandler(HValue* receiver, HValue* name,
                                               HValue* array, HValue* map_index,
                                               HValue* slot, HValue* vector) {
  // The handler is at array[map_index + 1]. Compute this with a custom offset
  // to HLoadKeyed.
  int offset =
      GetDefaultHeaderSizeForElementsKind(FAST_ELEMENTS) + kPointerSize;
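  // Folding kPointerSize into the base offset shifts the access by exactly
  // one element, so indexing with map_index reads array[map_index + 1]
  // without a separate add instruction.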
  HValue* handler_code = Add<HLoadKeyed>(
      array, map_index, nullptr, FAST_ELEMENTS, NEVER_RETURN_HOLE, offset);
  TailCallHandler(receiver, name, slot, vector, handler_code);
}


void CodeStubGraphBuilderBase::TailCallHandler(HValue* receiver, HValue* name,
                                               HValue* slot, HValue* vector,
                                               HValue* handler_code) {
  VectorLoadICDescriptor descriptor(isolate());
  HValue* op_vals[] = {context(), receiver, name, slot, vector};
  Add<HCallWithDescriptor>(handler_code, 0, descriptor,
                           Vector<HValue*>(op_vals, 5), TAIL_CALL);
  // We never return here, it is a tail call.
}


void CodeStubGraphBuilderBase::TailCallMiss(HValue* receiver, HValue* name,
                                            HValue* slot, HValue* vector,
                                            bool keyed_load) {
  DCHECK(FLAG_vector_ics);
  Add<HTailCallThroughMegamorphicCache>(
      receiver, name, slot, vector,
      HTailCallThroughMegamorphicCache::ComputeFlags(keyed_load, true));
  // We never return here, it is a tail call.
}


void CodeStubGraphBuilderBase::HandleArrayCases(HValue* array, HValue* receiver,
                                                HValue* name, HValue* slot,
                                                HValue* vector,
                                                bool keyed_load) {
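  // Feedback layout assumed here (descriptive sketch): for a keyed load the
  // array is [name, weak cell 0, handler 0, weak cell 1, handler 1, ...];
  // for a non-keyed load the name slot is absent. The first weak cell is
  // therefore at index 1 or 0, and the scan below advances two elements at
  // a time.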
  IfBuilder if_receiver_heap_object(this);
  if_receiver_heap_object.IfNot<HIsSmiAndBranch>(receiver);
  if_receiver_heap_object.Then();
  {
    HConstant* constant_two = Add<HConstant>(2);
    HConstant* constant_three = Add<HConstant>(3);

    HValue* receiver_map = AddLoadMap(receiver, nullptr);
    HValue* start =
        keyed_load ? graph()->GetConstant1() : graph()->GetConstant0();
    HValue* weak_cell = Add<HLoadKeyed>(array, start, nullptr, FAST_ELEMENTS,
                                        ALLOW_RETURN_HOLE);
    // Load the weak cell value. It may be Smi(0) or a map; compare it
    // against receiver_map either way.
    HValue* array_map = Add<HLoadNamedField>(weak_cell, nullptr,
                                             HObjectAccess::ForWeakCellValue());

    IfBuilder if_correct_map(this);
    if_correct_map.If<HCompareObjectEqAndBranch>(receiver_map, array_map);
    if_correct_map.Then();
    { TailCallHandler(receiver, name, array, start, slot, vector); }
    if_correct_map.Else();
    {
      // If our array has more elements, the IC is polymorphic. Look for the
      // receiver map in the rest of the array.
      HValue* length = AddLoadFixedArrayLength(array, nullptr);
      LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement,
                          constant_two);
      start = keyed_load ? constant_three : constant_two;
      HValue* key = builder.BeginBody(start, length, Token::LT);
      {
        HValue* weak_cell = Add<HLoadKeyed>(array, key, nullptr, FAST_ELEMENTS,
                                            ALLOW_RETURN_HOLE);
        HValue* array_map = Add<HLoadNamedField>(
            weak_cell, nullptr, HObjectAccess::ForWeakCellValue());
        IfBuilder if_correct_poly_map(this);
        if_correct_poly_map.If<HCompareObjectEqAndBranch>(receiver_map,
                                                          array_map);
        if_correct_poly_map.Then();
        { TailCallHandler(receiver, name, array, key, slot, vector); }
      }
      builder.EndBody();
    }
    if_correct_map.End();
  }
}


template <>
HValue* CodeStubGraphBuilder<VectorLoadStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(VectorLoadICDescriptor::kReceiverIndex);
  HValue* name = GetParameter(VectorLoadICDescriptor::kNameIndex);
  HValue* slot = GetParameter(VectorLoadICDescriptor::kSlotIndex);
  HValue* vector = GetParameter(VectorLoadICDescriptor::kVectorIndex);
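
  // Dispatch summary (descriptive comment): a FixedArray in the feedback
  // slot means the IC is monomorphic or polymorphic, the megamorphic
  // sentinel symbol routes to the stub cache, and anything else falls
  // through to the miss handler at the end.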

  // If the feedback is an array, then the IC is in the monomorphic or
  // polymorphic state.
  HValue* feedback =
      Add<HLoadKeyed>(vector, slot, nullptr, FAST_ELEMENTS, ALLOW_RETURN_HOLE);
  IfBuilder array_checker(this);
  array_checker.If<HCompareMap>(feedback,
                                isolate()->factory()->fixed_array_map());
  array_checker.Then();
  { HandleArrayCases(feedback, receiver, name, slot, vector, false); }
  array_checker.Else();
  {
    // Is the IC megamorphic?
    IfBuilder mega_checker(this);
    HConstant* megamorphic_symbol =
        Add<HConstant>(isolate()->factory()->megamorphic_symbol());
    mega_checker.If<HCompareObjectEqAndBranch>(feedback, megamorphic_symbol);
    mega_checker.Then();
    {
      // Probe the stub cache.
      Add<HTailCallThroughMegamorphicCache>(
          receiver, name, slot, vector,
          HTailCallThroughMegamorphicCache::ComputeFlags(false, false));
    }
    mega_checker.End();
  }
  array_checker.End();

  TailCallMiss(receiver, name, slot, vector, false);
  return graph()->GetConstant0();
}


Handle<Code> VectorLoadStub::GenerateCode() { return DoGenerateCode(this); }


template <>
HValue* CodeStubGraphBuilder<VectorKeyedLoadStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(VectorLoadICDescriptor::kReceiverIndex);
  HValue* name = GetParameter(VectorLoadICDescriptor::kNameIndex);
  HValue* slot = GetParameter(VectorLoadICDescriptor::kSlotIndex);
  HValue* vector = GetParameter(VectorLoadICDescriptor::kVectorIndex);
  HConstant* zero = graph()->GetConstant0();
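
  // Keyed variant in brief (descriptive comment): as VectorLoadStub, except
  // that feedback arrays carry the recorded name at index 0 (Smi zero there
  // means "element handlers; name must be a smi") and the generic state
  // tail-calls the generic KeyedLoadIC rather than probing the stub cache.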

  // If the feedback is an array, then the IC is in the monomorphic or
  // polymorphic state.
  HValue* feedback =
      Add<HLoadKeyed>(vector, slot, nullptr, FAST_ELEMENTS, ALLOW_RETURN_HOLE);
  IfBuilder array_checker(this);
  array_checker.If<HCompareMap>(feedback,
                                isolate()->factory()->fixed_array_map());
  array_checker.Then();
  {
    // If feedback[0] is 0, then the IC has element handlers and name should be
    // a smi. If feedback[0] is a string, verify that it matches name.
    HValue* recorded_name = Add<HLoadKeyed>(feedback, zero, nullptr,
                                            FAST_ELEMENTS, ALLOW_RETURN_HOLE);

    IfBuilder recorded_name_is_zero(this);
    recorded_name_is_zero.If<HCompareObjectEqAndBranch>(recorded_name, zero);
    recorded_name_is_zero.Then();
    { Add<HCheckSmi>(name); }
    recorded_name_is_zero.Else();
    {
      IfBuilder strings_match(this);
      strings_match.IfNot<HCompareObjectEqAndBranch>(name, recorded_name);
      strings_match.Then();
      TailCallMiss(receiver, name, slot, vector, true);
      strings_match.End();
    }
    recorded_name_is_zero.End();

    HandleArrayCases(feedback, receiver, name, slot, vector, true);
  }
  array_checker.Else();
  {
    // Check if the IC is in generic state.
    IfBuilder generic_checker(this);
    HConstant* generic_symbol =
        Add<HConstant>(isolate()->factory()->generic_symbol());
    generic_checker.If<HCompareObjectEqAndBranch>(feedback, generic_symbol);
    generic_checker.Then();
    {
      // Tail-call to the generic KeyedLoadIC, treating it like a handler.
      Handle<Code> stub = KeyedLoadIC::generic_stub(isolate());
      HValue* constant_stub = Add<HConstant>(stub);
      LoadDescriptor descriptor(isolate());
      HValue* op_vals[] = {context(), receiver, name};
      Add<HCallWithDescriptor>(constant_stub, 0, descriptor,
                               Vector<HValue*>(op_vals, 3), TAIL_CALL);
      // We never return here, it is a tail call.
    }
    generic_checker.End();
  }
  array_checker.End();

  TailCallMiss(receiver, name, slot, vector, true);
  return zero;
}


Handle<Code> VectorKeyedLoadStub::GenerateCode() {
  return DoGenerateCode(this);
}


Handle<Code> MegamorphicLoadStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<MegamorphicLoadStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
  HValue* name = GetParameter(LoadDescriptor::kNameIndex);

  // We shouldn't generate this when FLAG_vector_ics is true because the
  // megamorphic case is handled as part of the default stub.
  DCHECK(!FLAG_vector_ics);

  // Probe the stub cache.
  Add<HTailCallThroughMegamorphicCache>(receiver, name);

  // We never continue.
  return graph()->GetConstant0();
}
} }  // namespace v8::internal