// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/code-stubs.h"

#include "src/bailout-reason.h"
#include "src/crankshaft/hydrogen.h"
#include "src/crankshaft/lithium.h"
#include "src/field-index.h"
#include "src/ic/ic.h"

namespace v8 {
namespace internal {


static LChunk* OptimizeGraph(HGraph* graph) {
  DisallowHeapAllocation no_allocation;
  DisallowHandleAllocation no_handles;
  DisallowHandleDereference no_deref;

  DCHECK(graph != NULL);
  BailoutReason bailout_reason = kNoReason;
  if (!graph->Optimize(&bailout_reason)) {
    FATAL(GetBailoutReason(bailout_reason));
  }
  LChunk* chunk = LChunk::NewChunk(graph);
  if (chunk == NULL) {
    FATAL(GetBailoutReason(graph->info()->bailout_reason()));
  }
  return chunk;
}


class CodeStubGraphBuilderBase : public HGraphBuilder {
 public:
  explicit CodeStubGraphBuilderBase(CompilationInfo* info)
      : HGraphBuilder(info),
        arguments_length_(NULL),
        info_(info),
        descriptor_(info->code_stub()),
        context_(NULL) {
    int parameter_count = GetParameterCount();
    parameters_.Reset(new HParameter*[parameter_count]);
  }
  virtual bool BuildGraph();

 protected:
  virtual HValue* BuildCodeStub() = 0;
  int GetParameterCount() const { return descriptor_.GetParameterCount(); }
  int GetRegisterParameterCount() const {
    return descriptor_.GetRegisterParameterCount();
  }
  HParameter* GetParameter(int parameter) {
    DCHECK(parameter < GetParameterCount());
    return parameters_[parameter];
  }
  Representation GetParameterRepresentation(int parameter) {
    return RepresentationFromType(descriptor_.GetParameterType(parameter));
  }
  bool IsParameterCountRegister(int index) const {
    return descriptor_.GetRegisterParameter(index)
        .is(descriptor_.stack_parameter_count());
  }
  HValue* GetArgumentsLength() {
    // This is initialized in BuildGraph()
    DCHECK(arguments_length_ != NULL);
    return arguments_length_;
  }
  CompilationInfo* info() { return info_; }
  CodeStub* stub() { return info_->code_stub(); }
  HContext* context() { return context_; }
  Isolate* isolate() { return info_->isolate(); }

  HLoadNamedField* BuildLoadNamedField(HValue* object, FieldIndex index);
  void BuildStoreNamedField(HValue* object, HValue* value, FieldIndex index,
                            Representation representation,
                            bool transition_to_field);

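  // Argument shape handled by the array constructor builders below: NONE for
  // the zero-argument entry point, SINGLE for the one-argument (length)
  // entry point, and MULTIPLE for a variable argument count.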
  enum ArgumentClass {
    NONE,
    SINGLE,
    MULTIPLE
  };

  HValue* UnmappedCase(HValue* elements, HValue* key, HValue* value);
  HValue* EmitKeyedSloppyArguments(HValue* receiver, HValue* key,
                                   HValue* value);

  HValue* BuildArrayConstructor(ElementsKind kind,
                                AllocationSiteOverrideMode override_mode,
                                ArgumentClass argument_class);
  HValue* BuildInternalArrayConstructor(ElementsKind kind,
                                        ArgumentClass argument_class);

  // BuildCheckAndInstallOptimizedCode emits code to install the optimized
  // function found in the optimized code map at map_index in js_function, if
  // the function at map_index matches the given native_context. Builder is
  // left in the "Then()" state after the install.
  void BuildCheckAndInstallOptimizedCode(HValue* js_function,
                                         HValue* native_context,
                                         IfBuilder* builder,
                                         HValue* optimized_map,
                                         HValue* map_index);
  void BuildInstallOptimizedCode(HValue* js_function, HValue* native_context,
                                 HValue* code_object, HValue* literals);
  void BuildInstallCode(HValue* js_function, HValue* shared_info);

  HInstruction* LoadFromOptimizedCodeMap(HValue* optimized_map,
                                         HValue* iterator,
                                         int field_offset);
  void BuildInstallFromOptimizedCodeMap(HValue* js_function,
                                        HValue* shared_info,
                                        HValue* native_context);

  HValue* BuildToString(HValue* input, bool convert);
  HValue* BuildToPrimitive(HValue* input, HValue* input_map);

 private:
  HValue* BuildArraySingleArgumentConstructor(JSArrayBuilder* builder);
  HValue* BuildArrayNArgumentsConstructor(JSArrayBuilder* builder,
                                          ElementsKind kind);

  base::SmartArrayPointer<HParameter*> parameters_;
  HValue* arguments_length_;
  CompilationInfo* info_;
  CodeStubDescriptor descriptor_;
  HContext* context_;
};


bool CodeStubGraphBuilderBase::BuildGraph() {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  if (FLAG_trace_hydrogen_stubs) {
    const char* name = CodeStub::MajorName(stub()->MajorKey());
    PrintF("-----------------------------------------------------------\n");
    PrintF("Compiling stub %s using hydrogen\n", name);
    isolate()->GetHTracer()->TraceCompilation(info());
  }

  int param_count = GetParameterCount();
  int register_param_count = GetRegisterParameterCount();
  HEnvironment* start_environment = graph()->start_environment();
  HBasicBlock* next_block = CreateBasicBlock(start_environment);
  Goto(next_block);
  next_block->SetJoinId(BailoutId::StubEntry());
  set_current_block(next_block);

  bool runtime_stack_params = descriptor_.stack_parameter_count().is_valid();
  HInstruction* stack_parameter_count = NULL;
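  // The first |register_param_count| parameters arrive in registers and are
  // bound into the start environment; any remaining ones live on the stack.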
  for (int i = 0; i < param_count; ++i) {
    Representation r = GetParameterRepresentation(i);
    HParameter* param;
    if (i >= register_param_count) {
      param = Add<HParameter>(i - register_param_count,
                              HParameter::STACK_PARAMETER, r);
    } else {
      param = Add<HParameter>(i, HParameter::REGISTER_PARAMETER, r);
      start_environment->Bind(i, param);
    }
    parameters_[i] = param;
    if (i < register_param_count && IsParameterCountRegister(i)) {
      param->set_type(HType::Smi());
      stack_parameter_count = param;
      arguments_length_ = stack_parameter_count;
    }
  }

  DCHECK(!runtime_stack_params || arguments_length_ != NULL);
  if (!runtime_stack_params) {
    stack_parameter_count =
        Add<HConstant>(param_count - register_param_count - 1);
    // graph()->GetConstantMinus1();
    arguments_length_ = graph()->GetConstant0();
  }

  context_ = Add<HContext>();
  start_environment->BindContext(context_);

  Add<HSimulate>(BailoutId::StubEntry());

  NoObservableSideEffectsScope no_effects(this);

  HValue* return_value = BuildCodeStub();

  // We might have extra expressions to pop from the stack in addition to the
  // arguments above.
  HInstruction* stack_pop_count = stack_parameter_count;
  if (descriptor_.function_mode() == JS_FUNCTION_STUB_MODE) {
    if (!stack_parameter_count->IsConstant() &&
        descriptor_.hint_stack_parameter_count() < 0) {
      HInstruction* constant_one = graph()->GetConstant1();
      stack_pop_count = AddUncasted<HAdd>(stack_parameter_count, constant_one);
      stack_pop_count->ClearFlag(HValue::kCanOverflow);
      // TODO(mvstanton): verify that stack_parameter_count+1 really fits in a
      // smi.
    } else {
      int count = descriptor_.hint_stack_parameter_count();
      stack_pop_count = Add<HConstant>(count);
    }
  }

  if (current_block() != NULL) {
    HReturn* hreturn_instruction = New<HReturn>(return_value,
                                                stack_pop_count);
    FinishCurrentBlock(hreturn_instruction);
  }
  return true;
}


template <class Stub>
class CodeStubGraphBuilder: public CodeStubGraphBuilderBase {
 public:
  explicit CodeStubGraphBuilder(CompilationInfo* info)
      : CodeStubGraphBuilderBase(info) {}

 protected:
  virtual HValue* BuildCodeStub() {
    if (casted_stub()->IsUninitialized()) {
      return BuildCodeUninitializedStub();
    } else {
      return BuildCodeInitializedStub();
    }
  }

  virtual HValue* BuildCodeInitializedStub() {
    UNIMPLEMENTED();
    return NULL;
  }

  virtual HValue* BuildCodeUninitializedStub() {
    // Force a deopt that falls back to the runtime.
    HValue* undefined = graph()->GetConstantUndefined();
    IfBuilder builder(this);
    builder.IfNot<HCompareObjectEqAndBranch, HValue*>(undefined, undefined);
    builder.Then();
    builder.ElseDeopt(Deoptimizer::kForcedDeoptToRuntime);
    return undefined;
  }

  Stub* casted_stub() { return static_cast<Stub*>(stub()); }
};


Handle<Code> HydrogenCodeStub::GenerateLightweightMissCode(
    ExternalReference miss) {
  Factory* factory = isolate()->factory();

  // Generate the new code.
  MacroAssembler masm(isolate(), NULL, 256, CodeObjectRequired::kYes);

  {
    // Update the static counter each time a new code stub is generated.
    isolate()->counters()->code_stubs()->Increment();

    // Generate the code for the stub.
    masm.set_generating_stub(true);
    // TODO(yangguo): remove this once we can serialize IC stubs.
    masm.enable_serializer();
    NoCurrentFrameScope scope(&masm);
    GenerateLightweightMiss(&masm, miss);
  }

  // Create the code object.
  CodeDesc desc;
  masm.GetCode(&desc);

  // Copy the generated code into a heap object.
  Code::Flags flags = Code::ComputeFlags(
      GetCodeKind(),
      GetICState(),
      GetExtraICState(),
      GetStubType());
  Handle<Code> new_object = factory->NewCode(
      desc, flags, masm.CodeObject(), NeedsImmovableCode());
  return new_object;
}


template <class Stub>
static Handle<Code> DoGenerateCode(Stub* stub) {
  Isolate* isolate = stub->isolate();
  CodeStubDescriptor descriptor(stub);

  // If we are uninitialized we can use a light-weight stub to enter
  // the runtime that is significantly faster than using the standard
  // stub-failure deopt mechanism.
  if (stub->IsUninitialized() && descriptor.has_miss_handler()) {
    DCHECK(!descriptor.stack_parameter_count().is_valid());
    return stub->GenerateLightweightMissCode(descriptor.miss_handler());
  }
  base::ElapsedTimer timer;
  if (FLAG_profile_hydrogen_code_stub_compilation) {
    timer.Start();
  }
  Zone zone;
  CompilationInfo info(stub, isolate, &zone);
  CodeStubGraphBuilder<Stub> builder(&info);
  LChunk* chunk = OptimizeGraph(builder.CreateGraph());
  Handle<Code> code = chunk->Codegen();
  if (FLAG_profile_hydrogen_code_stub_compilation) {
    OFStream os(stdout);
    os << "[Lazy compilation of " << stub << " took "
       << timer.Elapsed().InMillisecondsF() << " ms]" << std::endl;
  }
  return code;
}


template <>
HValue* CodeStubGraphBuilder<NumberToStringStub>::BuildCodeStub() {
  info()->MarkAsSavesCallerDoubles();
  HValue* number = GetParameter(NumberToStringStub::kNumber);
  return BuildNumberToString(number, Type::Number(zone()));
}


Handle<Code> NumberToStringStub::GenerateCode() {
  return DoGenerateCode(this);
}


// Returns the type string of a value; see ECMA-262, 11.4.3 (p 47).
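// The checks below mirror the spec's typeof table: Smis and heap numbers
// yield "number", then strings, oddballs (which carry their own type-of
// string), symbols, callable maps ("function"), the SIMD types, and finally
// undetectable objects ("undefined") before defaulting to "object".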
template <>
HValue* CodeStubGraphBuilder<TypeofStub>::BuildCodeStub() {
  Factory* factory = isolate()->factory();
  HConstant* number_string = Add<HConstant>(factory->number_string());
  HValue* object = GetParameter(TypeofStub::kObject);

  IfBuilder is_smi(this);
  HValue* smi_check = is_smi.If<HIsSmiAndBranch>(object);
  is_smi.Then();
  { Push(number_string); }
  is_smi.Else();
  {
    IfBuilder is_number(this);
    is_number.If<HCompareMap>(object, isolate()->factory()->heap_number_map());
    is_number.Then();
    { Push(number_string); }
    is_number.Else();
    {
      HValue* map = AddLoadMap(object, smi_check);
      HValue* instance_type = Add<HLoadNamedField>(
          map, nullptr, HObjectAccess::ForMapInstanceType());
      IfBuilder is_string(this);
      is_string.If<HCompareNumericAndBranch>(
          instance_type, Add<HConstant>(FIRST_NONSTRING_TYPE), Token::LT);
      is_string.Then();
      { Push(Add<HConstant>(factory->string_string())); }
      is_string.Else();
      {
        HConstant* object_string = Add<HConstant>(factory->object_string());
        IfBuilder is_oddball(this);
        is_oddball.If<HCompareNumericAndBranch>(
            instance_type, Add<HConstant>(ODDBALL_TYPE), Token::EQ);
        is_oddball.Then();
        {
          Push(Add<HLoadNamedField>(object, nullptr,
                                    HObjectAccess::ForOddballTypeOf()));
        }
        is_oddball.Else();
        {
          IfBuilder is_symbol(this);
          is_symbol.If<HCompareNumericAndBranch>(
              instance_type, Add<HConstant>(SYMBOL_TYPE), Token::EQ);
          is_symbol.Then();
          { Push(Add<HConstant>(factory->symbol_string())); }
          is_symbol.Else();
          {
            HValue* bit_field = Add<HLoadNamedField>(
                map, nullptr, HObjectAccess::ForMapBitField());
            HValue* bit_field_masked = AddUncasted<HBitwise>(
                Token::BIT_AND, bit_field,
                Add<HConstant>((1 << Map::kIsCallable) |
                               (1 << Map::kIsUndetectable)));
            IfBuilder is_function(this);
            is_function.If<HCompareNumericAndBranch>(
                bit_field_masked, Add<HConstant>(1 << Map::kIsCallable),
                Token::EQ);
            is_function.Then();
            { Push(Add<HConstant>(factory->function_string())); }
            is_function.Else();
            {
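// Each SIMD128_BUILDER_OPEN expansion below starts a type check and opens a
// brace; the matching SIMD128_BUILDER_CLOSE expansion closes it, so the
// per-type checks nest and unmatched values fall through to the
// undetectable-object check.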
#define SIMD128_BUILDER_OPEN(TYPE, Type, type, lane_count, lane_type) \
  IfBuilder is_##type(this);                                          \
  is_##type.If<HCompareObjectEqAndBranch>(                            \
      map, Add<HConstant>(factory->type##_map()));                    \
  is_##type.Then();                                                   \
  { Push(Add<HConstant>(factory->type##_string())); }                 \
  is_##type.Else(); {
              SIMD128_TYPES(SIMD128_BUILDER_OPEN)
#undef SIMD128_BUILDER_OPEN
              // Is it an undetectable object?
              IfBuilder is_undetectable(this);
              is_undetectable.If<HCompareNumericAndBranch>(
                  bit_field_masked, graph()->GetConstant0(), Token::NE);
              is_undetectable.Then();
              {
                // typeof an undetectable object is 'undefined'.
                Push(Add<HConstant>(factory->undefined_string()));
              }
              is_undetectable.Else();
              {
                // For any kind of object not handled above, the spec rule for
                // host objects gives that it is okay to return "object".
                Push(object_string);
              }
#define SIMD128_BUILDER_CLOSE(TYPE, Type, type, lane_count, lane_type) }
              SIMD128_TYPES(SIMD128_BUILDER_CLOSE)
#undef SIMD128_BUILDER_CLOSE
            }
            is_function.End();
          }
          is_symbol.End();
        }
        is_oddball.End();
      }
      is_string.End();
    }
    is_number.End();
  }
  is_smi.End();

  return environment()->Pop();
}


Handle<Code> TypeofStub::GenerateCode() { return DoGenerateCode(this); }


template <>
HValue* CodeStubGraphBuilder<FastCloneRegExpStub>::BuildCodeStub() {
  HValue* closure = GetParameter(0);
  HValue* literal_index = GetParameter(1);

  // This stub is very performance sensitive, the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();

  HValue* literals_array = Add<HLoadNamedField>(
      closure, nullptr, HObjectAccess::ForLiteralsPointer());
  HInstruction* boilerplate = Add<HLoadKeyed>(
      literals_array, literal_index, nullptr, nullptr, FAST_ELEMENTS,
      NEVER_RETURN_HOLE, LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag);

  IfBuilder if_notundefined(this);
  if_notundefined.IfNot<HCompareObjectEqAndBranch>(
      boilerplate, graph()->GetConstantUndefined());
  if_notundefined.Then();
  {
    int result_size =
        JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
    HValue* result =
        Add<HAllocate>(Add<HConstant>(result_size), HType::JSObject(),
                       NOT_TENURED, JS_REGEXP_TYPE);
    Add<HStoreNamedField>(
        result, HObjectAccess::ForMap(),
        Add<HLoadNamedField>(boilerplate, nullptr, HObjectAccess::ForMap()));
    Add<HStoreNamedField>(
        result, HObjectAccess::ForPropertiesPointer(),
        Add<HLoadNamedField>(boilerplate, nullptr,
                             HObjectAccess::ForPropertiesPointer()));
    Add<HStoreNamedField>(
        result, HObjectAccess::ForElementsPointer(),
        Add<HLoadNamedField>(boilerplate, nullptr,
                             HObjectAccess::ForElementsPointer()));
    for (int offset = JSObject::kHeaderSize; offset < result_size;
         offset += kPointerSize) {
      HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(offset);
      Add<HStoreNamedField>(result, access,
                            Add<HLoadNamedField>(boilerplate, nullptr, access));
    }
    Push(result);
  }
  if_notundefined.ElseDeopt(Deoptimizer::kUninitializedBoilerplateInFastClone);
  if_notundefined.End();

  return Pop();
}


Handle<Code> FastCloneRegExpStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<FastCloneShallowArrayStub>::BuildCodeStub() {
  Factory* factory = isolate()->factory();
  HValue* undefined = graph()->GetConstantUndefined();
  AllocationSiteMode alloc_site_mode = casted_stub()->allocation_site_mode();
  HValue* closure = GetParameter(0);
  HValue* literal_index = GetParameter(1);

  // This stub is very performance sensitive, the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();

  HValue* literals_array = Add<HLoadNamedField>(
      closure, nullptr, HObjectAccess::ForLiteralsPointer());

  HInstruction* allocation_site = Add<HLoadKeyed>(
      literals_array, literal_index, nullptr, nullptr, FAST_ELEMENTS,
      NEVER_RETURN_HOLE, LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag);
  IfBuilder checker(this);
  checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
                                                    undefined);
  checker.Then();

  HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
      AllocationSite::kTransitionInfoOffset);
  HInstruction* boilerplate =
      Add<HLoadNamedField>(allocation_site, nullptr, access);
  HValue* elements = AddLoadElements(boilerplate);
  HValue* capacity = AddLoadFixedArrayLength(elements);
  IfBuilder zero_capacity(this);
  zero_capacity.If<HCompareNumericAndBranch>(capacity, graph()->GetConstant0(),
                                             Token::EQ);
  zero_capacity.Then();
  Push(BuildCloneShallowArrayEmpty(boilerplate,
                                   allocation_site,
                                   alloc_site_mode));
  zero_capacity.Else();
  IfBuilder if_fixed_cow(this);
  if_fixed_cow.If<HCompareMap>(elements, factory->fixed_cow_array_map());
  if_fixed_cow.Then();
  Push(BuildCloneShallowArrayCow(boilerplate,
                                 allocation_site,
                                 alloc_site_mode,
                                 FAST_ELEMENTS));
  if_fixed_cow.Else();
  IfBuilder if_fixed(this);
  if_fixed.If<HCompareMap>(elements, factory->fixed_array_map());
  if_fixed.Then();
  Push(BuildCloneShallowArrayNonEmpty(boilerplate,
                                      allocation_site,
                                      alloc_site_mode,
                                      FAST_ELEMENTS));

  if_fixed.Else();
  Push(BuildCloneShallowArrayNonEmpty(boilerplate,
                                      allocation_site,
                                      alloc_site_mode,
                                      FAST_DOUBLE_ELEMENTS));
  if_fixed.End();
  if_fixed_cow.End();
  zero_capacity.End();

  checker.ElseDeopt(Deoptimizer::kUninitializedBoilerplateLiterals);
  checker.End();

  return environment()->Pop();
}


Handle<Code> FastCloneShallowArrayStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<FastCloneShallowObjectStub>::BuildCodeStub() {
  HValue* undefined = graph()->GetConstantUndefined();
  HValue* closure = GetParameter(0);
  HValue* literal_index = GetParameter(1);

  HValue* literals_array = Add<HLoadNamedField>(
      closure, nullptr, HObjectAccess::ForLiteralsPointer());

  HInstruction* allocation_site = Add<HLoadKeyed>(
      literals_array, literal_index, nullptr, nullptr, FAST_ELEMENTS,
      NEVER_RETURN_HOLE, LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag);

  IfBuilder checker(this);
  checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
                                                    undefined);
  checker.And();

  HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
      AllocationSite::kTransitionInfoOffset);
  HInstruction* boilerplate =
      Add<HLoadNamedField>(allocation_site, nullptr, access);

  int length = casted_stub()->length();
  if (length == 0) {
    // Empty objects have some slack added to them.
    length = JSObject::kInitialGlobalObjectUnusedPropertiesCount;
  }
  int size = JSObject::kHeaderSize + length * kPointerSize;
  int object_size = size;
  if (FLAG_allocation_site_pretenuring) {
    size += AllocationMemento::kSize;
  }

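  // The fast clone below only handles boilerplates whose instance size is
  // exactly |object_size|; the checker deopts for any other size.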
  HValue* boilerplate_map =
      Add<HLoadNamedField>(boilerplate, nullptr, HObjectAccess::ForMap());
  HValue* boilerplate_size = Add<HLoadNamedField>(
      boilerplate_map, nullptr, HObjectAccess::ForMapInstanceSize());
  HValue* size_in_words = Add<HConstant>(object_size >> kPointerSizeLog2);
  checker.If<HCompareNumericAndBranch>(boilerplate_size,
                                       size_in_words, Token::EQ);
  checker.Then();

  HValue* size_in_bytes = Add<HConstant>(size);

  HInstruction* object = Add<HAllocate>(size_in_bytes, HType::JSObject(),
                                        NOT_TENURED, JS_OBJECT_TYPE);

  for (int i = 0; i < object_size; i += kPointerSize) {
    HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(i);
    Add<HStoreNamedField>(object, access,
                          Add<HLoadNamedField>(boilerplate, nullptr, access));
  }

  DCHECK(FLAG_allocation_site_pretenuring || (size == object_size));
  if (FLAG_allocation_site_pretenuring) {
    BuildCreateAllocationMemento(
        object, Add<HConstant>(object_size), allocation_site);
  }

  environment()->Push(object);
  checker.ElseDeopt(Deoptimizer::kUninitializedBoilerplateInFastClone);
  checker.End();

  return environment()->Pop();
}


Handle<Code> FastCloneShallowObjectStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<CreateAllocationSiteStub>::BuildCodeStub() {
  // This stub is performance sensitive, the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();

  HValue* size = Add<HConstant>(AllocationSite::kSize);
  HInstruction* object = Add<HAllocate>(size, HType::JSObject(), TENURED,
                                        JS_OBJECT_TYPE);

  // Store the map
  Handle<Map> allocation_site_map = isolate()->factory()->allocation_site_map();
  AddStoreMapConstant(object, allocation_site_map);

  // Store the payload (smi elements kind)
  HValue* initial_elements_kind = Add<HConstant>(GetInitialFastElementsKind());
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kTransitionInfoOffset),
                        initial_elements_kind);

  // Unlike literals, constructed arrays don't have nested sites
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kNestedSiteOffset),
                        graph()->GetConstant0());

  // Pretenuring calculation field.
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kPretenureDataOffset),
                        graph()->GetConstant0());

  // Pretenuring memento creation count field.
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kPretenureCreateCountOffset),
                        graph()->GetConstant0());

  // Store an empty fixed array for the code dependency.
  HConstant* empty_fixed_array =
      Add<HConstant>(isolate()->factory()->empty_fixed_array());
  Add<HStoreNamedField>(
      object,
      HObjectAccess::ForAllocationSiteOffset(
          AllocationSite::kDependentCodeOffset),
      empty_fixed_array);

  // Link the object to the allocation site list
  HValue* site_list = Add<HConstant>(
      ExternalReference::allocation_sites_list_address(isolate()));
  HValue* site = Add<HLoadNamedField>(site_list, nullptr,
                                      HObjectAccess::ForAllocationSiteList());
  // TODO(mvstanton): This is a store to a weak pointer, which we may want to
  // mark as such in order to skip the write barrier, once we have a unified
  // system for weakness. For now we decided to keep it like this because having
  // an initial write barrier backed store makes this pointer strong until the
  // next GC, and allocation sites are designed to survive several GCs anyway.
  Add<HStoreNamedField>(
      object,
      HObjectAccess::ForAllocationSiteOffset(AllocationSite::kWeakNextOffset),
      site);
  Add<HStoreNamedField>(site_list, HObjectAccess::ForAllocationSiteList(),
                        object);

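  // Record the new site in the feedback vector slot for this call site.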
  HInstruction* feedback_vector = GetParameter(0);
  HInstruction* slot = GetParameter(1);
  Add<HStoreKeyed>(feedback_vector, slot, object, nullptr, FAST_ELEMENTS,
                   INITIALIZING_STORE);
  return feedback_vector;
}


Handle<Code> CreateAllocationSiteStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<CreateWeakCellStub>::BuildCodeStub() {
  // This stub is performance sensitive, the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();

  HValue* size = Add<HConstant>(WeakCell::kSize);
  HInstruction* object =
      Add<HAllocate>(size, HType::JSObject(), TENURED, JS_OBJECT_TYPE);

  Handle<Map> weak_cell_map = isolate()->factory()->weak_cell_map();
  AddStoreMapConstant(object, weak_cell_map);

  HInstruction* value = GetParameter(CreateWeakCellDescriptor::kValueIndex);
  Add<HStoreNamedField>(object, HObjectAccess::ForWeakCellValue(), value);
  Add<HStoreNamedField>(object, HObjectAccess::ForWeakCellNext(),
                        graph()->GetConstantHole());

  HInstruction* feedback_vector =
      GetParameter(CreateWeakCellDescriptor::kVectorIndex);
  HInstruction* slot = GetParameter(CreateWeakCellDescriptor::kSlotIndex);
  Add<HStoreKeyed>(feedback_vector, slot, object, nullptr, FAST_ELEMENTS,
                   INITIALIZING_STORE);
  return graph()->GetConstant0();
}


Handle<Code> CreateWeakCellStub::GenerateCode() { return DoGenerateCode(this); }


template <>
HValue* CodeStubGraphBuilder<LoadScriptContextFieldStub>::BuildCodeStub() {
  int context_index = casted_stub()->context_index();
  int slot_index = casted_stub()->slot_index();

  HValue* script_context = BuildGetScriptContext(context_index);
  return Add<HLoadNamedField>(script_context, nullptr,
                              HObjectAccess::ForContextSlot(slot_index));
}


Handle<Code> LoadScriptContextFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<StoreScriptContextFieldStub>::BuildCodeStub() {
  int context_index = casted_stub()->context_index();
  int slot_index = casted_stub()->slot_index();

  HValue* script_context = BuildGetScriptContext(context_index);
  Add<HStoreNamedField>(script_context,
                        HObjectAccess::ForContextSlot(slot_index),
                        GetParameter(2), STORE_TO_INITIALIZED_ENTRY);
  return GetParameter(2);
}


Handle<Code> StoreScriptContextFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<GrowArrayElementsStub>::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  if (IsFastDoubleElementsKind(kind)) {
    info()->MarkAsSavesCallerDoubles();
  }

  HValue* object = GetParameter(GrowArrayElementsDescriptor::kObjectIndex);
  HValue* key = GetParameter(GrowArrayElementsDescriptor::kKeyIndex);

  HValue* elements = AddLoadElements(object);
  HValue* current_capacity = Add<HLoadNamedField>(
      elements, nullptr, HObjectAccess::ForFixedArrayLength());

  HValue* length =
      casted_stub()->is_js_array()
          ? Add<HLoadNamedField>(object, static_cast<HValue*>(NULL),
                                 HObjectAccess::ForArrayLength(kind))
          : current_capacity;

  return BuildCheckAndGrowElementsCapacity(object, elements, kind, length,
                                           current_capacity, key);
}


Handle<Code> GrowArrayElementsStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<LoadFastElementStub>::BuildCodeStub() {
  LoadKeyedHoleMode hole_mode = casted_stub()->convert_hole_to_undefined()
                                    ? CONVERT_HOLE_TO_UNDEFINED
                                    : NEVER_RETURN_HOLE;

  HInstruction* load = BuildUncheckedMonomorphicElementAccess(
      GetParameter(LoadDescriptor::kReceiverIndex),
      GetParameter(LoadDescriptor::kNameIndex), NULL,
      casted_stub()->is_js_array(), casted_stub()->elements_kind(), LOAD,
      hole_mode, STANDARD_STORE);
  return load;
}


Handle<Code> LoadFastElementStub::GenerateCode() {
  return DoGenerateCode(this);
}


HLoadNamedField* CodeStubGraphBuilderBase::BuildLoadNamedField(
    HValue* object, FieldIndex index) {
  Representation representation = index.is_double()
      ? Representation::Double()
      : Representation::Tagged();
  int offset = index.offset();
  HObjectAccess access = index.is_inobject()
      ? HObjectAccess::ForObservableJSObjectOffset(offset, representation)
      : HObjectAccess::ForBackingStoreOffset(offset, representation);
  if (index.is_double() &&
      (!FLAG_unbox_double_fields || !index.is_inobject())) {
    // Load the heap number.
    object = Add<HLoadNamedField>(
        object, nullptr, access.WithRepresentation(Representation::Tagged()));
    // Load the double value from it.
    access = HObjectAccess::ForHeapNumberValue();
  }
  return Add<HLoadNamedField>(object, nullptr, access);
}


template<>
HValue* CodeStubGraphBuilder<LoadFieldStub>::BuildCodeStub() {
  return BuildLoadNamedField(GetParameter(0), casted_stub()->index());
}


Handle<Code> LoadFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<ArrayBufferViewLoadFieldStub>::BuildCodeStub() {
  return BuildArrayBufferViewFieldAccessor(GetParameter(0), nullptr,
                                           casted_stub()->index());
}


Handle<Code> ArrayBufferViewLoadFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<LoadConstantStub>::BuildCodeStub() {
  HValue* map = AddLoadMap(GetParameter(0), NULL);
  HObjectAccess descriptors_access = HObjectAccess::ForObservableJSObjectOffset(
      Map::kDescriptorsOffset, Representation::Tagged());
  HValue* descriptors = Add<HLoadNamedField>(map, nullptr, descriptors_access);
  HObjectAccess value_access = HObjectAccess::ForObservableJSObjectOffset(
      DescriptorArray::GetValueOffset(casted_stub()->constant_index()));
  return Add<HLoadNamedField>(descriptors, nullptr, value_access);
}


Handle<Code> LoadConstantStub::GenerateCode() { return DoGenerateCode(this); }


HValue* CodeStubGraphBuilderBase::UnmappedCase(HValue* elements, HValue* key,
                                               HValue* value) {
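  // A NULL |value| means this is a load: the loaded value is returned. For
  // stores, |value| is written and the NULL result is ignored by the caller.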
890 HValue* result = NULL;
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400891 HInstruction* backing_store =
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000892 Add<HLoadKeyed>(elements, graph()->GetConstant1(), nullptr, nullptr,
893 FAST_ELEMENTS, ALLOW_RETURN_HOLE);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000894 Add<HCheckMaps>(backing_store, isolate()->factory()->fixed_array_map());
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400895 HValue* backing_store_length = Add<HLoadNamedField>(
896 backing_store, nullptr, HObjectAccess::ForFixedArrayLength());
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000897 IfBuilder in_unmapped_range(this);
898 in_unmapped_range.If<HCompareNumericAndBranch>(key, backing_store_length,
899 Token::LT);
900 in_unmapped_range.Then();
901 {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000902 if (value == NULL) {
903 result = Add<HLoadKeyed>(backing_store, key, nullptr, nullptr,
904 FAST_HOLEY_ELEMENTS, NEVER_RETURN_HOLE);
905 } else {
906 Add<HStoreKeyed>(backing_store, key, value, nullptr, FAST_HOLEY_ELEMENTS);
907 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000908 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000909 in_unmapped_range.ElseDeopt(Deoptimizer::kOutsideOfRange);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000910 in_unmapped_range.End();
911 return result;
912}
913
914
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000915HValue* CodeStubGraphBuilderBase::EmitKeyedSloppyArguments(HValue* receiver,
916 HValue* key,
917 HValue* value) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000918 // Mapped arguments are actual arguments. Unmapped arguments are values added
919 // to the arguments object after it was created for the call. Mapped arguments
920 // are stored in the context at indexes given by elements[key + 2]. Unmapped
921 // arguments are stored as regular indexed properties in the arguments array,
922 // held at elements[1]. See NewSloppyArguments() in runtime.cc for a detailed
923 // look at argument object construction.
924 //
925 // The sloppy arguments elements array has a special format:
926 //
927 // 0: context
928 // 1: unmapped arguments array
929 // 2: mapped_index0,
930 // 3: mapped_index1,
931 // ...
932 //
933 // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments).
934 // If key + 2 >= elements.length then attempt to look in the unmapped
935 // arguments array (given by elements[1]) and return the value at key, missing
936 // to the runtime if the unmapped arguments array is not a fixed array or if
937 // key >= unmapped_arguments_array.length.
938 //
939 // Otherwise, t = elements[key + 2]. If t is the hole, then look up the value
940 // in the unmapped arguments array, as described above. Otherwise, t is a Smi
941 // index into the context array given at elements[0]. Return the value at
942 // context[t].
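  //
  // For example (illustrative only): for function f(a, b) called as
  // f(1, 2, 3), length is 2 + min(3, 2) = 4, so keys 0 and 1 are resolved
  // through the context indices at elements[2] and elements[3], while key 2
  // is only found in the unmapped arguments array at elements[1].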

  bool is_load = value == NULL;

  key = AddUncasted<HForceRepresentation>(key, Representation::Smi());
  IfBuilder positive_smi(this);
  positive_smi.If<HCompareNumericAndBranch>(key, graph()->GetConstant0(),
                                            Token::LT);
  positive_smi.ThenDeopt(Deoptimizer::kKeyIsNegative);
  positive_smi.End();

  HValue* constant_two = Add<HConstant>(2);
  HValue* elements = AddLoadElements(receiver, nullptr);
  HValue* elements_length = Add<HLoadNamedField>(
      elements, nullptr, HObjectAccess::ForFixedArrayLength());
  HValue* adjusted_length = AddUncasted<HSub>(elements_length, constant_two);
  IfBuilder in_range(this);
  in_range.If<HCompareNumericAndBranch>(key, adjusted_length, Token::LT);
  in_range.Then();
  {
    HValue* index = AddUncasted<HAdd>(key, constant_two);
    HInstruction* mapped_index =
        Add<HLoadKeyed>(elements, index, nullptr, nullptr, FAST_HOLEY_ELEMENTS,
                        ALLOW_RETURN_HOLE);

    IfBuilder is_valid(this);
    is_valid.IfNot<HCompareObjectEqAndBranch>(mapped_index,
                                              graph()->GetConstantHole());
    is_valid.Then();
    {
      // TODO(mvstanton): I'd like to assert from this point that if the
      // mapped_index is not the hole, it is indeed a smi. An unnecessary
      // smi check is being emitted.
      HValue* the_context = Add<HLoadKeyed>(elements, graph()->GetConstant0(),
                                            nullptr, nullptr, FAST_ELEMENTS);
      STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize);
      if (is_load) {
        HValue* result =
            Add<HLoadKeyed>(the_context, mapped_index, nullptr, nullptr,
                            FAST_ELEMENTS, ALLOW_RETURN_HOLE);
        environment()->Push(result);
      } else {
        DCHECK(value != NULL);
        Add<HStoreKeyed>(the_context, mapped_index, value, nullptr,
                         FAST_ELEMENTS);
        environment()->Push(value);
      }
    }
    is_valid.Else();
    {
      HValue* result = UnmappedCase(elements, key, value);
      environment()->Push(is_load ? result : value);
    }
    is_valid.End();
  }
  in_range.Else();
  {
    HValue* result = UnmappedCase(elements, key, value);
    environment()->Push(is_load ? result : value);
  }
  in_range.End();

  return environment()->Pop();
}


template <>
HValue* CodeStubGraphBuilder<KeyedLoadSloppyArgumentsStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
  HValue* key = GetParameter(LoadDescriptor::kNameIndex);

  return EmitKeyedSloppyArguments(receiver, key, NULL);
}


Handle<Code> KeyedLoadSloppyArgumentsStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<KeyedStoreSloppyArgumentsStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(StoreDescriptor::kReceiverIndex);
  HValue* key = GetParameter(StoreDescriptor::kNameIndex);
  HValue* value = GetParameter(StoreDescriptor::kValueIndex);

  return EmitKeyedSloppyArguments(receiver, key, value);
}


Handle<Code> KeyedStoreSloppyArgumentsStub::GenerateCode() {
  return DoGenerateCode(this);
}


void CodeStubGraphBuilderBase::BuildStoreNamedField(
    HValue* object, HValue* value, FieldIndex index,
    Representation representation, bool transition_to_field) {
  DCHECK(!index.is_double() || representation.IsDouble());
  int offset = index.offset();
  HObjectAccess access =
      index.is_inobject()
          ? HObjectAccess::ForObservableJSObjectOffset(offset, representation)
          : HObjectAccess::ForBackingStoreOffset(offset, representation);

  if (representation.IsDouble()) {
    if (!FLAG_unbox_double_fields || !index.is_inobject()) {
      HObjectAccess heap_number_access =
          access.WithRepresentation(Representation::Tagged());
      if (transition_to_field) {
        // The store requires a mutable HeapNumber to be allocated.
        NoObservableSideEffectsScope no_side_effects(this);
        HInstruction* heap_number_size = Add<HConstant>(HeapNumber::kSize);

        // TODO(hpayer): Allocation site pretenuring support.
        HInstruction* heap_number =
            Add<HAllocate>(heap_number_size, HType::HeapObject(), NOT_TENURED,
                           MUTABLE_HEAP_NUMBER_TYPE);
        AddStoreMapConstant(heap_number,
                            isolate()->factory()->mutable_heap_number_map());
        Add<HStoreNamedField>(heap_number, HObjectAccess::ForHeapNumberValue(),
                              value);
        // Store the new mutable heap number into the object.
        access = heap_number_access;
        value = heap_number;
      } else {
        // Load the heap number.
        object = Add<HLoadNamedField>(object, nullptr, heap_number_access);
        // Store the double value into it.
        access = HObjectAccess::ForHeapNumberValue();
      }
    }
  } else if (representation.IsHeapObject()) {
    BuildCheckHeapObject(value);
  }

  Add<HStoreNamedField>(object, access, value, INITIALIZING_STORE);
}


template <>
HValue* CodeStubGraphBuilder<StoreFieldStub>::BuildCodeStub() {
  BuildStoreNamedField(GetParameter(0), GetParameter(2), casted_stub()->index(),
                       casted_stub()->representation(), false);
  return GetParameter(2);
}


Handle<Code> StoreFieldStub::GenerateCode() { return DoGenerateCode(this); }


template <>
HValue* CodeStubGraphBuilder<StoreTransitionStub>::BuildCodeStub() {
  HValue* object = GetParameter(StoreTransitionHelper::ReceiverIndex());

  switch (casted_stub()->store_mode()) {
    case StoreTransitionStub::ExtendStorageAndStoreMapAndValue: {
      HValue* properties = Add<HLoadNamedField>(
          object, nullptr, HObjectAccess::ForPropertiesPointer());
      HValue* length = AddLoadFixedArrayLength(properties);
      HValue* delta =
          Add<HConstant>(static_cast<int32_t>(JSObject::kFieldsAdded));
      HValue* new_capacity = AddUncasted<HAdd>(length, delta);

      // Grow properties array.
      ElementsKind kind = FAST_ELEMENTS;
      Add<HBoundsCheck>(new_capacity,
                        Add<HConstant>((Page::kMaxRegularHeapObjectSize -
                                        FixedArray::kHeaderSize) >>
                                       ElementsKindToShiftSize(kind)));

      // Reuse this code for properties backing store allocation.
      HValue* new_properties =
          BuildAllocateAndInitializeArray(kind, new_capacity);

      BuildCopyProperties(properties, new_properties, length, new_capacity);

      Add<HStoreNamedField>(object, HObjectAccess::ForPropertiesPointer(),
                            new_properties);
    }
    // Fall through.
    case StoreTransitionStub::StoreMapAndValue:
      // Store the new value into the "extended" object.
      BuildStoreNamedField(
          object, GetParameter(StoreTransitionHelper::ValueIndex()),
          casted_stub()->index(), casted_stub()->representation(), true);
    // Fall through.

    case StoreTransitionStub::StoreMapOnly:
      // And finally update the map.
      Add<HStoreNamedField>(object, HObjectAccess::ForMap(),
                            GetParameter(StoreTransitionHelper::MapIndex()));
      break;
  }
  return GetParameter(StoreTransitionHelper::ValueIndex());
}


Handle<Code> StoreTransitionStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<StoreFastElementStub>::BuildCodeStub() {
  BuildUncheckedMonomorphicElementAccess(
      GetParameter(StoreDescriptor::kReceiverIndex),
      GetParameter(StoreDescriptor::kNameIndex),
      GetParameter(StoreDescriptor::kValueIndex), casted_stub()->is_js_array(),
      casted_stub()->elements_kind(), STORE, NEVER_RETURN_HOLE,
      casted_stub()->store_mode());

  return GetParameter(2);
}


Handle<Code> StoreFastElementStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<TransitionElementsKindStub>::BuildCodeStub() {
  info()->MarkAsSavesCallerDoubles();

  BuildTransitionElementsKind(GetParameter(0),
                              GetParameter(1),
                              casted_stub()->from_kind(),
                              casted_stub()->to_kind(),
                              casted_stub()->is_js_array());

  return GetParameter(0);
}


Handle<Code> TransitionElementsKindStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<AllocateHeapNumberStub>::BuildCodeStub() {
  HValue* result =
      Add<HAllocate>(Add<HConstant>(HeapNumber::kSize), HType::HeapNumber(),
                     NOT_TENURED, HEAP_NUMBER_TYPE);
  AddStoreMapConstant(result, isolate()->factory()->heap_number_map());
  return result;
}


Handle<Code> AllocateHeapNumberStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<AllocateMutableHeapNumberStub>::BuildCodeStub() {
  HValue* result =
      Add<HAllocate>(Add<HConstant>(HeapNumber::kSize), HType::HeapObject(),
                     NOT_TENURED, MUTABLE_HEAP_NUMBER_TYPE);
  AddStoreMapConstant(result, isolate()->factory()->mutable_heap_number_map());
  return result;
}


Handle<Code> AllocateMutableHeapNumberStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<AllocateInNewSpaceStub>::BuildCodeStub() {
  HValue* result = Add<HAllocate>(GetParameter(0), HType::Tagged(), NOT_TENURED,
                                  JS_OBJECT_TYPE);
  return result;
}


Handle<Code> AllocateInNewSpaceStub::GenerateCode() {
  return DoGenerateCode(this);
}


HValue* CodeStubGraphBuilderBase::BuildArrayConstructor(
    ElementsKind kind,
    AllocationSiteOverrideMode override_mode,
    ArgumentClass argument_class) {
  HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor);
  HValue* alloc_site = GetParameter(ArrayConstructorStubBase::kAllocationSite);
  JSArrayBuilder array_builder(this, kind, alloc_site, constructor,
                               override_mode);
  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      // This stub is very performance sensitive, the generated code must be
      // tuned so that it doesn't build an eager frame.
      info()->MarkMustNotHaveEagerFrame();
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }

  return result;
}


HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor(
    ElementsKind kind, ArgumentClass argument_class) {
  HValue* constructor = GetParameter(
      InternalArrayConstructorStubBase::kConstructor);
  JSArrayBuilder array_builder(this, kind, constructor);

  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      // This stub is very performance sensitive, the generated code must be
      // tuned so that it doesn't build an eager frame.
1264 info()->MarkMustNotHaveEagerFrame();
1265 result = array_builder.AllocateEmptyArray();
1266 break;
1267 case SINGLE:
1268 result = BuildArraySingleArgumentConstructor(&array_builder);
1269 break;
1270 case MULTIPLE:
1271 result = BuildArrayNArgumentsConstructor(&array_builder, kind);
1272 break;
1273 }
1274 return result;
1275}
1276
1277
1278HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor(
1279 JSArrayBuilder* array_builder) {
1280 // Smi check and range check on the input arg.
1281 HValue* constant_one = graph()->GetConstant1();
1282 HValue* constant_zero = graph()->GetConstant0();
1283
1284 HInstruction* elements = Add<HArgumentsElements>(false);
1285 HInstruction* argument = Add<HAccessArgumentsAt>(
1286 elements, constant_one, constant_zero);
1287
1288 return BuildAllocateArrayFromLength(array_builder, argument);
1289}
1290
1291
1292HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor(
1293 JSArrayBuilder* array_builder, ElementsKind kind) {
1294 // Insert a bounds check because the number of arguments might exceed
1295 // the kInitialMaxFastElementArray limit. This cannot happen for code
1296 // that was parsed, but calling via Array.apply(thisArg, [...]) might
1297 // trigger it.
1298 HValue* length = GetArgumentsLength();
1299 HConstant* max_alloc_length =
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001300 Add<HConstant>(JSArray::kInitialMaxFastElementArray);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001301 HValue* checked_length = Add<HBoundsCheck>(length, max_alloc_length);
1302
1303 // We need to fill with the hole if it's a smi array in the multi-argument
1304 // case because we might have to bail out while copying arguments into
1305 // the array because they aren't compatible with a smi array.
1306 // If it's a double array, no problem, and if it's fast then no
1307 // problem either because doubles are boxed.
1308 //
1309 // TODO(mvstanton): consider an instruction to memset fill the array
1310 // with zero in this case instead.
1311 JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind)
1312 ? JSArrayBuilder::FILL_WITH_HOLE
1313 : JSArrayBuilder::DONT_FILL_WITH_HOLE;
1314 HValue* new_object = array_builder->AllocateArray(checked_length,
1315 max_alloc_length,
1316 checked_length,
1317 fill_mode);
1318 HValue* elements = array_builder->GetElementsLocation();
1319 DCHECK(elements != NULL);
1320
1321 // Now populate the elements correctly.
1322 LoopBuilder builder(this,
1323 context(),
1324 LoopBuilder::kPostIncrement);
1325 HValue* start = graph()->GetConstant0();
1326 HValue* key = builder.BeginBody(start, checked_length, Token::LT);
1327 HInstruction* argument_elements = Add<HArgumentsElements>(false);
1328 HInstruction* argument = Add<HAccessArgumentsAt>(
1329 argument_elements, checked_length, key);
1330
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001331 Add<HStoreKeyed>(elements, key, argument, nullptr, kind);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001332 builder.EndBody();
1333 return new_object;
1334}
1335
1336
template <>
HValue* CodeStubGraphBuilder<ArrayNoArgumentConstructorStub>::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, NONE);
}


Handle<Code> ArrayNoArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<ArraySingleArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, SINGLE);
}


Handle<Code> ArraySingleArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<ArrayNArgumentsConstructorStub>::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, MULTIPLE);
}


Handle<Code> ArrayNArgumentsConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArrayNoArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, NONE);
}


Handle<Code> InternalArrayNoArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArraySingleArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, SINGLE);
}


Handle<Code> InternalArraySingleArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArrayNArgumentsConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, MULTIPLE);
}


Handle<Code> InternalArrayNArgumentsConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


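// CompareNilIC: tests the operand against null/undefined according to the
// types recorded in the stub state. The meta map is passed as a sentinel
// that GetType() can use for the monomorphic-map case; maps are embedded
// via weak cells so the compiled stub does not keep them alive.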
template <>
HValue* CodeStubGraphBuilder<CompareNilICStub>::BuildCodeInitializedStub() {
  Isolate* isolate = graph()->isolate();
  CompareNilICStub* stub = casted_stub();
  HIfContinuation continuation;
  Handle<Map> sentinel_map(isolate->heap()->meta_map());
  Type* type = stub->GetType(zone(), sentinel_map);
  BuildCompareNil(GetParameter(0), type, &continuation, kEmbedMapsViaWeakCells);
  IfBuilder if_nil(this, &continuation);
  if_nil.Then();
  if (continuation.IsFalseReachable()) {
    if_nil.Else();
    if_nil.Return(graph()->GetConstantFalse());
  }
  if_nil.End();
  return continuation.IsTrueReachable() ? graph()->GetConstantTrue()
                                        : graph()->GetConstantUndefined();
}


Handle<Code> CompareNilICStub::GenerateCode() {
  return DoGenerateCode(this);
}


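// BinaryOpIC: builds the operation for the recorded left/right/result types.
// For ADD, when one side might be a string but is not known to be one (for
// example `x + "!"` with untyped x), a runtime string check selects between
// a string-typed build and a fully generic build of the operation.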
template <>
HValue* CodeStubGraphBuilder<BinaryOpICStub>::BuildCodeInitializedStub() {
  BinaryOpICState state = casted_stub()->state();

  HValue* left = GetParameter(BinaryOpICStub::kLeft);
  HValue* right = GetParameter(BinaryOpICStub::kRight);

  Type* left_type = state.GetLeftType();
  Type* right_type = state.GetRightType();
  Type* result_type = state.GetResultType();

  DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) &&
         (state.HasSideEffects() || !result_type->Is(Type::None())));

  HValue* result = NULL;
  HAllocationMode allocation_mode(NOT_TENURED);
  if (state.op() == Token::ADD &&
      (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) &&
      !left_type->Is(Type::String()) && !right_type->Is(Type::String())) {
    // For the generic add stub, a fast case for string addition is
    // performance critical.
    if (left_type->Maybe(Type::String())) {
      IfBuilder if_leftisstring(this);
      if_leftisstring.If<HIsStringAndBranch>(left);
      if_leftisstring.Then();
      {
        Push(BuildBinaryOperation(state.op(), left, right, Type::String(zone()),
                                  right_type, result_type,
                                  state.fixed_right_arg(), allocation_mode,
                                  state.strength()));
      }
      if_leftisstring.Else();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right, left_type, right_type, result_type,
            state.fixed_right_arg(), allocation_mode, state.strength()));
      }
      if_leftisstring.End();
      result = Pop();
    } else {
      IfBuilder if_rightisstring(this);
      if_rightisstring.If<HIsStringAndBranch>(right);
      if_rightisstring.Then();
      {
        Push(BuildBinaryOperation(state.op(), left, right, left_type,
                                  Type::String(zone()), result_type,
                                  state.fixed_right_arg(), allocation_mode,
                                  state.strength()));
      }
      if_rightisstring.Else();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right, left_type, right_type, result_type,
            state.fixed_right_arg(), allocation_mode, state.strength()));
      }
      if_rightisstring.End();
      result = Pop();
    }
  } else {
    result = BuildBinaryOperation(
        state.op(), left, right, left_type, right_type, result_type,
        state.fixed_right_arg(), allocation_mode, state.strength());
  }

  // If we encounter a generic argument, the number conversion is
  // observable, so we cannot afford to bail out after the fact.
  if (!state.HasSideEffects()) {
    result = EnforceNumberType(result, result_type);
  }

  return result;
}


Handle<Code> BinaryOpICStub::GenerateCode() {
  return DoGenerateCode(this);
}


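// Variant of the BinaryOpIC stub that threads an AllocationSite through the
// allocation mode, so allocations performed by the operation can be
// attributed to the site (e.g. for pretenuring decisions) instead of using
// the plain NOT_TENURED mode above.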
template <>
HValue* CodeStubGraphBuilder<BinaryOpWithAllocationSiteStub>::BuildCodeStub() {
  BinaryOpICState state = casted_stub()->state();

  HValue* allocation_site = GetParameter(
      BinaryOpWithAllocationSiteStub::kAllocationSite);
  HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft);
  HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight);

  Type* left_type = state.GetLeftType();
  Type* right_type = state.GetRightType();
  Type* result_type = state.GetResultType();
  HAllocationMode allocation_mode(allocation_site);

  return BuildBinaryOperation(state.op(), left, right, left_type, right_type,
                              result_type, state.fixed_right_arg(),
                              allocation_mode, state.strength());
}


Handle<Code> BinaryOpWithAllocationSiteStub::GenerateCode() {
  return DoGenerateCode(this);
}


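// ToString conversion ladder used by the string-add fast path below:
//   smi             -> cached number-to-string conversion
//   string          -> returned as-is
//   other primitive -> ToStringStub call
//   JS receiver     -> ToPrimitive first, then the ToStringStub
// When {convert} is false, the input is merely checked to be a string.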
HValue* CodeStubGraphBuilderBase::BuildToString(HValue* input, bool convert) {
  if (!convert) return BuildCheckString(input);
  IfBuilder if_inputissmi(this);
  HValue* inputissmi = if_inputissmi.If<HIsSmiAndBranch>(input);
  if_inputissmi.Then();
  {
    // Convert the input smi to a string.
    Push(BuildNumberToString(input, Type::SignedSmall()));
  }
  if_inputissmi.Else();
  {
    HValue* input_map =
        Add<HLoadNamedField>(input, inputissmi, HObjectAccess::ForMap());
    HValue* input_instance_type = Add<HLoadNamedField>(
        input_map, inputissmi, HObjectAccess::ForMapInstanceType());
    IfBuilder if_inputisstring(this);
    if_inputisstring.If<HCompareNumericAndBranch>(
        input_instance_type, Add<HConstant>(FIRST_NONSTRING_TYPE), Token::LT);
    if_inputisstring.Then();
    {
      // The input is already a string.
      Push(input);
    }
    if_inputisstring.Else();
    {
      // Convert to primitive first (if necessary), see
      // ES6 section 12.7.3 The Addition operator.
      IfBuilder if_inputisprimitive(this);
      STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE);
      if_inputisprimitive.If<HCompareNumericAndBranch>(
          input_instance_type, Add<HConstant>(LAST_PRIMITIVE_TYPE), Token::LTE);
      if_inputisprimitive.Then();
      {
        // The input is already a primitive.
        Push(input);
      }
      if_inputisprimitive.Else();
      {
        // Convert the input to a primitive.
        Push(BuildToPrimitive(input, input_map));
      }
      if_inputisprimitive.End();
      // Convert the primitive to a string value.
      ToStringDescriptor descriptor(isolate());
      ToStringStub stub(isolate());
      HValue* values[] = {context(), Pop()};
      Push(AddUncasted<HCallWithDescriptor>(
          Add<HConstant>(stub.GetCode()), 0, descriptor,
          Vector<HValue*>(values, arraysize(values))));
    }
    if_inputisstring.End();
  }
  if_inputissmi.End();
  return Pop();
}


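// ToPrimitive with a single fast case: an unmodified String wrapper is
// unwrapped directly (see the detailed conditions below); everything else
// falls back to the %ToPrimitive runtime function.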
HValue* CodeStubGraphBuilderBase::BuildToPrimitive(HValue* input,
                                                   HValue* input_map) {
  // Get the native context of the caller.
  HValue* native_context = BuildGetNativeContext();

  // Determine the initial map of the %ObjectPrototype%.
  HValue* object_function_prototype_map =
      Add<HLoadNamedField>(native_context, nullptr,
                           HObjectAccess::ForContextSlot(
                               Context::OBJECT_FUNCTION_PROTOTYPE_MAP_INDEX));

  // Determine the initial map of the %StringPrototype%.
  HValue* string_function_prototype_map =
      Add<HLoadNamedField>(native_context, nullptr,
                           HObjectAccess::ForContextSlot(
                               Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));

  // Determine the initial map of the String function.
  HValue* string_function = Add<HLoadNamedField>(
      native_context, nullptr,
      HObjectAccess::ForContextSlot(Context::STRING_FUNCTION_INDEX));
  HValue* string_function_initial_map = Add<HLoadNamedField>(
      string_function, nullptr, HObjectAccess::ForPrototypeOrInitialMap());

  // Determine the map of the [[Prototype]] of {input}.
  HValue* input_prototype =
      Add<HLoadNamedField>(input_map, nullptr, HObjectAccess::ForPrototype());
  HValue* input_prototype_map =
      Add<HLoadNamedField>(input_prototype, nullptr, HObjectAccess::ForMap());

  // For string wrappers (JSValue instances with [[StringData]] internal
  // fields), we can short-circuit the ToPrimitive if
  //
  // (a) the {input} map matches the initial map of the String function,
  // (b) the {input} [[Prototype]] is the unmodified %StringPrototype% (i.e.
  //     no one monkey-patched toString, @@toPrimitive or valueOf), and
  // (c) the %ObjectPrototype% (i.e. the [[Prototype]] of the
  //     %StringPrototype%) is also unmodified, that is no one sneaked a
  //     @@toPrimitive into the %ObjectPrototype%.
  //
  // If all these assumptions hold, we can just take the [[StringData]] value
  // and return it.
  // TODO(bmeurer): This just repairs a regression introduced by removing the
  // weird (and broken) intrinsic %_IsStringWrapperSafeForDefaultValue, which
  // was intended to do something similar to this, although less efficient and
  // wrong in the presence of @@toPrimitive. Long-term we might want to move
  // in the direction of having a ToPrimitiveStub that can do common cases
  // while staying in JavaScript land (i.e. not going to C++).
  IfBuilder if_inputisstringwrapper(this);
  if_inputisstringwrapper.If<HCompareObjectEqAndBranch>(
      input_map, string_function_initial_map);
  if_inputisstringwrapper.And();
  if_inputisstringwrapper.If<HCompareObjectEqAndBranch>(
      input_prototype_map, string_function_prototype_map);
  if_inputisstringwrapper.And();
  if_inputisstringwrapper.If<HCompareObjectEqAndBranch>(
      Add<HLoadNamedField>(Add<HLoadNamedField>(input_prototype_map, nullptr,
                                                HObjectAccess::ForPrototype()),
                           nullptr, HObjectAccess::ForMap()),
      object_function_prototype_map);
  if_inputisstringwrapper.Then();
  {
    Push(BuildLoadNamedField(
        input, FieldIndex::ForInObjectOffset(JSValue::kValueOffset)));
  }
  if_inputisstringwrapper.Else();
  {
    // TODO(bmeurer): Add support for fast ToPrimitive conversion using
    // a dedicated ToPrimitiveStub.
    Add<HPushArguments>(input);
    Push(Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kToPrimitive), 1));
  }
  if_inputisstringwrapper.End();
  return Pop();
}

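// StringAdd: the stub flags say which operands still need a string check
// (STRING_ADD_CHECK_LEFT/RIGHT) and whether non-strings are converted
// rather than rejected (STRING_ADD_CONVERT); the pretenure flag feeds the
// allocation mode of the result string.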
template <>
HValue* CodeStubGraphBuilder<StringAddStub>::BuildCodeInitializedStub() {
  StringAddStub* stub = casted_stub();
  StringAddFlags flags = stub->flags();
  PretenureFlag pretenure_flag = stub->pretenure_flag();

  HValue* left = GetParameter(StringAddStub::kLeft);
  HValue* right = GetParameter(StringAddStub::kRight);

  // Make sure that both arguments are strings if not known in advance.
  if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) {
    left =
        BuildToString(left, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT);
  }
  if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) {
    right = BuildToString(right,
                          (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT);
  }

  return BuildStringAdd(left, right, HAllocationMode(pretenure_flag));
}


Handle<Code> StringAddStub::GenerateCode() {
  return DoGenerateCode(this);
}


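// ToBoolean: the HBranch instruction, parameterized with the recorded type
// set, does the actual testing; the graph just routes the two outcomes to
// the true/false constants.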
template <>
HValue* CodeStubGraphBuilder<ToBooleanStub>::BuildCodeInitializedStub() {
  ToBooleanStub* stub = casted_stub();
  IfBuilder if_true(this);
  if_true.If<HBranch>(GetParameter(0), stub->types());
  if_true.Then();
  if_true.Return(graph()->GetConstantTrue());
  if_true.Else();
  if_true.End();
  return graph()->GetConstantFalse();
}


Handle<Code> ToBooleanStub::GenerateCode() {
  return DoGenerateCode(this);
}


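// StoreGlobal is specialized per property cell: the weak cells created here
// wrap placeholder objects that are presumably replaced with the concrete
// global map and PropertyCell when the stub is copied for a particular
// global property.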
template <>
HValue* CodeStubGraphBuilder<StoreGlobalStub>::BuildCodeInitializedStub() {
  StoreGlobalStub* stub = casted_stub();
  HParameter* value = GetParameter(StoreDescriptor::kValueIndex);
  if (stub->check_global()) {
    // Check that the map of the global has not changed: use a placeholder map
    // that will be replaced later with the global object's map.
    HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex);
    HValue* proxy_map =
        Add<HLoadNamedField>(proxy, nullptr, HObjectAccess::ForMap());
    HValue* global =
        Add<HLoadNamedField>(proxy_map, nullptr, HObjectAccess::ForPrototype());
    HValue* map_cell = Add<HConstant>(isolate()->factory()->NewWeakCell(
        StoreGlobalStub::global_map_placeholder(isolate())));
    HValue* expected_map = Add<HLoadNamedField>(
        map_cell, nullptr, HObjectAccess::ForWeakCellValue());
    HValue* map =
        Add<HLoadNamedField>(global, nullptr, HObjectAccess::ForMap());
    IfBuilder map_check(this);
    map_check.IfNot<HCompareObjectEqAndBranch>(expected_map, map);
    map_check.ThenDeopt(Deoptimizer::kUnknownMap);
    map_check.End();
  }

  HValue* weak_cell = Add<HConstant>(isolate()->factory()->NewWeakCell(
      StoreGlobalStub::property_cell_placeholder(isolate())));
  HValue* cell = Add<HLoadNamedField>(weak_cell, nullptr,
                                      HObjectAccess::ForWeakCellValue());
  Add<HCheckHeapObject>(cell);
  HObjectAccess access = HObjectAccess::ForPropertyCellValue();
  // Load the payload of the global parameter cell. A hole indicates that the
  // cell has been invalidated and that the store must be handled by the
  // runtime.
  HValue* cell_contents = Add<HLoadNamedField>(cell, nullptr, access);

  auto cell_type = stub->cell_type();
  if (cell_type == PropertyCellType::kConstant ||
      cell_type == PropertyCellType::kUndefined) {
    // This check is valid for every state the cell can be in.
    IfBuilder builder(this);
    builder.If<HCompareObjectEqAndBranch>(cell_contents, value);
    builder.Then();
    builder.ElseDeopt(
        Deoptimizer::kUnexpectedCellContentsInConstantGlobalStore);
    builder.End();
  } else {
    IfBuilder builder(this);
    HValue* hole_value = graph()->GetConstantHole();
    builder.If<HCompareObjectEqAndBranch>(cell_contents, hole_value);
    builder.Then();
    builder.Deopt(Deoptimizer::kUnexpectedCellContentsInGlobalStore);
    builder.Else();
    // When dealing with constant types, the type may be allowed to change, as
    // long as optimized code remains valid.
    if (cell_type == PropertyCellType::kConstantType) {
      switch (stub->constant_type()) {
        case PropertyCellConstantType::kSmi:
          access = access.WithRepresentation(Representation::Smi());
          break;
        case PropertyCellConstantType::kStableMap: {
          // It is sufficient here to check that the value and cell contents
          // have identical maps, regardless of whether they are stable and
          // whether they are the maps originally in the cell. If optimized
          // code will deopt when a cell has an unstable map, and if it has a
          // dependency on a stable map, it will deopt if the map destabilizes.
          Add<HCheckHeapObject>(value);
          Add<HCheckHeapObject>(cell_contents);
          HValue* expected_map = Add<HLoadNamedField>(cell_contents, nullptr,
                                                      HObjectAccess::ForMap());
          HValue* map =
              Add<HLoadNamedField>(value, nullptr, HObjectAccess::ForMap());
          IfBuilder map_check(this);
          map_check.IfNot<HCompareObjectEqAndBranch>(expected_map, map);
          map_check.ThenDeopt(Deoptimizer::kUnknownMap);
          map_check.End();
          access = access.WithRepresentation(Representation::HeapObject());
          break;
        }
      }
    }
    Add<HStoreNamedField>(cell, access, value);
    builder.End();
  }

  return value;
}


Handle<Code> StoreGlobalStub::GenerateCode() {
  return DoGenerateCode(this);
}


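// ElementsTransitionAndStore: transitions the receiver's elements kind and
// then performs the keyed store under the new kind. When
// --trace-elements-transitions is on, the stub deopts immediately, since
// tracing is the runtime's job.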
template <>
HValue* CodeStubGraphBuilder<ElementsTransitionAndStoreStub>::BuildCodeStub() {
  HValue* object = GetParameter(StoreTransitionHelper::ReceiverIndex());
  HValue* key = GetParameter(StoreTransitionHelper::NameIndex());
  HValue* value = GetParameter(StoreTransitionHelper::ValueIndex());
  HValue* map = GetParameter(StoreTransitionHelper::MapIndex());

  if (FLAG_trace_elements_transitions) {
    // Tracing elements transitions is the job of the runtime.
    Add<HDeoptimize>(Deoptimizer::kTracingElementsTransitions,
                     Deoptimizer::EAGER);
  } else {
    info()->MarkAsSavesCallerDoubles();

    BuildTransitionElementsKind(object, map,
                                casted_stub()->from_kind(),
                                casted_stub()->to_kind(),
                                casted_stub()->is_jsarray());

    BuildUncheckedMonomorphicElementAccess(object, key, value,
                                           casted_stub()->is_jsarray(),
                                           casted_stub()->to_kind(),
                                           STORE, ALLOW_RETURN_HOLE,
                                           casted_stub()->store_mode());
  }

  return value;
}


Handle<Code> ElementsTransitionAndStoreStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<ToObjectStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(ToObjectDescriptor::kReceiverIndex);
  return BuildToObject(receiver);
}


Handle<Code> ToObjectStub::GenerateCode() { return DoGenerateCode(this); }


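// Checks one optimized code map entry: it must match {native_context}, have
// no OSR AST id, and its code weak cell must not be cleared (a cleared cell
// reads as Smi zero here). On a hit, the cached code and literals are
// installed on {js_function}; if the literals were disposed, we deopt to the
// runtime instead.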
void CodeStubGraphBuilderBase::BuildCheckAndInstallOptimizedCode(
    HValue* js_function,
    HValue* native_context,
    IfBuilder* builder,
    HValue* optimized_map,
    HValue* map_index) {
  HValue* osr_ast_id_none = Add<HConstant>(BailoutId::None().ToInt());
  HValue* context_slot = LoadFromOptimizedCodeMap(
      optimized_map, map_index, SharedFunctionInfo::kContextOffset);
  context_slot = Add<HLoadNamedField>(context_slot, nullptr,
                                      HObjectAccess::ForWeakCellValue());
  HValue* osr_ast_slot = LoadFromOptimizedCodeMap(
      optimized_map, map_index, SharedFunctionInfo::kOsrAstIdOffset);
  HValue* code_object = LoadFromOptimizedCodeMap(
      optimized_map, map_index, SharedFunctionInfo::kCachedCodeOffset);
  code_object = Add<HLoadNamedField>(code_object, nullptr,
                                     HObjectAccess::ForWeakCellValue());
  builder->If<HCompareObjectEqAndBranch>(native_context,
                                         context_slot);
  builder->AndIf<HCompareObjectEqAndBranch>(osr_ast_slot, osr_ast_id_none);
  builder->And();
  builder->IfNot<HCompareObjectEqAndBranch>(code_object,
                                            graph()->GetConstant0());
  builder->Then();
  HValue* literals = LoadFromOptimizedCodeMap(optimized_map,
      map_index, SharedFunctionInfo::kLiteralsOffset);
  literals = Add<HLoadNamedField>(literals, nullptr,
                                  HObjectAccess::ForWeakCellValue());
  IfBuilder maybe_deopt(this);
  maybe_deopt.If<HCompareObjectEqAndBranch>(literals, graph()->GetConstant0());
  maybe_deopt.ThenDeopt(Deoptimizer::kLiteralsWereDisposed);
  maybe_deopt.End();

  BuildInstallOptimizedCode(js_function, native_context, code_object, literals);

  // The builder continues in the "then" after this function.
}


void CodeStubGraphBuilderBase::BuildInstallOptimizedCode(HValue* js_function,
                                                         HValue* native_context,
                                                         HValue* code_object,
                                                         HValue* literals) {
  Counters* counters = isolate()->counters();
  AddIncrementCounter(counters->fast_new_closure_install_optimized());

  // TODO(fschneider): Idea: store proper code pointers in the optimized code
  // map and either unmangle them on marking or do nothing as the whole map is
  // discarded on major GC anyway.
  Add<HStoreCodeEntry>(js_function, code_object);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(),
                        literals);

  // Now link the function into the native context's list of optimized
  // functions.
  HValue* optimized_functions_list = Add<HLoadNamedField>(
      native_context, nullptr,
      HObjectAccess::ForContextSlot(Context::OPTIMIZED_FUNCTIONS_LIST));
  Add<HStoreNamedField>(js_function,
                        HObjectAccess::ForNextFunctionLinkPointer(),
                        optimized_functions_list);

  // This store is the only one that should have a write barrier.
  Add<HStoreNamedField>(native_context,
      HObjectAccess::ForContextSlot(Context::OPTIMIZED_FUNCTIONS_LIST),
      js_function);
}


void CodeStubGraphBuilderBase::BuildInstallCode(HValue* js_function,
                                                HValue* shared_info) {
  Add<HStoreNamedField>(js_function,
                        HObjectAccess::ForNextFunctionLinkPointer(),
                        graph()->GetConstantUndefined());
  HValue* code_object = Add<HLoadNamedField>(shared_info, nullptr,
                                             HObjectAccess::ForCodeOffset());
  Add<HStoreCodeEntry>(js_function, code_object);
}


HInstruction* CodeStubGraphBuilderBase::LoadFromOptimizedCodeMap(
    HValue* optimized_map,
    HValue* iterator,
    int field_offset) {
  // By expressing these loads in the form [<hvalue> + constant], the keyed
  // load can be hoisted.
  DCHECK(field_offset >= 0 && field_offset < SharedFunctionInfo::kEntryLength);
  HValue* field_slot = iterator;
  if (field_offset > 0) {
    HValue* field_offset_value = Add<HConstant>(field_offset);
    field_slot = AddUncasted<HAdd>(iterator, field_offset_value);
  }
  HInstruction* field_entry = Add<HLoadKeyed>(optimized_map, field_slot,
                                              nullptr, nullptr, FAST_ELEMENTS);
  return field_entry;
}


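// Code installation policy for new closures: prefer context-dependent
// optimized code from the SharedFunctionInfo's optimized code map, then the
// context-independent shared code entry, and finally the unoptimized code
// from the SharedFunctionInfo itself.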
void CodeStubGraphBuilderBase::BuildInstallFromOptimizedCodeMap(
    HValue* js_function,
    HValue* shared_info,
    HValue* native_context) {
  Counters* counters = isolate()->counters();
  Factory* factory = isolate()->factory();
  IfBuilder is_optimized(this);
  HInstruction* optimized_map = Add<HLoadNamedField>(
      shared_info, nullptr, HObjectAccess::ForOptimizedCodeMap());
  // An empty optimized code map is stored as Smi zero.
  HValue* null_constant = Add<HConstant>(0);
  is_optimized.If<HCompareObjectEqAndBranch>(optimized_map, null_constant);
  is_optimized.Then();
  {
    BuildInstallCode(js_function, shared_info);
  }
  is_optimized.Else();
  {
    AddIncrementCounter(counters->fast_new_closure_try_optimized());
    // The {optimized_map} points to a fixed array of 4-element entries:
    //   (native context, optimized code, literals, ast-id).
    // Iterate through the {optimized_map} backwards. After the loop, if no
    // matching optimized code was found, try the context-independent shared
    // code, then fall back to unoptimized code.
    //   for (i = map.length() - SharedFunctionInfo::kEntryLength;
    //        i >= SharedFunctionInfo::kEntriesStart;
    //        i -= SharedFunctionInfo::kEntryLength) { ... }
    HValue* first_entry_index =
        Add<HConstant>(SharedFunctionInfo::kEntriesStart);
    HValue* shared_function_entry_length =
        Add<HConstant>(SharedFunctionInfo::kEntryLength);
    LoopBuilder loop_builder(this, context(), LoopBuilder::kPostDecrement,
                             shared_function_entry_length);
    HValue* array_length = Add<HLoadNamedField>(
        optimized_map, nullptr, HObjectAccess::ForFixedArrayLength());
    HValue* start_pos =
        AddUncasted<HSub>(array_length, shared_function_entry_length);
    HValue* slot_iterator =
        loop_builder.BeginBody(start_pos, first_entry_index, Token::GTE);
    {
      IfBuilder done_check(this);
      BuildCheckAndInstallOptimizedCode(js_function, native_context,
                                        &done_check, optimized_map,
                                        slot_iterator);
      // Fall out of the loop.
      loop_builder.Break();
    }
    loop_builder.EndBody();

    // If {slot_iterator} is less than the first entry index, then we failed
    // to find context-dependent code; try context-independent code next.
    IfBuilder no_optimized_code_check(this);
    no_optimized_code_check.If<HCompareNumericAndBranch>(
        slot_iterator, first_entry_index, Token::LT);
    no_optimized_code_check.Then();
    {
      IfBuilder shared_code_check(this);
      HValue* shared_code =
          Add<HLoadNamedField>(optimized_map, nullptr,
                               HObjectAccess::ForOptimizedCodeMapSharedCode());
      shared_code = Add<HLoadNamedField>(shared_code, nullptr,
                                         HObjectAccess::ForWeakCellValue());
      shared_code_check.IfNot<HCompareObjectEqAndBranch>(
          shared_code, graph()->GetConstant0());
      shared_code_check.Then();
      {
        // Store the context-independent optimized code.
        HValue* literals = Add<HConstant>(factory->empty_fixed_array());
        BuildInstallOptimizedCode(js_function, native_context, shared_code,
                                  literals);
      }
      shared_code_check.Else();
      {
        // Store the unoptimized code.
        BuildInstallCode(js_function, shared_info);
      }
    }
  }
}


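// FastNewClosure: allocates and initializes a JSFunction in new space
// without calling into the runtime, then installs its code according to the
// policy above.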
template<>
HValue* CodeStubGraphBuilder<FastNewClosureStub>::BuildCodeStub() {
  Counters* counters = isolate()->counters();
  Factory* factory = isolate()->factory();
  HInstruction* empty_fixed_array =
      Add<HConstant>(factory->empty_fixed_array());
  HValue* shared_info = GetParameter(0);

  AddIncrementCounter(counters->fast_new_closure_total());

  // Create a new closure from the given function info in new space.
  HValue* size = Add<HConstant>(JSFunction::kSize);
  HInstruction* js_function =
      Add<HAllocate>(size, HType::JSObject(), NOT_TENURED, JS_FUNCTION_TYPE);

  int map_index = Context::FunctionMapIndex(casted_stub()->language_mode(),
                                            casted_stub()->kind());

  // Compute the function map in the current native context and set that
  // as the map of the allocated object.
  HInstruction* native_context = BuildGetNativeContext();
  HInstruction* map_slot_value = Add<HLoadNamedField>(
      native_context, nullptr, HObjectAccess::ForContextSlot(map_index));
  Add<HStoreNamedField>(js_function, HObjectAccess::ForMap(), map_slot_value);

  // Initialize the rest of the function.
  Add<HStoreNamedField>(js_function, HObjectAccess::ForPropertiesPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForElementsPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForPrototypeOrInitialMap(),
                        graph()->GetConstantHole());
  Add<HStoreNamedField>(
      js_function, HObjectAccess::ForSharedFunctionInfoPointer(), shared_info);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForFunctionContextPointer(),
                        context());

  // Initialize the code pointer in the function to be the one found in the
  // shared function info object. But first check if there is an optimized
  // version for our context.
  BuildInstallFromOptimizedCodeMap(js_function, shared_info, native_context);

  return js_function;
}


Handle<Code> FastNewClosureStub::GenerateCode() {
  return DoGenerateCode(this);
}


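// FastNewContext: allocates a function context with MIN_CONTEXT_SLOTS plus
// the stub's slot count, laid out like a fixed array; wires up the closure,
// previous-context and native-context slots; and fills the remaining slots
// with undefined.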
template<>
HValue* CodeStubGraphBuilder<FastNewContextStub>::BuildCodeStub() {
  int length = casted_stub()->slots() + Context::MIN_CONTEXT_SLOTS;

  // Get the function.
  HParameter* function = GetParameter(FastNewContextStub::kFunction);

  // Allocate the context in new space.
  HAllocate* function_context = Add<HAllocate>(
      Add<HConstant>(length * kPointerSize + FixedArray::kHeaderSize),
      HType::HeapObject(), NOT_TENURED, FIXED_ARRAY_TYPE);

  // Set up the object header.
  AddStoreMapConstant(function_context,
                      isolate()->factory()->function_context_map());
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForFixedArrayLength(),
                        Add<HConstant>(length));

  // Set up the fixed slots.
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::CLOSURE_INDEX),
                        function);
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX),
                        context());
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::EXTENSION_INDEX),
                        graph()->GetConstantHole());

  // Copy the native context from the previous context.
  HValue* native_context = Add<HLoadNamedField>(
      context(), nullptr,
      HObjectAccess::ForContextSlot(Context::NATIVE_CONTEXT_INDEX));
  Add<HStoreNamedField>(function_context, HObjectAccess::ForContextSlot(
                                              Context::NATIVE_CONTEXT_INDEX),
                        native_context);

  // Initialize the rest of the slots to undefined.
  for (int i = Context::MIN_CONTEXT_SLOTS; i < length; ++i) {
    Add<HStoreNamedField>(function_context,
                          HObjectAccess::ForContextSlot(i),
                          graph()->GetConstantUndefined());
  }

  return function_context;
}


Handle<Code> FastNewContextStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<LoadDictionaryElementStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
  HValue* key = GetParameter(LoadDescriptor::kNameIndex);

  Add<HCheckSmi>(key);

  HValue* elements = AddLoadElements(receiver);

  HValue* hash = BuildElementIndexHash(key);

  return BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash,
                                             casted_stub()->language_mode());
}


Handle<Code> LoadDictionaryElementStub::GenerateCode() {
  return DoGenerateCode(this);
}


template<>
HValue* CodeStubGraphBuilder<RegExpConstructResultStub>::BuildCodeStub() {
  // Determine the parameters.
  HValue* length = GetParameter(RegExpConstructResultStub::kLength);
  HValue* index = GetParameter(RegExpConstructResultStub::kIndex);
  HValue* input = GetParameter(RegExpConstructResultStub::kInput);

  info()->MarkMustNotHaveEagerFrame();

  return BuildRegExpConstructResult(length, index, input);
}


Handle<Code> RegExpConstructResultStub::GenerateCode() {
  return DoGenerateCode(this);
}


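// KeyedLoadGeneric dispatches on the key type and receiver shape:
//  - integer-like keys: fast holey elements, fast holey double elements,
//    then dictionary elements; slower elements kinds deopt to the runtime.
//  - unique-string keys: dictionary-mode properties are looked up inline;
//    the fast-properties path below is currently unreachable (see the TODO
//    about switching to the megamorphic stub cache).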
template <>
class CodeStubGraphBuilder<KeyedLoadGenericStub>
    : public CodeStubGraphBuilderBase {
 public:
  explicit CodeStubGraphBuilder(CompilationInfo* info)
      : CodeStubGraphBuilderBase(info) {}

 protected:
  virtual HValue* BuildCodeStub();

  void BuildElementsKindLimitCheck(HGraphBuilder::IfBuilder* if_builder,
                                   HValue* bit_field2,
                                   ElementsKind kind);

  void BuildFastElementLoad(HGraphBuilder::IfBuilder* if_builder,
                            HValue* receiver,
                            HValue* key,
                            HValue* instance_type,
                            HValue* bit_field2,
                            ElementsKind kind);

  KeyedLoadGenericStub* casted_stub() {
    return static_cast<KeyedLoadGenericStub*>(stub());
  }
};


void CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildElementsKindLimitCheck(
    HGraphBuilder::IfBuilder* if_builder, HValue* bit_field2,
    ElementsKind kind) {
  ElementsKind next_kind = static_cast<ElementsKind>(kind + 1);
  HValue* kind_limit = Add<HConstant>(
      static_cast<int>(Map::ElementsKindBits::encode(next_kind)));

  if_builder->If<HCompareNumericAndBranch>(bit_field2, kind_limit, Token::LT);
  if_builder->Then();
}


void CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildFastElementLoad(
    HGraphBuilder::IfBuilder* if_builder, HValue* receiver, HValue* key,
    HValue* instance_type, HValue* bit_field2, ElementsKind kind) {
  BuildElementsKindLimitCheck(if_builder, bit_field2, kind);

  IfBuilder js_array_check(this);
  js_array_check.If<HCompareNumericAndBranch>(
      instance_type, Add<HConstant>(JS_ARRAY_TYPE), Token::EQ);
  js_array_check.Then();
  Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
                                              true, kind,
                                              LOAD, NEVER_RETURN_HOLE,
                                              STANDARD_STORE));
  js_array_check.Else();
  Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
                                              false, kind,
                                              LOAD, NEVER_RETURN_HOLE,
                                              STANDARD_STORE));
  js_array_check.End();
}


HValue* CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
  HValue* key = GetParameter(LoadDescriptor::kNameIndex);
  // Split into a smi/integer case and unique string case.
  HIfContinuation index_name_split_continuation(graph()->CreateBasicBlock(),
                                                graph()->CreateBasicBlock());

  BuildKeyedIndexCheck(key, &index_name_split_continuation);

  IfBuilder index_name_split(this, &index_name_split_continuation);
  index_name_split.Then();
  {
    // Key is an index (number).
    key = Pop();

    int bit_field_mask = (1 << Map::kIsAccessCheckNeeded) |
                         (1 << Map::kHasIndexedInterceptor);
    BuildJSObjectCheck(receiver, bit_field_mask);

    HValue* map =
        Add<HLoadNamedField>(receiver, nullptr, HObjectAccess::ForMap());

    HValue* instance_type =
        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapInstanceType());

    HValue* bit_field2 =
        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField2());

    IfBuilder kind_if(this);
    BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                         FAST_HOLEY_ELEMENTS);

    kind_if.Else();
    {
      BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                           FAST_HOLEY_DOUBLE_ELEMENTS);
    }
    kind_if.Else();

    // The DICTIONARY_ELEMENTS check generates a "kind_if.Then".
    BuildElementsKindLimitCheck(&kind_if, bit_field2, DICTIONARY_ELEMENTS);
    {
      HValue* elements = AddLoadElements(receiver);

      HValue* hash = BuildElementIndexHash(key);

      Push(BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash,
                                               casted_stub()->language_mode()));
    }
    kind_if.Else();

    // The SLOW_SLOPPY_ARGUMENTS_ELEMENTS check generates a "kind_if.Then".
    STATIC_ASSERT(FAST_SLOPPY_ARGUMENTS_ELEMENTS <
                  SLOW_SLOPPY_ARGUMENTS_ELEMENTS);
    BuildElementsKindLimitCheck(&kind_if, bit_field2,
                                SLOW_SLOPPY_ARGUMENTS_ELEMENTS);
    // Non-strict elements are not handled.
    Add<HDeoptimize>(Deoptimizer::kNonStrictElementsInKeyedLoadGenericStub,
                     Deoptimizer::EAGER);
    Push(graph()->GetConstant0());

    kind_if.ElseDeopt(
        Deoptimizer::kElementsKindUnhandledInKeyedLoadGenericStub);

    kind_if.End();
  }
  index_name_split.Else();
  {
    // Key is a unique string.
    key = Pop();

    int bit_field_mask = (1 << Map::kIsAccessCheckNeeded) |
                         (1 << Map::kHasNamedInterceptor);
    BuildJSObjectCheck(receiver, bit_field_mask);

    HIfContinuation continuation;
    BuildTestForDictionaryProperties(receiver, &continuation);
    IfBuilder if_dict_properties(this, &continuation);
    if_dict_properties.Then();
    {
      // Key is a string, and properties are in dictionary mode.
      BuildNonGlobalObjectCheck(receiver);

      HValue* properties = Add<HLoadNamedField>(
          receiver, nullptr, HObjectAccess::ForPropertiesPointer());

      HValue* hash =
          Add<HLoadNamedField>(key, nullptr, HObjectAccess::ForNameHashField());

      hash = AddUncasted<HShr>(hash, Add<HConstant>(Name::kHashShift));

      HValue* value = BuildUncheckedDictionaryElementLoad(
          receiver, properties, key, hash, casted_stub()->language_mode());
      Push(value);
    }
    if_dict_properties.Else();
    {
      // TODO(dcarney): don't use keyed lookup cache, but convert to use
      // megamorphic stub cache.
      UNREACHABLE();
      // Key is a string, and properties are in fast mode.
      HValue* hash = BuildKeyedLookupCacheHash(receiver, key);

      ExternalReference cache_keys_ref =
          ExternalReference::keyed_lookup_cache_keys(isolate());
      HValue* cache_keys = Add<HConstant>(cache_keys_ref);

      HValue* map =
          Add<HLoadNamedField>(receiver, nullptr, HObjectAccess::ForMap());
      HValue* base_index = AddUncasted<HMul>(hash, Add<HConstant>(2));
      base_index->ClearFlag(HValue::kCanOverflow);

      HIfContinuation inline_or_runtime_continuation(
          graph()->CreateBasicBlock(), graph()->CreateBasicBlock());
      {
        IfBuilder lookup_ifs[KeyedLookupCache::kEntriesPerBucket];
        for (int probe = 0; probe < KeyedLookupCache::kEntriesPerBucket;
             ++probe) {
          IfBuilder* lookup_if = &lookup_ifs[probe];
          lookup_if->Initialize(this);
          int probe_base = probe * KeyedLookupCache::kEntryLength;
          HValue* map_index = AddUncasted<HAdd>(
              base_index,
              Add<HConstant>(probe_base + KeyedLookupCache::kMapIndex));
          map_index->ClearFlag(HValue::kCanOverflow);
          HValue* key_index = AddUncasted<HAdd>(
              base_index,
              Add<HConstant>(probe_base + KeyedLookupCache::kKeyIndex));
          key_index->ClearFlag(HValue::kCanOverflow);
          HValue* map_to_check =
              Add<HLoadKeyed>(cache_keys, map_index, nullptr, nullptr,
                              FAST_ELEMENTS, NEVER_RETURN_HOLE, 0);
          lookup_if->If<HCompareObjectEqAndBranch>(map_to_check, map);
          lookup_if->And();
          HValue* key_to_check =
              Add<HLoadKeyed>(cache_keys, key_index, nullptr, nullptr,
                              FAST_ELEMENTS, NEVER_RETURN_HOLE, 0);
          lookup_if->If<HCompareObjectEqAndBranch>(key_to_check, key);
          lookup_if->Then();
          {
            ExternalReference cache_field_offsets_ref =
                ExternalReference::keyed_lookup_cache_field_offsets(isolate());
            HValue* cache_field_offsets =
                Add<HConstant>(cache_field_offsets_ref);
            HValue* index = AddUncasted<HAdd>(hash, Add<HConstant>(probe));
            index->ClearFlag(HValue::kCanOverflow);
            HValue* property_index =
                Add<HLoadKeyed>(cache_field_offsets, index, nullptr, cache_keys,
                                INT32_ELEMENTS, NEVER_RETURN_HOLE, 0);
            Push(property_index);
          }
          lookup_if->Else();
        }
        for (int i = 0; i < KeyedLookupCache::kEntriesPerBucket; ++i) {
          lookup_ifs[i].JoinContinuation(&inline_or_runtime_continuation);
        }
      }

      IfBuilder inline_or_runtime(this, &inline_or_runtime_continuation);
      inline_or_runtime.Then();
      {
        // Found a cached index, load property inline.
        Push(Add<HLoadFieldByIndex>(receiver, Pop()));
      }
      inline_or_runtime.Else();
      {
        // KeyedLookupCache miss; call runtime.
        Add<HPushArguments>(receiver, key);
        Push(Add<HCallRuntime>(
            Runtime::FunctionForId(is_strong(casted_stub()->language_mode())
                                       ? Runtime::kKeyedGetPropertyStrong
                                       : Runtime::kKeyedGetProperty),
            2));
      }
      inline_or_runtime.End();
    }
    if_dict_properties.End();
  }
  index_name_split.End();

  return Pop();
}


Handle<Code> KeyedLoadGenericStub::GenerateCode() {
  return DoGenerateCode(this);
}

}  // namespace internal
}  // namespace v8