// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/code-stubs.h"

#include "src/bailout-reason.h"
#include "src/crankshaft/hydrogen.h"
#include "src/crankshaft/lithium.h"
#include "src/field-index.h"
#include "src/ic/ic.h"

namespace v8 {
namespace internal {

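// Runs the Hydrogen optimization pipeline over |graph| and lowers the result
// to a Lithium chunk ready for code generation. Stub compilation has no
// deoptimization fallback at this point, so any bailout is fatal.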
static LChunk* OptimizeGraph(HGraph* graph) {
  DisallowHeapAllocation no_allocation;
  DisallowHandleAllocation no_handles;
  DisallowHandleDereference no_deref;

  DCHECK(graph != NULL);
  BailoutReason bailout_reason = kNoReason;
  if (!graph->Optimize(&bailout_reason)) {
    FATAL(GetBailoutReason(bailout_reason));
  }
  LChunk* chunk = LChunk::NewChunk(graph);
  if (chunk == NULL) {
    FATAL(GetBailoutReason(graph->info()->bailout_reason()));
  }
  return chunk;
}

35class CodeStubGraphBuilderBase : public HGraphBuilder {
36 public:
Ben Murdoch097c5b22016-05-18 11:27:45 +010037 explicit CodeStubGraphBuilderBase(CompilationInfo* info, CodeStub* code_stub)
38 : HGraphBuilder(info, code_stub->GetCallInterfaceDescriptor()),
Ben Murdochb8a8cc12014-11-26 15:28:44 +000039 arguments_length_(NULL),
Emily Bernierd0a1eb72015-03-24 16:35:39 -040040 info_(info),
Ben Murdoch097c5b22016-05-18 11:27:45 +010041 code_stub_(code_stub),
42 descriptor_(code_stub),
Ben Murdochb8a8cc12014-11-26 15:28:44 +000043 context_(NULL) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000044 int parameter_count = GetParameterCount();
Ben Murdochb8a8cc12014-11-26 15:28:44 +000045 parameters_.Reset(new HParameter*[parameter_count]);
46 }
47 virtual bool BuildGraph();
48
49 protected:
50 virtual HValue* BuildCodeStub() = 0;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000051 int GetParameterCount() const { return descriptor_.GetParameterCount(); }
52 int GetRegisterParameterCount() const {
53 return descriptor_.GetRegisterParameterCount();
54 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +000055 HParameter* GetParameter(int parameter) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000056 DCHECK(parameter < GetParameterCount());
Ben Murdochb8a8cc12014-11-26 15:28:44 +000057 return parameters_[parameter];
58 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000059 Representation GetParameterRepresentation(int parameter) {
60 return RepresentationFromType(descriptor_.GetParameterType(parameter));
61 }
62 bool IsParameterCountRegister(int index) const {
63 return descriptor_.GetRegisterParameter(index)
64 .is(descriptor_.stack_parameter_count());
65 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +000066 HValue* GetArgumentsLength() {
67 // This is initialized in BuildGraph()
68 DCHECK(arguments_length_ != NULL);
69 return arguments_length_;
70 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -040071 CompilationInfo* info() { return info_; }
Ben Murdoch097c5b22016-05-18 11:27:45 +010072 CodeStub* stub() { return code_stub_; }
Ben Murdochb8a8cc12014-11-26 15:28:44 +000073 HContext* context() { return context_; }
Emily Bernierd0a1eb72015-03-24 16:35:39 -040074 Isolate* isolate() { return info_->isolate(); }
Ben Murdochb8a8cc12014-11-26 15:28:44 +000075
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000076 HLoadNamedField* BuildLoadNamedField(HValue* object, FieldIndex index);
Ben Murdochb8a8cc12014-11-26 15:28:44 +000077 void BuildStoreNamedField(HValue* object, HValue* value, FieldIndex index,
Emily Bernierd0a1eb72015-03-24 16:35:39 -040078 Representation representation,
79 bool transition_to_field);
Ben Murdochb8a8cc12014-11-26 15:28:44 +000080
Ben Murdochda12d292016-06-02 14:46:10 +010081 HValue* BuildPushElement(HValue* object, HValue* argc,
82 HValue* argument_elements, ElementsKind kind);
83
Ben Murdochb8a8cc12014-11-26 15:28:44 +000084 enum ArgumentClass {
85 NONE,
86 SINGLE,
87 MULTIPLE
88 };
89
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000090 HValue* UnmappedCase(HValue* elements, HValue* key, HValue* value);
91 HValue* EmitKeyedSloppyArguments(HValue* receiver, HValue* key,
92 HValue* value);
Ben Murdochb8a8cc12014-11-26 15:28:44 +000093
94 HValue* BuildArrayConstructor(ElementsKind kind,
95 AllocationSiteOverrideMode override_mode,
96 ArgumentClass argument_class);
97 HValue* BuildInternalArrayConstructor(ElementsKind kind,
98 ArgumentClass argument_class);
99
100 // BuildCheckAndInstallOptimizedCode emits code to install the optimized
101 // function found in the optimized code map at map_index in js_function, if
102 // the function at map_index matches the given native_context. Builder is
103 // left in the "Then()" state after the install.
104 void BuildCheckAndInstallOptimizedCode(HValue* js_function,
105 HValue* native_context,
106 IfBuilder* builder,
107 HValue* optimized_map,
108 HValue* map_index);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000109 void BuildInstallOptimizedCode(HValue* js_function, HValue* native_context,
110 HValue* code_object, HValue* literals);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000111 void BuildInstallCode(HValue* js_function, HValue* shared_info);
112
113 HInstruction* LoadFromOptimizedCodeMap(HValue* optimized_map,
114 HValue* iterator,
115 int field_offset);
116 void BuildInstallFromOptimizedCodeMap(HValue* js_function,
117 HValue* shared_info,
118 HValue* native_context);
119
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000120 HValue* BuildToString(HValue* input, bool convert);
121 HValue* BuildToPrimitive(HValue* input, HValue* input_map);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400122
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000123 private:
124 HValue* BuildArraySingleArgumentConstructor(JSArrayBuilder* builder);
125 HValue* BuildArrayNArgumentsConstructor(JSArrayBuilder* builder,
126 ElementsKind kind);
127
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000128 base::SmartArrayPointer<HParameter*> parameters_;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000129 HValue* arguments_length_;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000130 CompilationInfo* info_;
Ben Murdoch097c5b22016-05-18 11:27:45 +0100131 CodeStub* code_stub_;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000132 CodeStubDescriptor descriptor_;
133 HContext* context_;
134};
135
136
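// Builds the common graph skeleton shared by all Hydrogen stubs: an entry
// block, HParameter instructions for the register and stack parameters
// described by the call interface descriptor, and the context binding. The
// stub-specific body comes from the BuildCodeStub() override, and the graph
// ends with an HReturn that pops the appropriate number of stack arguments.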
bool CodeStubGraphBuilderBase::BuildGraph() {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  if (FLAG_trace_hydrogen_stubs) {
    const char* name = CodeStub::MajorName(stub()->MajorKey());
    PrintF("-----------------------------------------------------------\n");
    PrintF("Compiling stub %s using hydrogen\n", name);
    isolate()->GetHTracer()->TraceCompilation(info());
  }

  int param_count = GetParameterCount();
  int register_param_count = GetRegisterParameterCount();
  HEnvironment* start_environment = graph()->start_environment();
  HBasicBlock* next_block = CreateBasicBlock(start_environment);
  Goto(next_block);
  next_block->SetJoinId(BailoutId::StubEntry());
  set_current_block(next_block);

  bool runtime_stack_params = descriptor_.stack_parameter_count().is_valid();
  HInstruction* stack_parameter_count = NULL;
  for (int i = 0; i < param_count; ++i) {
    Representation r = GetParameterRepresentation(i);
    HParameter* param;
    if (i >= register_param_count) {
      param = Add<HParameter>(i - register_param_count,
                              HParameter::STACK_PARAMETER, r);
    } else {
      param = Add<HParameter>(i, HParameter::REGISTER_PARAMETER, r);
      start_environment->Bind(i, param);
    }
    parameters_[i] = param;
    if (i < register_param_count && IsParameterCountRegister(i)) {
      param->set_type(HType::Smi());
      stack_parameter_count = param;
      arguments_length_ = stack_parameter_count;
    }
  }

  DCHECK(!runtime_stack_params || arguments_length_ != NULL);
  if (!runtime_stack_params) {
    stack_parameter_count =
        Add<HConstant>(param_count - register_param_count - 1);
    // graph()->GetConstantMinus1();
    arguments_length_ = graph()->GetConstant0();
  }

  context_ = Add<HContext>();
  start_environment->BindContext(context_);
  start_environment->Bind(param_count, context_);

  Add<HSimulate>(BailoutId::StubEntry());

  NoObservableSideEffectsScope no_effects(this);

  HValue* return_value = BuildCodeStub();

  // We might have extra expressions to pop from the stack in addition to the
  // arguments above.
  HInstruction* stack_pop_count = stack_parameter_count;
  if (descriptor_.function_mode() == JS_FUNCTION_STUB_MODE) {
    if (!stack_parameter_count->IsConstant() &&
        descriptor_.hint_stack_parameter_count() < 0) {
      HInstruction* constant_one = graph()->GetConstant1();
      stack_pop_count = AddUncasted<HAdd>(stack_parameter_count, constant_one);
      stack_pop_count->ClearFlag(HValue::kCanOverflow);
      // TODO(mvstanton): verify that stack_parameter_count+1 really fits in a
      // smi.
    } else {
      int count = descriptor_.hint_stack_parameter_count();
      stack_pop_count = Add<HConstant>(count);
    }
  }

  if (current_block() != NULL) {
    HReturn* hreturn_instruction = New<HReturn>(return_value,
                                                stack_pop_count);
    FinishCurrentBlock(hreturn_instruction);
  }
  return true;
}

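// Concrete builder for a particular stub type. The default BuildCodeStub()
// dispatches on the stub's state: uninitialized stubs force a deopt to the
// runtime, while initialized stubs are handled by the BuildCodeStub()
// template specializations further down in this file.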
template <class Stub>
class CodeStubGraphBuilder: public CodeStubGraphBuilderBase {
 public:
  explicit CodeStubGraphBuilder(CompilationInfo* info, CodeStub* stub)
      : CodeStubGraphBuilderBase(info, stub) {}

 protected:
  virtual HValue* BuildCodeStub() {
    if (casted_stub()->IsUninitialized()) {
      return BuildCodeUninitializedStub();
    } else {
      return BuildCodeInitializedStub();
    }
  }

  virtual HValue* BuildCodeInitializedStub() {
    UNIMPLEMENTED();
    return NULL;
  }

  virtual HValue* BuildCodeUninitializedStub() {
    // Force a deopt that falls back to the runtime.
    HValue* undefined = graph()->GetConstantUndefined();
    IfBuilder builder(this);
    builder.IfNot<HCompareObjectEqAndBranch, HValue*>(undefined, undefined);
    builder.Then();
    builder.ElseDeopt(Deoptimizer::kForcedDeoptToRuntime);
    return undefined;
  }

  Stub* casted_stub() { return static_cast<Stub*>(stub()); }
};

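// Assembles a minimal code object that simply calls through to the |miss|
// handler, bypassing Hydrogen compilation entirely. Used for uninitialized
// stubs, where entering the runtime this way is much cheaper than the
// stub-failure deopt mechanism (see DoGenerateCode below).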
Handle<Code> HydrogenCodeStub::GenerateLightweightMissCode(
    ExternalReference miss) {
  Factory* factory = isolate()->factory();

  // Generate the new code.
  MacroAssembler masm(isolate(), NULL, 256, CodeObjectRequired::kYes);

  {
    // Update the static counter each time a new code stub is generated.
    isolate()->counters()->code_stubs()->Increment();

    // Generate the code for the stub.
    masm.set_generating_stub(true);
    // TODO(yangguo): remove this once we can serialize IC stubs.
    masm.enable_serializer();
    NoCurrentFrameScope scope(&masm);
    GenerateLightweightMiss(&masm, miss);
  }

  // Create the code object.
  CodeDesc desc;
  masm.GetCode(&desc);

  // Copy the generated code into a heap object.
  Handle<Code> new_object = factory->NewCode(
      desc, GetCodeFlags(), masm.CodeObject(), NeedsImmovableCode());
  return new_object;
}

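// Common driver behind every GenerateCode() below: uninitialized stubs with a
// miss handler take the lightweight path above; everything else gets a full
// Hydrogen graph build (CreateGraph), optimization (OptimizeGraph), and
// Lithium code generation (LChunk::Codegen).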
template <class Stub>
static Handle<Code> DoGenerateCode(Stub* stub) {
  Isolate* isolate = stub->isolate();
  CodeStubDescriptor descriptor(stub);

  // If we are uninitialized we can use a light-weight stub to enter
  // the runtime that is significantly faster than using the standard
  // stub-failure deopt mechanism.
  if (stub->IsUninitialized() && descriptor.has_miss_handler()) {
    DCHECK(!descriptor.stack_parameter_count().is_valid());
    return stub->GenerateLightweightMissCode(descriptor.miss_handler());
  }
  base::ElapsedTimer timer;
  if (FLAG_profile_hydrogen_code_stub_compilation) {
    timer.Start();
  }
  Zone zone(isolate->allocator());
  CompilationInfo info(CodeStub::MajorName(stub->MajorKey()), isolate, &zone,
                       stub->GetCodeFlags());
  // Parameter count is number of stack parameters.
  int parameter_count = descriptor.GetStackParameterCount();
  if (descriptor.function_mode() == NOT_JS_FUNCTION_STUB_MODE) {
    parameter_count--;
  }
  info.set_parameter_count(parameter_count);
  CodeStubGraphBuilder<Stub> builder(&info, stub);
  LChunk* chunk = OptimizeGraph(builder.CreateGraph());
  Handle<Code> code = chunk->Codegen();
  if (FLAG_profile_hydrogen_code_stub_compilation) {
    OFStream os(stdout);
    os << "[Lazy compilation of " << stub << " took "
       << timer.Elapsed().InMillisecondsF() << " ms]" << std::endl;
  }
  return code;
}


template <>
HValue* CodeStubGraphBuilder<NumberToStringStub>::BuildCodeStub() {
  info()->MarkAsSavesCallerDoubles();
  HValue* number = GetParameter(NumberToStringStub::kNumber);
  return BuildNumberToString(number, Type::Number());
}


Handle<Code> NumberToStringStub::GenerateCode() {
  return DoGenerateCode(this);
}


// Returns the type string of a value; see ECMA-262, 11.4.3 (p 47).
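// The dispatch below checks, in order: smi and heap number ("number"),
// string, oddball (which stores its own typeof string), symbol, callable
// maps ("function"), the SIMD128 types, and undetectable objects
// ("undefined"), falling back to "object" for everything else.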
template <>
HValue* CodeStubGraphBuilder<TypeofStub>::BuildCodeStub() {
  Factory* factory = isolate()->factory();
  HConstant* number_string = Add<HConstant>(factory->number_string());
  HValue* object = GetParameter(TypeofStub::kObject);

  IfBuilder is_smi(this);
  HValue* smi_check = is_smi.If<HIsSmiAndBranch>(object);
  is_smi.Then();
  { Push(number_string); }
  is_smi.Else();
  {
    IfBuilder is_number(this);
    is_number.If<HCompareMap>(object, isolate()->factory()->heap_number_map());
    is_number.Then();
    { Push(number_string); }
    is_number.Else();
    {
      HValue* map = AddLoadMap(object, smi_check);
      HValue* instance_type = Add<HLoadNamedField>(
          map, nullptr, HObjectAccess::ForMapInstanceType());
      IfBuilder is_string(this);
      is_string.If<HCompareNumericAndBranch>(
          instance_type, Add<HConstant>(FIRST_NONSTRING_TYPE), Token::LT);
      is_string.Then();
      { Push(Add<HConstant>(factory->string_string())); }
      is_string.Else();
      {
        HConstant* object_string = Add<HConstant>(factory->object_string());
        IfBuilder is_oddball(this);
        is_oddball.If<HCompareNumericAndBranch>(
            instance_type, Add<HConstant>(ODDBALL_TYPE), Token::EQ);
        is_oddball.Then();
        {
          Push(Add<HLoadNamedField>(object, nullptr,
                                    HObjectAccess::ForOddballTypeOf()));
        }
        is_oddball.Else();
        {
          IfBuilder is_symbol(this);
          is_symbol.If<HCompareNumericAndBranch>(
              instance_type, Add<HConstant>(SYMBOL_TYPE), Token::EQ);
          is_symbol.Then();
          { Push(Add<HConstant>(factory->symbol_string())); }
          is_symbol.Else();
          {
            HValue* bit_field = Add<HLoadNamedField>(
                map, nullptr, HObjectAccess::ForMapBitField());
            HValue* bit_field_masked = AddUncasted<HBitwise>(
                Token::BIT_AND, bit_field,
                Add<HConstant>((1 << Map::kIsCallable) |
                               (1 << Map::kIsUndetectable)));
            IfBuilder is_function(this);
            is_function.If<HCompareNumericAndBranch>(
                bit_field_masked, Add<HConstant>(1 << Map::kIsCallable),
                Token::EQ);
            is_function.Then();
            { Push(Add<HConstant>(factory->function_string())); }
            is_function.Else();
            {
#define SIMD128_BUILDER_OPEN(TYPE, Type, type, lane_count, lane_type) \
  IfBuilder is_##type(this);                                          \
  is_##type.If<HCompareObjectEqAndBranch>(                            \
      map, Add<HConstant>(factory->type##_map()));                    \
  is_##type.Then();                                                   \
  { Push(Add<HConstant>(factory->type##_string())); }                 \
  is_##type.Else(); {
              SIMD128_TYPES(SIMD128_BUILDER_OPEN)
#undef SIMD128_BUILDER_OPEN
              // Is it an undetectable object?
              IfBuilder is_undetectable(this);
              is_undetectable.If<HCompareNumericAndBranch>(
                  bit_field_masked, graph()->GetConstant0(), Token::NE);
              is_undetectable.Then();
              {
                // typeof an undetectable object is 'undefined'.
                Push(Add<HConstant>(factory->undefined_string()));
              }
              is_undetectable.Else();
              {
                // For any kind of object not handled above, the spec rule for
                // host objects gives that it is okay to return "object".
                Push(object_string);
              }
#define SIMD128_BUILDER_CLOSE(TYPE, Type, type, lane_count, lane_type) }
              SIMD128_TYPES(SIMD128_BUILDER_CLOSE)
#undef SIMD128_BUILDER_CLOSE
            }
            is_function.End();
          }
          is_symbol.End();
        }
        is_oddball.End();
      }
      is_string.End();
    }
    is_number.End();
  }
  is_smi.End();

  return environment()->Pop();
}


Handle<Code> TypeofStub::GenerateCode() { return DoGenerateCode(this); }

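// Clones a JSRegExp boilerplate by copying its fields verbatim into a fresh
// object. The boilerplate is fetched from the closure's literals array; if it
// has not been created yet, the stub deopts and the runtime takes over.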
template <>
HValue* CodeStubGraphBuilder<FastCloneRegExpStub>::BuildCodeStub() {
  HValue* closure = GetParameter(0);
  HValue* literal_index = GetParameter(1);

  // This stub is very performance sensitive, the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();

  HValue* literals_array = Add<HLoadNamedField>(
      closure, nullptr, HObjectAccess::ForLiteralsPointer());
  HInstruction* boilerplate = Add<HLoadKeyed>(
      literals_array, literal_index, nullptr, nullptr, FAST_ELEMENTS,
      NEVER_RETURN_HOLE, LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag);

  IfBuilder if_notundefined(this);
  if_notundefined.IfNot<HCompareObjectEqAndBranch>(
      boilerplate, graph()->GetConstantUndefined());
  if_notundefined.Then();
  {
    int result_size =
        JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
    HValue* result =
        Add<HAllocate>(Add<HConstant>(result_size), HType::JSObject(),
                       NOT_TENURED, JS_REGEXP_TYPE);
    Add<HStoreNamedField>(
        result, HObjectAccess::ForMap(),
        Add<HLoadNamedField>(boilerplate, nullptr, HObjectAccess::ForMap()));
    Add<HStoreNamedField>(
        result, HObjectAccess::ForPropertiesPointer(),
        Add<HLoadNamedField>(boilerplate, nullptr,
                             HObjectAccess::ForPropertiesPointer()));
    Add<HStoreNamedField>(
        result, HObjectAccess::ForElementsPointer(),
        Add<HLoadNamedField>(boilerplate, nullptr,
                             HObjectAccess::ForElementsPointer()));
    for (int offset = JSObject::kHeaderSize; offset < result_size;
         offset += kPointerSize) {
      HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(offset);
      Add<HStoreNamedField>(result, access,
                            Add<HLoadNamedField>(boilerplate, nullptr, access));
    }
    Push(result);
  }
  if_notundefined.ElseDeopt(Deoptimizer::kUninitializedBoilerplateInFastClone);
  if_notundefined.End();

  return Pop();
}


Handle<Code> FastCloneRegExpStub::GenerateCode() {
  return DoGenerateCode(this);
}

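// Clones a boilerplate array via its allocation site. Three shapes are
// handled inline: zero-capacity arrays, copy-on-write backing stores (shared
// rather than copied), and regular FAST_ELEMENTS or FAST_DOUBLE_ELEMENTS
// backing stores.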
template <>
HValue* CodeStubGraphBuilder<FastCloneShallowArrayStub>::BuildCodeStub() {
  Factory* factory = isolate()->factory();
  HValue* undefined = graph()->GetConstantUndefined();
  AllocationSiteMode alloc_site_mode = casted_stub()->allocation_site_mode();
  HValue* closure = GetParameter(0);
  HValue* literal_index = GetParameter(1);

  // This stub is very performance sensitive, the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();

  HValue* literals_array = Add<HLoadNamedField>(
      closure, nullptr, HObjectAccess::ForLiteralsPointer());

  HInstruction* allocation_site = Add<HLoadKeyed>(
      literals_array, literal_index, nullptr, nullptr, FAST_ELEMENTS,
      NEVER_RETURN_HOLE, LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag);
  IfBuilder checker(this);
  checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
                                                    undefined);
  checker.Then();

  HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
      AllocationSite::kTransitionInfoOffset);
  HInstruction* boilerplate =
      Add<HLoadNamedField>(allocation_site, nullptr, access);
  HValue* elements = AddLoadElements(boilerplate);
  HValue* capacity = AddLoadFixedArrayLength(elements);
  IfBuilder zero_capacity(this);
  zero_capacity.If<HCompareNumericAndBranch>(capacity, graph()->GetConstant0(),
                                             Token::EQ);
  zero_capacity.Then();
  Push(BuildCloneShallowArrayEmpty(boilerplate,
                                   allocation_site,
                                   alloc_site_mode));
  zero_capacity.Else();
  IfBuilder if_fixed_cow(this);
  if_fixed_cow.If<HCompareMap>(elements, factory->fixed_cow_array_map());
  if_fixed_cow.Then();
  Push(BuildCloneShallowArrayCow(boilerplate,
                                 allocation_site,
                                 alloc_site_mode,
                                 FAST_ELEMENTS));
  if_fixed_cow.Else();
  IfBuilder if_fixed(this);
  if_fixed.If<HCompareMap>(elements, factory->fixed_array_map());
  if_fixed.Then();
  Push(BuildCloneShallowArrayNonEmpty(boilerplate,
                                      allocation_site,
                                      alloc_site_mode,
                                      FAST_ELEMENTS));

  if_fixed.Else();
  Push(BuildCloneShallowArrayNonEmpty(boilerplate,
                                      allocation_site,
                                      alloc_site_mode,
                                      FAST_DOUBLE_ELEMENTS));
  if_fixed.End();
  if_fixed_cow.End();
  zero_capacity.End();

  checker.ElseDeopt(Deoptimizer::kUninitializedBoilerplateLiterals);
  checker.End();

  return environment()->Pop();
}


Handle<Code> FastCloneShallowArrayStub::GenerateCode() {
  return DoGenerateCode(this);
}

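// Clones a boilerplate object word by word. The copy is only taken if the
// boilerplate's instance size matches what this stub was compiled for;
// otherwise the stub deopts. With allocation site pretenuring enabled, an
// AllocationMemento is appended to the clone.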
template <>
HValue* CodeStubGraphBuilder<FastCloneShallowObjectStub>::BuildCodeStub() {
  HValue* undefined = graph()->GetConstantUndefined();
  HValue* closure = GetParameter(0);
  HValue* literal_index = GetParameter(1);

  HValue* literals_array = Add<HLoadNamedField>(
      closure, nullptr, HObjectAccess::ForLiteralsPointer());

  HInstruction* allocation_site = Add<HLoadKeyed>(
      literals_array, literal_index, nullptr, nullptr, FAST_ELEMENTS,
      NEVER_RETURN_HOLE, LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag);

  IfBuilder checker(this);
  checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
                                                    undefined);
  checker.And();

  HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
      AllocationSite::kTransitionInfoOffset);
  HInstruction* boilerplate =
      Add<HLoadNamedField>(allocation_site, nullptr, access);

  int length = casted_stub()->length();
  if (length == 0) {
    // Empty objects have some slack added to them.
    length = JSObject::kInitialGlobalObjectUnusedPropertiesCount;
  }
  int size = JSObject::kHeaderSize + length * kPointerSize;
  int object_size = size;
  if (FLAG_allocation_site_pretenuring) {
    size += AllocationMemento::kSize;
  }

  HValue* boilerplate_map =
      Add<HLoadNamedField>(boilerplate, nullptr, HObjectAccess::ForMap());
  HValue* boilerplate_size = Add<HLoadNamedField>(
      boilerplate_map, nullptr, HObjectAccess::ForMapInstanceSize());
  HValue* size_in_words = Add<HConstant>(object_size >> kPointerSizeLog2);
  checker.If<HCompareNumericAndBranch>(boilerplate_size,
                                       size_in_words, Token::EQ);
  checker.Then();

  HValue* size_in_bytes = Add<HConstant>(size);

  HInstruction* object = Add<HAllocate>(size_in_bytes, HType::JSObject(),
                                        NOT_TENURED, JS_OBJECT_TYPE);

  for (int i = 0; i < object_size; i += kPointerSize) {
    HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(i);
    Add<HStoreNamedField>(object, access,
                          Add<HLoadNamedField>(boilerplate, nullptr, access));
  }

  DCHECK(FLAG_allocation_site_pretenuring || (size == object_size));
  if (FLAG_allocation_site_pretenuring) {
    BuildCreateAllocationMemento(
        object, Add<HConstant>(object_size), allocation_site);
  }

  environment()->Push(object);
  checker.ElseDeopt(Deoptimizer::kUninitializedBoilerplateInFastClone);
  checker.End();

  return environment()->Pop();
}


Handle<Code> FastCloneShallowObjectStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<CreateAllocationSiteStub>::BuildCodeStub() {
  // This stub is performance sensitive, the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();

  HValue* size = Add<HConstant>(AllocationSite::kSize);
  HInstruction* object = Add<HAllocate>(size, HType::JSObject(), TENURED,
                                        JS_OBJECT_TYPE);

  // Store the map
  Handle<Map> allocation_site_map = isolate()->factory()->allocation_site_map();
  AddStoreMapConstant(object, allocation_site_map);

  // Store the payload (smi elements kind)
  HValue* initial_elements_kind = Add<HConstant>(GetInitialFastElementsKind());
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kTransitionInfoOffset),
                        initial_elements_kind);

  // Unlike literals, constructed arrays don't have nested sites
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kNestedSiteOffset),
                        graph()->GetConstant0());

  // Pretenuring calculation field.
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kPretenureDataOffset),
                        graph()->GetConstant0());

  // Pretenuring memento creation count field.
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kPretenureCreateCountOffset),
                        graph()->GetConstant0());

  // Store an empty fixed array for the code dependency.
  HConstant* empty_fixed_array =
      Add<HConstant>(isolate()->factory()->empty_fixed_array());
  Add<HStoreNamedField>(
      object,
      HObjectAccess::ForAllocationSiteOffset(
          AllocationSite::kDependentCodeOffset),
      empty_fixed_array);

  // Link the object to the allocation site list
  HValue* site_list = Add<HConstant>(
      ExternalReference::allocation_sites_list_address(isolate()));
  HValue* site = Add<HLoadNamedField>(site_list, nullptr,
                                      HObjectAccess::ForAllocationSiteList());
  // TODO(mvstanton): This is a store to a weak pointer, which we may want to
  // mark as such in order to skip the write barrier, once we have a unified
  // system for weakness. For now we decided to keep it like this because having
  // an initial write barrier backed store makes this pointer strong until the
  // next GC, and allocation sites are designed to survive several GCs anyway.
  Add<HStoreNamedField>(
      object,
      HObjectAccess::ForAllocationSiteOffset(AllocationSite::kWeakNextOffset),
      site);
  Add<HStoreNamedField>(site_list, HObjectAccess::ForAllocationSiteList(),
                        object);

  HInstruction* feedback_vector = GetParameter(0);
  HInstruction* slot = GetParameter(1);
  Add<HStoreKeyed>(feedback_vector, slot, object, nullptr, FAST_ELEMENTS,
                   INITIALIZING_STORE);
  return feedback_vector;
}


Handle<Code> CreateAllocationSiteStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<CreateWeakCellStub>::BuildCodeStub() {
  // This stub is performance sensitive, the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();

  HValue* size = Add<HConstant>(WeakCell::kSize);
  HInstruction* object =
      Add<HAllocate>(size, HType::JSObject(), TENURED, JS_OBJECT_TYPE);

  Handle<Map> weak_cell_map = isolate()->factory()->weak_cell_map();
  AddStoreMapConstant(object, weak_cell_map);

  HInstruction* value = GetParameter(CreateWeakCellDescriptor::kValueIndex);
  Add<HStoreNamedField>(object, HObjectAccess::ForWeakCellValue(), value);
  Add<HStoreNamedField>(object, HObjectAccess::ForWeakCellNext(),
                        graph()->GetConstantHole());

  HInstruction* feedback_vector =
      GetParameter(CreateWeakCellDescriptor::kVectorIndex);
  HInstruction* slot = GetParameter(CreateWeakCellDescriptor::kSlotIndex);
  Add<HStoreKeyed>(feedback_vector, slot, object, nullptr, FAST_ELEMENTS,
                   INITIALIZING_STORE);
  return graph()->GetConstant0();
}


Handle<Code> CreateWeakCellStub::GenerateCode() { return DoGenerateCode(this); }


template <>
HValue* CodeStubGraphBuilder<LoadScriptContextFieldStub>::BuildCodeStub() {
  int context_index = casted_stub()->context_index();
  int slot_index = casted_stub()->slot_index();

  HValue* script_context = BuildGetScriptContext(context_index);
  return Add<HLoadNamedField>(script_context, nullptr,
                              HObjectAccess::ForContextSlot(slot_index));
}


Handle<Code> LoadScriptContextFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<StoreScriptContextFieldStub>::BuildCodeStub() {
  int context_index = casted_stub()->context_index();
  int slot_index = casted_stub()->slot_index();

  HValue* script_context = BuildGetScriptContext(context_index);
  Add<HStoreNamedField>(script_context,
                        HObjectAccess::ForContextSlot(slot_index),
                        GetParameter(2), STORE_TO_INITIALIZED_ENTRY);
  return GetParameter(2);
}


Handle<Code> StoreScriptContextFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}

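// Emits the element-kind-specific tail of Array.prototype.push: for smi and
// double kinds, a first pass deopts if any argument would require an elements
// transition; afterwards the backing store is grown if necessary and the
// arguments are stored past the old length. Returns the new length.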
HValue* CodeStubGraphBuilderBase::BuildPushElement(HValue* object, HValue* argc,
                                                   HValue* argument_elements,
                                                   ElementsKind kind) {
  // Precheck whether all elements fit into the array.
  if (!IsFastObjectElementsKind(kind)) {
    LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);
    HValue* start = graph()->GetConstant0();
    HValue* key = builder.BeginBody(start, argc, Token::LT);
    {
      HInstruction* argument =
          Add<HAccessArgumentsAt>(argument_elements, argc, key);
      IfBuilder can_store(this);
      can_store.IfNot<HIsSmiAndBranch>(argument);
      if (IsFastDoubleElementsKind(kind)) {
        can_store.And();
        can_store.IfNot<HCompareMap>(argument,
                                     isolate()->factory()->heap_number_map());
      }
      can_store.ThenDeopt(Deoptimizer::kFastArrayPushFailed);
      can_store.End();
    }
    builder.EndBody();
  }

  HValue* length = Add<HLoadNamedField>(object, nullptr,
                                        HObjectAccess::ForArrayLength(kind));
  HValue* new_length = AddUncasted<HAdd>(length, argc);
  HValue* max_key = AddUncasted<HSub>(new_length, graph()->GetConstant1());

  HValue* elements = Add<HLoadNamedField>(object, nullptr,
                                          HObjectAccess::ForElementsPointer());
  elements = BuildCheckForCapacityGrow(object, elements, kind, length, max_key,
                                       true, STORE);

  LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);
  HValue* start = graph()->GetConstant0();
  HValue* key = builder.BeginBody(start, argc, Token::LT);
  {
    HValue* argument = Add<HAccessArgumentsAt>(argument_elements, argc, key);
    HValue* index = AddUncasted<HAdd>(key, length);
    AddElementAccess(elements, index, argument, object, nullptr, kind, STORE);
  }
  builder.EndBody();
  return new_length;
}

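// Fast path for Array.prototype.push. The receiver must be a JSArray that is
// extensible, not a prototype, not observed, not in dictionary properties
// mode, with a writable length, and whose entire prototype chain up to null
// has empty elements; any violation deopts to the generic path.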
template <>
HValue* CodeStubGraphBuilder<FastArrayPushStub>::BuildCodeStub() {
  // TODO(verwaest): Fix deoptimizer messages.
  HValue* argc = GetArgumentsLength();
  HInstruction* argument_elements = Add<HArgumentsElements>(false, false);
  HInstruction* object = Add<HAccessArgumentsAt>(argument_elements, argc,
                                                 graph()->GetConstantMinus1());
  BuildCheckHeapObject(object);
  HValue* map = Add<HLoadNamedField>(object, nullptr, HObjectAccess::ForMap());
  Add<HCheckInstanceType>(object, HCheckInstanceType::IS_JS_ARRAY);

  // Disallow pushing onto prototypes. It might be the JSArray prototype.
  // Disallow pushing onto non-extensible objects.
  {
    HValue* bit_field2 =
        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField2());
    HValue* mask =
        Add<HConstant>(static_cast<int>(Map::IsPrototypeMapBits::kMask) |
                       (1 << Map::kIsExtensible));
    HValue* bits = AddUncasted<HBitwise>(Token::BIT_AND, bit_field2, mask);
    IfBuilder check(this);
    check.If<HCompareNumericAndBranch>(
        bits, Add<HConstant>(1 << Map::kIsExtensible), Token::NE);
    check.ThenDeopt(Deoptimizer::kFastArrayPushFailed);
    check.End();
  }

  // Disallow pushing onto observed objects.
  {
    HValue* bit_field =
        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField());
    HValue* mask = Add<HConstant>(1 << Map::kIsObserved);
    HValue* bit = AddUncasted<HBitwise>(Token::BIT_AND, bit_field, mask);
    IfBuilder check(this);
    check.If<HCompareNumericAndBranch>(bit, mask, Token::EQ);
    check.ThenDeopt(Deoptimizer::kFastArrayPushFailed);
    check.End();
  }

  // Disallow pushing onto arrays in dictionary named property mode. We need to
  // figure out whether the length property is still writable.
  {
    HValue* bit_field3 =
        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField3());
    HValue* mask = Add<HConstant>(static_cast<int>(Map::DictionaryMap::kMask));
    HValue* bit = AddUncasted<HBitwise>(Token::BIT_AND, bit_field3, mask);
    IfBuilder check(this);
    check.If<HCompareNumericAndBranch>(bit, mask, Token::EQ);
    check.ThenDeopt(Deoptimizer::kFastArrayPushFailed);
    check.End();
  }

  // Check whether the length property is writable. The length property is the
  // only default named property on arrays. It's nonconfigurable, hence is
  // guaranteed to stay the first property.
  {
    HValue* descriptors =
        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapDescriptors());
    HValue* details = Add<HLoadKeyed>(
        descriptors, Add<HConstant>(DescriptorArray::ToDetailsIndex(0)),
        nullptr, nullptr, FAST_SMI_ELEMENTS);
    HValue* mask =
        Add<HConstant>(READ_ONLY << PropertyDetails::AttributesField::kShift);
    HValue* bit = AddUncasted<HBitwise>(Token::BIT_AND, details, mask);
    IfBuilder readonly(this);
    readonly.If<HCompareNumericAndBranch>(bit, mask, Token::EQ);
    readonly.ThenDeopt(Deoptimizer::kFastArrayPushFailed);
    readonly.End();
  }

  HValue* null = Add<HLoadRoot>(Heap::kNullValueRootIndex);
  HValue* empty = Add<HLoadRoot>(Heap::kEmptyFixedArrayRootIndex);
  environment()->Push(map);
  LoopBuilder check_prototypes(this);
  check_prototypes.BeginBody(1);
  {
    HValue* parent_map = environment()->Pop();
    HValue* prototype = Add<HLoadNamedField>(parent_map, nullptr,
                                             HObjectAccess::ForPrototype());

    IfBuilder is_null(this);
    is_null.If<HCompareObjectEqAndBranch>(prototype, null);
    is_null.Then();
    check_prototypes.Break();
    is_null.End();

    HValue* prototype_map =
        Add<HLoadNamedField>(prototype, nullptr, HObjectAccess::ForMap());
    HValue* instance_type = Add<HLoadNamedField>(
        prototype_map, nullptr, HObjectAccess::ForMapInstanceType());
    IfBuilder check_instance_type(this);
    check_instance_type.If<HCompareNumericAndBranch>(
        instance_type, Add<HConstant>(LAST_CUSTOM_ELEMENTS_RECEIVER),
        Token::LTE);
    check_instance_type.ThenDeopt(Deoptimizer::kFastArrayPushFailed);
    check_instance_type.End();

    HValue* elements = Add<HLoadNamedField>(
        prototype, nullptr, HObjectAccess::ForElementsPointer());
    IfBuilder no_elements(this);
    no_elements.IfNot<HCompareObjectEqAndBranch>(elements, empty);
    no_elements.ThenDeopt(Deoptimizer::kFastArrayPushFailed);
    no_elements.End();

    environment()->Push(prototype_map);
  }
  check_prototypes.EndBody();

  HValue* bit_field2 =
      Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField2());
  HValue* kind = BuildDecodeField<Map::ElementsKindBits>(bit_field2);

  // Below we only check the upper bound of the relevant ranges to include both
  // holey and non-holey versions. We check them in order smi, object, double
  // since smi < object < double.
  STATIC_ASSERT(FAST_SMI_ELEMENTS < FAST_HOLEY_SMI_ELEMENTS);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS < FAST_HOLEY_ELEMENTS);
  STATIC_ASSERT(FAST_ELEMENTS < FAST_HOLEY_ELEMENTS);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS < FAST_HOLEY_DOUBLE_ELEMENTS);
  STATIC_ASSERT(FAST_DOUBLE_ELEMENTS < FAST_HOLEY_DOUBLE_ELEMENTS);
  IfBuilder has_smi_elements(this);
  has_smi_elements.If<HCompareNumericAndBranch>(
      kind, Add<HConstant>(FAST_HOLEY_SMI_ELEMENTS), Token::LTE);
  has_smi_elements.Then();
  {
    HValue* new_length = BuildPushElement(object, argc, argument_elements,
                                          FAST_HOLEY_SMI_ELEMENTS);
    environment()->Push(new_length);
  }
  has_smi_elements.Else();
  {
    IfBuilder has_object_elements(this);
    has_object_elements.If<HCompareNumericAndBranch>(
        kind, Add<HConstant>(FAST_HOLEY_ELEMENTS), Token::LTE);
    has_object_elements.Then();
    {
      HValue* new_length = BuildPushElement(object, argc, argument_elements,
                                            FAST_HOLEY_ELEMENTS);
      environment()->Push(new_length);
    }
    has_object_elements.Else();
    {
      IfBuilder has_double_elements(this);
      has_double_elements.If<HCompareNumericAndBranch>(
          kind, Add<HConstant>(FAST_HOLEY_DOUBLE_ELEMENTS), Token::LTE);
      has_double_elements.Then();
      {
        HValue* new_length = BuildPushElement(object, argc, argument_elements,
                                              FAST_HOLEY_DOUBLE_ELEMENTS);
        environment()->Push(new_length);
      }
      has_double_elements.ElseDeopt(Deoptimizer::kFastArrayPushFailed);
      has_double_elements.End();
    }
    has_object_elements.End();
  }
  has_smi_elements.End();

  return environment()->Pop();
}

Handle<Code> FastArrayPushStub::GenerateCode() { return DoGenerateCode(this); }

template <>
HValue* CodeStubGraphBuilder<GrowArrayElementsStub>::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  if (IsFastDoubleElementsKind(kind)) {
    info()->MarkAsSavesCallerDoubles();
  }

  HValue* object = GetParameter(GrowArrayElementsDescriptor::kObjectIndex);
  HValue* key = GetParameter(GrowArrayElementsDescriptor::kKeyIndex);

  HValue* elements = AddLoadElements(object);
  HValue* current_capacity = Add<HLoadNamedField>(
      elements, nullptr, HObjectAccess::ForFixedArrayLength());

  HValue* length =
      casted_stub()->is_js_array()
          ? Add<HLoadNamedField>(object, static_cast<HValue*>(NULL),
                                 HObjectAccess::ForArrayLength(kind))
          : current_capacity;

  return BuildCheckAndGrowElementsCapacity(object, elements, kind, length,
                                           current_capacity, key);
}


Handle<Code> GrowArrayElementsStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<LoadFastElementStub>::BuildCodeStub() {
  LoadKeyedHoleMode hole_mode = casted_stub()->convert_hole_to_undefined()
                                    ? CONVERT_HOLE_TO_UNDEFINED
                                    : NEVER_RETURN_HOLE;

  HInstruction* load = BuildUncheckedMonomorphicElementAccess(
      GetParameter(LoadDescriptor::kReceiverIndex),
      GetParameter(LoadDescriptor::kNameIndex), NULL,
      casted_stub()->is_js_array(), casted_stub()->elements_kind(), LOAD,
      hole_mode, STANDARD_STORE);
  return load;
}


Handle<Code> LoadFastElementStub::GenerateCode() {
  return DoGenerateCode(this);
}


HLoadNamedField* CodeStubGraphBuilderBase::BuildLoadNamedField(
    HValue* object, FieldIndex index) {
  Representation representation = index.is_double()
      ? Representation::Double()
      : Representation::Tagged();
  int offset = index.offset();
  HObjectAccess access = index.is_inobject()
      ? HObjectAccess::ForObservableJSObjectOffset(offset, representation)
      : HObjectAccess::ForBackingStoreOffset(offset, representation);
  if (index.is_double() &&
      (!FLAG_unbox_double_fields || !index.is_inobject())) {
    // Load the heap number.
    object = Add<HLoadNamedField>(
        object, nullptr, access.WithRepresentation(Representation::Tagged()));
    // Load the double value from it.
    access = HObjectAccess::ForHeapNumberValue();
  }
  return Add<HLoadNamedField>(object, nullptr, access);
}


template<>
HValue* CodeStubGraphBuilder<LoadFieldStub>::BuildCodeStub() {
  return BuildLoadNamedField(GetParameter(0), casted_stub()->index());
}


Handle<Code> LoadFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<ArrayBufferViewLoadFieldStub>::BuildCodeStub() {
  return BuildArrayBufferViewFieldAccessor(GetParameter(0), nullptr,
                                           casted_stub()->index());
}


Handle<Code> ArrayBufferViewLoadFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<LoadConstantStub>::BuildCodeStub() {
  HValue* map = AddLoadMap(GetParameter(0), NULL);
  HObjectAccess descriptors_access = HObjectAccess::ForObservableJSObjectOffset(
      Map::kDescriptorsOffset, Representation::Tagged());
  HValue* descriptors = Add<HLoadNamedField>(map, nullptr, descriptors_access);
  HObjectAccess value_access = HObjectAccess::ForObservableJSObjectOffset(
      DescriptorArray::GetValueOffset(casted_stub()->constant_index()));
  return Add<HLoadNamedField>(descriptors, nullptr, value_access);
}


Handle<Code> LoadConstantStub::GenerateCode() { return DoGenerateCode(this); }

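// Accesses the unmapped (regular) arguments backing store held at elements[1]
// of a sloppy-arguments array. A NULL |value| means a load; otherwise |value|
// is stored at |key|. Keys beyond the backing store's length deopt to the
// runtime.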
HValue* CodeStubGraphBuilderBase::UnmappedCase(HValue* elements, HValue* key,
                                               HValue* value) {
  HValue* result = NULL;
  HInstruction* backing_store =
      Add<HLoadKeyed>(elements, graph()->GetConstant1(), nullptr, nullptr,
                      FAST_ELEMENTS, ALLOW_RETURN_HOLE);
  Add<HCheckMaps>(backing_store, isolate()->factory()->fixed_array_map());
  HValue* backing_store_length = Add<HLoadNamedField>(
      backing_store, nullptr, HObjectAccess::ForFixedArrayLength());
  IfBuilder in_unmapped_range(this);
  in_unmapped_range.If<HCompareNumericAndBranch>(key, backing_store_length,
                                                 Token::LT);
  in_unmapped_range.Then();
  {
    if (value == NULL) {
      result = Add<HLoadKeyed>(backing_store, key, nullptr, nullptr,
                               FAST_HOLEY_ELEMENTS, NEVER_RETURN_HOLE);
    } else {
      Add<HStoreKeyed>(backing_store, key, value, nullptr, FAST_HOLEY_ELEMENTS);
    }
  }
  in_unmapped_range.ElseDeopt(Deoptimizer::kOutsideOfRange);
  in_unmapped_range.End();
  return result;
}


HValue* CodeStubGraphBuilderBase::EmitKeyedSloppyArguments(HValue* receiver,
                                                           HValue* key,
                                                           HValue* value) {
  // Mapped arguments are actual arguments. Unmapped arguments are values added
  // to the arguments object after it was created for the call. Mapped arguments
  // are stored in the context at indexes given by elements[key + 2]. Unmapped
  // arguments are stored as regular indexed properties in the arguments array,
  // held at elements[1]. See NewSloppyArguments() in runtime.cc for a detailed
  // look at argument object construction.
  //
  // The sloppy arguments elements array has a special format:
  //
  // 0: context
  // 1: unmapped arguments array
  // 2: mapped_index0,
  // 3: mapped_index1,
  // ...
  //
  // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments).
  // If key + 2 >= elements.length then attempt to look in the unmapped
  // arguments array (given by elements[1]) and return the value at key, missing
  // to the runtime if the unmapped arguments array is not a fixed array or if
  // key >= unmapped_arguments_array.length.
  //
  // Otherwise, t = elements[key + 2]. If t is the hole, then look up the value
  // in the unmapped arguments array, as described above. Otherwise, t is a Smi
  // index into the context array given at elements[0]. Return the value at
  // context[t].

  bool is_load = value == NULL;

  key = AddUncasted<HForceRepresentation>(key, Representation::Smi());
  IfBuilder positive_smi(this);
  positive_smi.If<HCompareNumericAndBranch>(key, graph()->GetConstant0(),
                                            Token::LT);
  positive_smi.ThenDeopt(Deoptimizer::kKeyIsNegative);
  positive_smi.End();

  HValue* constant_two = Add<HConstant>(2);
  HValue* elements = AddLoadElements(receiver, nullptr);
  HValue* elements_length = Add<HLoadNamedField>(
      elements, nullptr, HObjectAccess::ForFixedArrayLength());
  HValue* adjusted_length = AddUncasted<HSub>(elements_length, constant_two);
  IfBuilder in_range(this);
  in_range.If<HCompareNumericAndBranch>(key, adjusted_length, Token::LT);
  in_range.Then();
  {
    HValue* index = AddUncasted<HAdd>(key, constant_two);
    HInstruction* mapped_index =
        Add<HLoadKeyed>(elements, index, nullptr, nullptr, FAST_HOLEY_ELEMENTS,
                        ALLOW_RETURN_HOLE);

    IfBuilder is_valid(this);
    is_valid.IfNot<HCompareObjectEqAndBranch>(mapped_index,
                                              graph()->GetConstantHole());
    is_valid.Then();
    {
      // TODO(mvstanton): I'd like to assert from this point, that if the
      // mapped_index is not the hole that it is indeed, a smi. An unnecessary
      // smi check is being emitted.
      HValue* the_context = Add<HLoadKeyed>(elements, graph()->GetConstant0(),
                                            nullptr, nullptr, FAST_ELEMENTS);
      STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize);
      if (is_load) {
        HValue* result =
            Add<HLoadKeyed>(the_context, mapped_index, nullptr, nullptr,
                            FAST_ELEMENTS, ALLOW_RETURN_HOLE);
        environment()->Push(result);
      } else {
        DCHECK(value != NULL);
        Add<HStoreKeyed>(the_context, mapped_index, value, nullptr,
                         FAST_ELEMENTS);
        environment()->Push(value);
      }
    }
    is_valid.Else();
    {
      HValue* result = UnmappedCase(elements, key, value);
      environment()->Push(is_load ? result : value);
    }
    is_valid.End();
  }
  in_range.Else();
  {
    HValue* result = UnmappedCase(elements, key, value);
    environment()->Push(is_load ? result : value);
  }
  in_range.End();

  return environment()->Pop();
}


template <>
HValue* CodeStubGraphBuilder<KeyedLoadSloppyArgumentsStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
  HValue* key = GetParameter(LoadDescriptor::kNameIndex);

  return EmitKeyedSloppyArguments(receiver, key, NULL);
}


Handle<Code> KeyedLoadSloppyArgumentsStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<KeyedStoreSloppyArgumentsStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(StoreDescriptor::kReceiverIndex);
  HValue* key = GetParameter(StoreDescriptor::kNameIndex);
  HValue* value = GetParameter(StoreDescriptor::kValueIndex);

  return EmitKeyedSloppyArguments(receiver, key, value);
}


Handle<Code> KeyedStoreSloppyArgumentsStub::GenerateCode() {
  return DoGenerateCode(this);
}

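// Stores |value| into the field described by |index|. Boxed double fields
// (i.e. when doubles are not stored unboxed in-object) need extra care: a
// field transition allocates a fresh mutable HeapNumber, while a plain store
// writes into the existing one. Heap-object representations are first checked
// to be non-smi values.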
void CodeStubGraphBuilderBase::BuildStoreNamedField(
    HValue* object, HValue* value, FieldIndex index,
    Representation representation, bool transition_to_field) {
  DCHECK(!index.is_double() || representation.IsDouble());
  int offset = index.offset();
  HObjectAccess access =
      index.is_inobject()
          ? HObjectAccess::ForObservableJSObjectOffset(offset, representation)
          : HObjectAccess::ForBackingStoreOffset(offset, representation);

  if (representation.IsDouble()) {
    if (!FLAG_unbox_double_fields || !index.is_inobject()) {
      HObjectAccess heap_number_access =
          access.WithRepresentation(Representation::Tagged());
      if (transition_to_field) {
        // The store requires a mutable HeapNumber to be allocated.
        NoObservableSideEffectsScope no_side_effects(this);
        HInstruction* heap_number_size = Add<HConstant>(HeapNumber::kSize);

        // TODO(hpayer): Allocation site pretenuring support.
        HInstruction* heap_number =
            Add<HAllocate>(heap_number_size, HType::HeapObject(), NOT_TENURED,
                           MUTABLE_HEAP_NUMBER_TYPE);
        AddStoreMapConstant(heap_number,
                            isolate()->factory()->mutable_heap_number_map());
        Add<HStoreNamedField>(heap_number, HObjectAccess::ForHeapNumberValue(),
                              value);
        // Store the new mutable heap number into the object.
        access = heap_number_access;
        value = heap_number;
      } else {
        // Load the heap number.
        object = Add<HLoadNamedField>(object, nullptr, heap_number_access);
        // Store the double value into it.
        access = HObjectAccess::ForHeapNumberValue();
      }
    }
  } else if (representation.IsHeapObject()) {
    BuildCheckHeapObject(value);
  }

  Add<HStoreNamedField>(object, access, value, INITIALIZING_STORE);
}


template <>
HValue* CodeStubGraphBuilder<StoreFieldStub>::BuildCodeStub() {
  BuildStoreNamedField(GetParameter(0), GetParameter(2), casted_stub()->index(),
                       casted_stub()->representation(), false);
  return GetParameter(2);
}


Handle<Code> StoreFieldStub::GenerateCode() { return DoGenerateCode(this); }

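// The three store modes form a strict superset chain, so the switch below
// deliberately falls through: extending the backing store implies storing the
// value, and storing the value implies updating the map.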
1309template <>
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001310HValue* CodeStubGraphBuilder<StoreTransitionStub>::BuildCodeStub() {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001311 HValue* object = GetParameter(StoreTransitionHelper::ReceiverIndex());
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001312
1313 switch (casted_stub()->store_mode()) {
1314 case StoreTransitionStub::ExtendStorageAndStoreMapAndValue: {
1315 HValue* properties = Add<HLoadNamedField>(
1316 object, nullptr, HObjectAccess::ForPropertiesPointer());
1317 HValue* length = AddLoadFixedArrayLength(properties);
1318 HValue* delta =
1319 Add<HConstant>(static_cast<int32_t>(JSObject::kFieldsAdded));
1320 HValue* new_capacity = AddUncasted<HAdd>(length, delta);
1321
1322 // Grow properties array.
1323 ElementsKind kind = FAST_ELEMENTS;
1324 Add<HBoundsCheck>(new_capacity,
1325 Add<HConstant>((Page::kMaxRegularHeapObjectSize -
1326 FixedArray::kHeaderSize) >>
1327 ElementsKindToShiftSize(kind)));
1328
1329 // Reuse this code for properties backing store allocation.
1330 HValue* new_properties =
1331 BuildAllocateAndInitializeArray(kind, new_capacity);
1332
1333 BuildCopyProperties(properties, new_properties, length, new_capacity);
1334
1335 Add<HStoreNamedField>(object, HObjectAccess::ForPropertiesPointer(),
1336 new_properties);
1337 }
1338 // Fall through.
1339 case StoreTransitionStub::StoreMapAndValue:
1340 // Store the new value into the "extended" object.
1341 BuildStoreNamedField(
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001342 object, GetParameter(StoreTransitionHelper::ValueIndex()),
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001343 casted_stub()->index(), casted_stub()->representation(), true);
1344 // Fall through.
1345
1346 case StoreTransitionStub::StoreMapOnly:
1347 // And finally update the map.
1348 Add<HStoreNamedField>(object, HObjectAccess::ForMap(),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001349 GetParameter(StoreTransitionHelper::MapIndex()));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001350 break;
1351 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001352 return GetParameter(StoreTransitionHelper::ValueIndex());
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001353}
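
// The bounds check above caps the grown backing store at the largest
// FixedArray payload that still fits in a regular heap page. A sketch of
// the arithmetic, assuming a 64-bit build where
// ElementsKindToShiftSize(FAST_ELEMENTS) == 3 (tagged 8-byte elements; the
// concrete shift is an assumption here, the formula is not):
//
//   max_capacity = (Page::kMaxRegularHeapObjectSize -
//                   FixedArray::kHeaderSize) >> 3;
//
// i.e. the bytes available after the FixedArray header divided by the
// element size, while new_capacity itself is length + JSObject::kFieldsAdded.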


Handle<Code> StoreTransitionStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<StoreFastElementStub>::BuildCodeStub() {
  BuildUncheckedMonomorphicElementAccess(
      GetParameter(StoreDescriptor::kReceiverIndex),
      GetParameter(StoreDescriptor::kNameIndex),
      GetParameter(StoreDescriptor::kValueIndex), casted_stub()->is_js_array(),
      casted_stub()->elements_kind(), STORE, NEVER_RETURN_HOLE,
      casted_stub()->store_mode());

  return GetParameter(2);
}


Handle<Code> StoreFastElementStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<TransitionElementsKindStub>::BuildCodeStub() {
  info()->MarkAsSavesCallerDoubles();

  BuildTransitionElementsKind(GetParameter(0),
                              GetParameter(1),
                              casted_stub()->from_kind(),
                              casted_stub()->to_kind(),
                              casted_stub()->is_js_array());

  return GetParameter(0);
}


Handle<Code> TransitionElementsKindStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<AllocateInNewSpaceStub>::BuildCodeStub() {
  HValue* result = Add<HAllocate>(GetParameter(0), HType::Tagged(), NOT_TENURED,
                                  JS_OBJECT_TYPE);
  return result;
}


Handle<Code> AllocateInNewSpaceStub::GenerateCode() {
  return DoGenerateCode(this);
}


HValue* CodeStubGraphBuilderBase::BuildArrayConstructor(
    ElementsKind kind,
    AllocationSiteOverrideMode override_mode,
    ArgumentClass argument_class) {
  HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor);
  HValue* alloc_site = GetParameter(ArrayConstructorStubBase::kAllocationSite);
  JSArrayBuilder array_builder(this, kind, alloc_site, constructor,
                               override_mode);
  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      // This stub is very performance sensitive; the generated code must be
      // tuned so that it doesn't build an eager frame.
      info()->MarkMustNotHaveEagerFrame();
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }

  return result;
}
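
// The ArgumentClass cases roughly correspond to the JS call shapes the
// specialized Array constructor stubs are generated for (an illustrative
// mapping, not an exhaustive one):
//
//   new Array()        -> NONE     (empty array; must not build an eager frame)
//   new Array(len)     -> SINGLE   (single argument treated as a length)
//   new Array(a, b, c) -> MULTIPLE (arguments copied into the elements)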


HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor(
    ElementsKind kind, ArgumentClass argument_class) {
  HValue* constructor = GetParameter(
      InternalArrayConstructorStubBase::kConstructor);
  JSArrayBuilder array_builder(this, kind, constructor);

  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      // This stub is very performance sensitive; the generated code must be
      // tuned so that it doesn't build an eager frame.
      info()->MarkMustNotHaveEagerFrame();
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }
  return result;
}


HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor(
    JSArrayBuilder* array_builder) {
  // Smi check and range check on the input arg.
  HValue* constant_one = graph()->GetConstant1();
  HValue* constant_zero = graph()->GetConstant0();

  HInstruction* elements = Add<HArgumentsElements>(false);
  HInstruction* argument = Add<HAccessArgumentsAt>(
      elements, constant_one, constant_zero);

  return BuildAllocateArrayFromLength(array_builder, argument);
}


HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor(
    JSArrayBuilder* array_builder, ElementsKind kind) {
  // Insert a bounds check because the number of arguments might exceed
  // the kInitialMaxFastElementArray limit. This cannot happen for code
  // that was parsed, but calling via Array.apply(thisArg, [...]) might
  // trigger it.
  HValue* length = GetArgumentsLength();
  HConstant* max_alloc_length =
      Add<HConstant>(JSArray::kInitialMaxFastElementArray);
  HValue* checked_length = Add<HBoundsCheck>(length, max_alloc_length);

  // In the multi-argument case a smi array must be filled with the hole:
  // copying the arguments into the array can bail out part-way if an
  // argument is not compatible with a smi array. A double array poses no
  // problem, and neither does a fast (tagged) array, because doubles are
  // boxed there.
  //
  // TODO(mvstanton): consider an instruction to memset fill the array
  // with zero in this case instead.
  JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind)
      ? JSArrayBuilder::FILL_WITH_HOLE
      : JSArrayBuilder::DONT_FILL_WITH_HOLE;
  HValue* new_object = array_builder->AllocateArray(checked_length,
                                                    max_alloc_length,
                                                    checked_length,
                                                    fill_mode);
  HValue* elements = array_builder->GetElementsLocation();
  DCHECK(elements != NULL);

  // Now populate the elements correctly.
  LoopBuilder builder(this,
                      context(),
                      LoopBuilder::kPostIncrement);
  HValue* start = graph()->GetConstant0();
  HValue* key = builder.BeginBody(start, checked_length, Token::LT);
  HInstruction* argument_elements = Add<HArgumentsElements>(false);
  HInstruction* argument = Add<HAccessArgumentsAt>(
      argument_elements, checked_length, key);

  Add<HStoreKeyed>(elements, key, argument, nullptr, kind);
  builder.EndBody();
  return new_object;
}
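
// Illustrative JS-level scenario for the hole-filling above (example only):
// with smi-array feedback, a call like
//
//   new Array(1, 2, "three")
//
// copies 1 and 2, then bails out on "three", which is not smi-compatible.
// Because the smi backing store was pre-filled with the hole, the partially
// populated array is still in a consistent state when the runtime takes
// over; double and tagged arrays never hit this, so they skip the fill.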


template <>
HValue* CodeStubGraphBuilder<ArrayNoArgumentConstructorStub>::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, NONE);
}


Handle<Code> ArrayNoArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<ArraySingleArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, SINGLE);
}


Handle<Code> ArraySingleArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<ArrayNArgumentsConstructorStub>::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, MULTIPLE);
}


Handle<Code> ArrayNArgumentsConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArrayNoArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, NONE);
}


Handle<Code> InternalArrayNoArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArraySingleArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, SINGLE);
}


Handle<Code> InternalArraySingleArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArrayNArgumentsConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, MULTIPLE);
}


Handle<Code> InternalArrayNArgumentsConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<BinaryOpICStub>::BuildCodeInitializedStub() {
  BinaryOpICState state = casted_stub()->state();

  HValue* left = GetParameter(BinaryOpICStub::kLeft);
  HValue* right = GetParameter(BinaryOpICStub::kRight);

  Type* left_type = state.GetLeftType();
  Type* right_type = state.GetRightType();
  Type* result_type = state.GetResultType();

  DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) &&
         (state.HasSideEffects() || !result_type->Is(Type::None())));

  HValue* result = NULL;
  HAllocationMode allocation_mode(NOT_TENURED);
  if (state.op() == Token::ADD &&
      (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) &&
      !left_type->Is(Type::String()) && !right_type->Is(Type::String())) {
    // For the generic add stub a fast case for string addition is performance
    // critical.
    if (left_type->Maybe(Type::String())) {
      IfBuilder if_leftisstring(this);
      if_leftisstring.If<HIsStringAndBranch>(left);
      if_leftisstring.Then();
      {
        Push(BuildBinaryOperation(state.op(), left, right, Type::String(),
                                  right_type, result_type,
                                  state.fixed_right_arg(), allocation_mode));
      }
      if_leftisstring.Else();
      {
        Push(BuildBinaryOperation(state.op(), left, right, left_type,
                                  right_type, result_type,
                                  state.fixed_right_arg(), allocation_mode));
      }
      if_leftisstring.End();
      result = Pop();
    } else {
      IfBuilder if_rightisstring(this);
      if_rightisstring.If<HIsStringAndBranch>(right);
      if_rightisstring.Then();
      {
        Push(BuildBinaryOperation(state.op(), left, right, left_type,
                                  Type::String(), result_type,
                                  state.fixed_right_arg(), allocation_mode));
      }
      if_rightisstring.Else();
      {
        Push(BuildBinaryOperation(state.op(), left, right, left_type,
                                  right_type, result_type,
                                  state.fixed_right_arg(), allocation_mode));
      }
      if_rightisstring.End();
      result = Pop();
    }
  } else {
    result = BuildBinaryOperation(state.op(), left, right, left_type,
                                  right_type, result_type,
                                  state.fixed_right_arg(), allocation_mode);
  }

  // If we encounter a generic argument, the number conversion is
  // observable, thus we cannot afford to bail out after the fact.
  if (!state.HasSideEffects()) {
    result = EnforceNumberType(result, result_type);
  }

  return result;
}
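
// The split above exists purely to narrow one operand's type before calling
// BuildBinaryOperation: inside the "is a string" arm the builder may pass
// Type::String() for that operand instead of the recorded feedback type.
// As a sketch, for ADD where only the left side might be a string:
//
//   left is a string at runtime -> BuildBinaryOperation(.., Type::String(),
//                                                       right_type, ..)
//   otherwise                   -> BuildBinaryOperation(.., left_type,
//                                                       right_type, ..)
//
// Only one dynamic check is needed because the enclosing condition already
// excludes the case where either side is statically known to be a string.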


Handle<Code> BinaryOpICStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<BinaryOpWithAllocationSiteStub>::BuildCodeStub() {
  BinaryOpICState state = casted_stub()->state();

  HValue* allocation_site = GetParameter(
      BinaryOpWithAllocationSiteStub::kAllocationSite);
  HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft);
  HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight);

  Type* left_type = state.GetLeftType();
  Type* right_type = state.GetRightType();
  Type* result_type = state.GetResultType();
  HAllocationMode allocation_mode(allocation_site);

  return BuildBinaryOperation(state.op(), left, right, left_type, right_type,
                              result_type, state.fixed_right_arg(),
                              allocation_mode);
}


Handle<Code> BinaryOpWithAllocationSiteStub::GenerateCode() {
  return DoGenerateCode(this);
}


HValue* CodeStubGraphBuilderBase::BuildToString(HValue* input, bool convert) {
  if (!convert) return BuildCheckString(input);
  IfBuilder if_inputissmi(this);
  HValue* inputissmi = if_inputissmi.If<HIsSmiAndBranch>(input);
  if_inputissmi.Then();
  {
    // Convert the input smi to a string.
    Push(BuildNumberToString(input, Type::SignedSmall()));
  }
  if_inputissmi.Else();
  {
    HValue* input_map =
        Add<HLoadNamedField>(input, inputissmi, HObjectAccess::ForMap());
    HValue* input_instance_type = Add<HLoadNamedField>(
        input_map, inputissmi, HObjectAccess::ForMapInstanceType());
    IfBuilder if_inputisstring(this);
    if_inputisstring.If<HCompareNumericAndBranch>(
        input_instance_type, Add<HConstant>(FIRST_NONSTRING_TYPE), Token::LT);
    if_inputisstring.Then();
    {
      // The input is already a string.
      Push(input);
    }
    if_inputisstring.Else();
    {
      // Convert to primitive first (if necessary), see
      // ES6 section 12.7.3 The Addition operator.
      IfBuilder if_inputisprimitive(this);
      STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE);
      if_inputisprimitive.If<HCompareNumericAndBranch>(
          input_instance_type, Add<HConstant>(LAST_PRIMITIVE_TYPE), Token::LTE);
      if_inputisprimitive.Then();
      {
        // The input is already a primitive.
        Push(input);
      }
      if_inputisprimitive.Else();
      {
        // Convert the input to a primitive.
        Push(BuildToPrimitive(input, input_map));
      }
      if_inputisprimitive.End();
      // Convert the primitive to a string value.
      ToStringStub stub(isolate());
      HValue* values[] = {context(), Pop()};
      Push(AddUncasted<HCallWithDescriptor>(
          Add<HConstant>(stub.GetCode()), 0, stub.GetCallInterfaceDescriptor(),
          Vector<HValue*>(values, arraysize(values))));
    }
    if_inputisstring.End();
  }
  if_inputissmi.End();
  return Pop();
}
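
// BuildToString thus lowers ToString(input) into a ladder of cheap checks
// before reaching the generic ToStringStub; in branch order:
//
//   smi                                   -> cached number-to-string
//   instance_type < FIRST_NONSTRING_TYPE  -> already a string, reused as-is
//   instance_type <= LAST_PRIMITIVE_TYPE  -> primitive, ToStringStub call
//   otherwise                             -> ToPrimitive, then ToStringStub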


HValue* CodeStubGraphBuilderBase::BuildToPrimitive(HValue* input,
                                                   HValue* input_map) {
  // Get the native context of the caller.
  HValue* native_context = BuildGetNativeContext();

  // Determine the initial map of the %ObjectPrototype%.
  HValue* object_function_prototype_map =
      Add<HLoadNamedField>(native_context, nullptr,
                           HObjectAccess::ForContextSlot(
                               Context::OBJECT_FUNCTION_PROTOTYPE_MAP_INDEX));

  // Determine the initial map of the %StringPrototype%.
  HValue* string_function_prototype_map =
      Add<HLoadNamedField>(native_context, nullptr,
                           HObjectAccess::ForContextSlot(
                               Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));

  // Determine the initial map of the String function.
  HValue* string_function = Add<HLoadNamedField>(
      native_context, nullptr,
      HObjectAccess::ForContextSlot(Context::STRING_FUNCTION_INDEX));
  HValue* string_function_initial_map = Add<HLoadNamedField>(
      string_function, nullptr, HObjectAccess::ForPrototypeOrInitialMap());

  // Determine the map of the [[Prototype]] of {input}.
  HValue* input_prototype =
      Add<HLoadNamedField>(input_map, nullptr, HObjectAccess::ForPrototype());
  HValue* input_prototype_map =
      Add<HLoadNamedField>(input_prototype, nullptr, HObjectAccess::ForMap());

  // For string wrappers (JSValue instances with [[StringData]] internal
  // fields), we can short-circuit the ToPrimitive if
  //
  //  (a) the {input} map matches the initial map of the String function,
  //  (b) the {input} [[Prototype]] is the unmodified %StringPrototype% (i.e.
  //      no one monkey-patched toString, @@toPrimitive or valueOf), and
  //  (c) the %ObjectPrototype% (i.e. the [[Prototype]] of the
  //      %StringPrototype%) is also unmodified, that is no one sneaked a
  //      @@toPrimitive into the %ObjectPrototype%.
  //
  // If all these assumptions hold, we can just take the [[StringData]] value
  // and return it.
  // TODO(bmeurer): This just repairs a regression introduced by removing the
  // weird (and broken) intrinsic %_IsStringWrapperSafeForDefaultValue, which
  // was intended to do something similar to this, although less efficient and
  // wrong in the presence of @@toPrimitive. Long-term we might want to move
  // into the direction of having a ToPrimitiveStub that can do common cases
  // while staying in JavaScript land (i.e. not going to C++).
  IfBuilder if_inputisstringwrapper(this);
  if_inputisstringwrapper.If<HCompareObjectEqAndBranch>(
      input_map, string_function_initial_map);
  if_inputisstringwrapper.And();
  if_inputisstringwrapper.If<HCompareObjectEqAndBranch>(
      input_prototype_map, string_function_prototype_map);
  if_inputisstringwrapper.And();
  if_inputisstringwrapper.If<HCompareObjectEqAndBranch>(
      Add<HLoadNamedField>(Add<HLoadNamedField>(input_prototype_map, nullptr,
                                                HObjectAccess::ForPrototype()),
                           nullptr, HObjectAccess::ForMap()),
      object_function_prototype_map);
  if_inputisstringwrapper.Then();
  {
    Push(BuildLoadNamedField(
        input, FieldIndex::ForInObjectOffset(JSValue::kValueOffset)));
  }
  if_inputisstringwrapper.Else();
  {
    // TODO(bmeurer): Add support for fast ToPrimitive conversion using
    // a dedicated ToPrimitiveStub.
    Add<HPushArguments>(input);
    Push(Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kToPrimitive), 1));
  }
  if_inputisstringwrapper.End();
  return Pop();
}
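
// An illustrative JS-level consequence of checks (a)-(c) (example only):
// for a pristine wrapper such as new String("abc"), the fast path returns
// the wrapped "abc" directly. But after, say,
//
//   String.prototype.valueOf = function() { return "patched"; };
//
// %StringPrototype% no longer has its initial map, check (b) fails, and the
// builder falls back to the full Runtime::kToPrimitive call, which honors
// the monkey-patched method.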


template <>
HValue* CodeStubGraphBuilder<StringAddStub>::BuildCodeInitializedStub() {
  StringAddStub* stub = casted_stub();
  StringAddFlags flags = stub->flags();
  PretenureFlag pretenure_flag = stub->pretenure_flag();

  HValue* left = GetParameter(StringAddStub::kLeft);
  HValue* right = GetParameter(StringAddStub::kRight);

  // Make sure that both arguments are strings if not known in advance.
  if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) {
    left =
        BuildToString(left, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT);
  }
  if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) {
    right = BuildToString(right,
                          (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT);
  }

  return BuildStringAdd(left, right, HAllocationMode(pretenure_flag));
}
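
// The `(flags & MASK) == MASK` form above matters because StringAddFlags
// can be composite. A minimal standalone sketch of the idiom (enum values
// illustrative, not the real encoding):
//
//   enum Flags { kCheckLeft = 1 << 0, kCheckRight = 1 << 1,
//                kCheckBoth = kCheckLeft | kCheckRight };
//   bool checks_left = (flags & kCheckLeft) == kCheckLeft;
//
// For a multi-bit mask, `(flags & mask) != 0` would already fire when any
// one bit is set, so the equality form is the robust "all bits set" test.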


Handle<Code> StringAddStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<ToBooleanICStub>::BuildCodeInitializedStub() {
  ToBooleanICStub* stub = casted_stub();
  IfBuilder if_true(this);
  if_true.If<HBranch>(GetParameter(0), stub->types());
  if_true.Then();
  if_true.Return(graph()->GetConstantTrue());
  if_true.Else();
  if_true.End();
  return graph()->GetConstantFalse();
}
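
// ToBooleanICStub keys its HBranch on stub->types(), the set of input kinds
// the IC has observed, so the emitted check sequence covers only seen cases;
// unobserved inputs simply take the false path here, and the surrounding IC
// machinery is responsible for widening the type set on a miss (that framing
// is an assumption about the IC protocol, not something this stub shows).
// For instance (illustrative), smi-only feedback lets the branch reduce to a
// smi check plus a comparison against zero instead of the full ES ToBoolean
// ladder.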

Handle<Code> ToBooleanICStub::GenerateCode() { return DoGenerateCode(this); }

template <>
HValue* CodeStubGraphBuilder<StoreGlobalStub>::BuildCodeInitializedStub() {
  StoreGlobalStub* stub = casted_stub();
  HParameter* value = GetParameter(StoreDescriptor::kValueIndex);
  if (stub->check_global()) {
    // Check that the map of the global has not changed: use a placeholder map
    // that will be replaced later with the global object's map.
    HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex);
    HValue* proxy_map =
        Add<HLoadNamedField>(proxy, nullptr, HObjectAccess::ForMap());
    HValue* global =
        Add<HLoadNamedField>(proxy_map, nullptr, HObjectAccess::ForPrototype());
    HValue* map_cell = Add<HConstant>(isolate()->factory()->NewWeakCell(
        StoreGlobalStub::global_map_placeholder(isolate())));
    HValue* expected_map = Add<HLoadNamedField>(
        map_cell, nullptr, HObjectAccess::ForWeakCellValue());
    HValue* map =
        Add<HLoadNamedField>(global, nullptr, HObjectAccess::ForMap());
    IfBuilder map_check(this);
    map_check.IfNot<HCompareObjectEqAndBranch>(expected_map, map);
    map_check.ThenDeopt(Deoptimizer::kUnknownMap);
    map_check.End();
  }

  HValue* weak_cell = Add<HConstant>(isolate()->factory()->NewWeakCell(
      StoreGlobalStub::property_cell_placeholder(isolate())));
  HValue* cell = Add<HLoadNamedField>(weak_cell, nullptr,
                                      HObjectAccess::ForWeakCellValue());
  Add<HCheckHeapObject>(cell);
  HObjectAccess access = HObjectAccess::ForPropertyCellValue();
  // Load the payload of the global parameter cell. A hole indicates that the
  // cell has been invalidated and that the store must be handled by the
  // runtime.
  HValue* cell_contents = Add<HLoadNamedField>(cell, nullptr, access);

  auto cell_type = stub->cell_type();
  if (cell_type == PropertyCellType::kConstant ||
      cell_type == PropertyCellType::kUndefined) {
    // This is always valid for all states a cell can be in.
    IfBuilder builder(this);
    builder.If<HCompareObjectEqAndBranch>(cell_contents, value);
    builder.Then();
    builder.ElseDeopt(
        Deoptimizer::kUnexpectedCellContentsInConstantGlobalStore);
    builder.End();
  } else {
    IfBuilder builder(this);
    HValue* hole_value = graph()->GetConstantHole();
    builder.If<HCompareObjectEqAndBranch>(cell_contents, hole_value);
    builder.Then();
    builder.Deopt(Deoptimizer::kUnexpectedCellContentsInGlobalStore);
    builder.Else();
    // When dealing with constant types, the type may be allowed to change, as
    // long as optimized code remains valid.
    if (cell_type == PropertyCellType::kConstantType) {
      switch (stub->constant_type()) {
        case PropertyCellConstantType::kSmi:
          access = access.WithRepresentation(Representation::Smi());
          break;
        case PropertyCellConstantType::kStableMap: {
          // It is sufficient here to check that the value and cell contents
          // have identical maps, no matter whether they are stable, or whether
          // they are the maps that were originally in the cell. If optimized
          // code will deopt when a cell has an unstable map, and if it has a
          // dependency on a stable map, it will deopt if the map destabilizes.
          Add<HCheckHeapObject>(value);
          Add<HCheckHeapObject>(cell_contents);
          HValue* expected_map = Add<HLoadNamedField>(cell_contents, nullptr,
                                                      HObjectAccess::ForMap());
          HValue* map =
              Add<HLoadNamedField>(value, nullptr, HObjectAccess::ForMap());
          IfBuilder map_check(this);
          map_check.IfNot<HCompareObjectEqAndBranch>(expected_map, map);
          map_check.ThenDeopt(Deoptimizer::kUnknownMap);
          map_check.End();
          access = access.WithRepresentation(Representation::HeapObject());
          break;
        }
      }
    }
    Add<HStoreNamedField>(cell, access, value);
    builder.End();
  }

  return value;
}
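
// The two placeholder WeakCells above are the key trick: the stub is
// compiled once against global_map_placeholder / property_cell_placeholder,
// and when it is later specialized for a concrete global property those
// placeholders are swapped for the real map and PropertyCell. The deopt
// points then spell out the stub's validity conditions, roughly:
//
//   check_global()          -> deopt if the global object's map changed
//   kConstant / kUndefined  -> deopt unless value equals the cell contents
//   other cell types        -> deopt if the cell was invalidated (the hole)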


Handle<Code> StoreGlobalStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<ElementsTransitionAndStoreStub>::BuildCodeStub() {
  HValue* object = GetParameter(StoreTransitionHelper::ReceiverIndex());
  HValue* key = GetParameter(StoreTransitionHelper::NameIndex());
  HValue* value = GetParameter(StoreTransitionHelper::ValueIndex());
  HValue* map = GetParameter(StoreTransitionHelper::MapIndex());

  if (FLAG_trace_elements_transitions) {
    // Tracing elements transitions is the job of the runtime.
    Add<HDeoptimize>(Deoptimizer::kTracingElementsTransitions,
                     Deoptimizer::EAGER);
  } else {
    info()->MarkAsSavesCallerDoubles();

    BuildTransitionElementsKind(object, map,
                                casted_stub()->from_kind(),
                                casted_stub()->to_kind(),
                                casted_stub()->is_jsarray());

    BuildUncheckedMonomorphicElementAccess(object, key, value,
                                           casted_stub()->is_jsarray(),
                                           casted_stub()->to_kind(),
                                           STORE, ALLOW_RETURN_HOLE,
                                           casted_stub()->store_mode());
  }

  return value;
}


Handle<Code> ElementsTransitionAndStoreStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<ToObjectStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(TypeConversionDescriptor::kArgumentIndex);
  return BuildToObject(receiver);
}


Handle<Code> ToObjectStub::GenerateCode() { return DoGenerateCode(this); }


void CodeStubGraphBuilderBase::BuildCheckAndInstallOptimizedCode(
    HValue* js_function,
    HValue* native_context,
    IfBuilder* builder,
    HValue* optimized_map,
    HValue* map_index) {
  HValue* osr_ast_id_none = Add<HConstant>(BailoutId::None().ToInt());
  HValue* context_slot = LoadFromOptimizedCodeMap(
      optimized_map, map_index, SharedFunctionInfo::kContextOffset);
  context_slot = Add<HLoadNamedField>(context_slot, nullptr,
                                      HObjectAccess::ForWeakCellValue());
  HValue* osr_ast_slot = LoadFromOptimizedCodeMap(
      optimized_map, map_index, SharedFunctionInfo::kOsrAstIdOffset);
  HValue* code_object = LoadFromOptimizedCodeMap(
      optimized_map, map_index, SharedFunctionInfo::kCachedCodeOffset);
  code_object = Add<HLoadNamedField>(code_object, nullptr,
                                     HObjectAccess::ForWeakCellValue());
  builder->If<HCompareObjectEqAndBranch>(native_context,
                                         context_slot);
  builder->AndIf<HCompareObjectEqAndBranch>(osr_ast_slot, osr_ast_id_none);
  builder->And();
  builder->IfNot<HCompareObjectEqAndBranch>(code_object,
                                            graph()->GetConstant0());
  builder->Then();
  HValue* literals = LoadFromOptimizedCodeMap(optimized_map,
      map_index, SharedFunctionInfo::kLiteralsOffset);
  literals = Add<HLoadNamedField>(literals, nullptr,
                                  HObjectAccess::ForWeakCellValue());
  IfBuilder maybe_deopt(this);
  maybe_deopt.If<HCompareObjectEqAndBranch>(literals, graph()->GetConstant0());
  maybe_deopt.ThenDeopt(Deoptimizer::kLiteralsWereDisposed);
  maybe_deopt.End();

  BuildInstallOptimizedCode(js_function, native_context, code_object, literals);

  // The builder continues in the "then" after this function.
}
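
// The installability condition assembled above reads, in pseudocode
// (cleared WeakCells surface as Smi 0, which is what the GetConstant0()
// comparisons check against):
//
//   if (entry.context == native_context &&
//       entry.osr_ast_id == BailoutId::None() &&
//       entry.code != cleared) {
//     if (entry.literals == cleared) deopt(kLiteralsWereDisposed);
//     install entry.code and entry.literals on js_function;
//   }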


void CodeStubGraphBuilderBase::BuildInstallOptimizedCode(HValue* js_function,
                                                         HValue* native_context,
                                                         HValue* code_object,
                                                         HValue* literals) {
  Counters* counters = isolate()->counters();
  AddIncrementCounter(counters->fast_new_closure_install_optimized());

  // TODO(fschneider): Idea: store proper code pointers in the optimized code
  // map and either unmangle them on marking or do nothing as the whole map is
  // discarded on major GC anyway.
  Add<HStoreCodeEntry>(js_function, code_object);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(),
                        literals);

  // Now link a function into a list of optimized functions.
  HValue* optimized_functions_list = Add<HLoadNamedField>(
      native_context, nullptr,
      HObjectAccess::ForContextSlot(Context::OPTIMIZED_FUNCTIONS_LIST));
  Add<HStoreNamedField>(js_function,
                        HObjectAccess::ForNextFunctionLinkPointer(),
                        optimized_functions_list);

  // This store is the only one that should have a write barrier.
  Add<HStoreNamedField>(native_context,
      HObjectAccess::ForContextSlot(Context::OPTIMIZED_FUNCTIONS_LIST),
      js_function);
}


void CodeStubGraphBuilderBase::BuildInstallCode(HValue* js_function,
                                                HValue* shared_info) {
  Add<HStoreNamedField>(js_function,
                        HObjectAccess::ForNextFunctionLinkPointer(),
                        graph()->GetConstantUndefined());
  HValue* code_object = Add<HLoadNamedField>(shared_info, nullptr,
                                             HObjectAccess::ForCodeOffset());
  Add<HStoreCodeEntry>(js_function, code_object);
}


HInstruction* CodeStubGraphBuilderBase::LoadFromOptimizedCodeMap(
    HValue* optimized_map,
    HValue* iterator,
    int field_offset) {
  // By making sure to express these loads in the form [<hvalue> + constant]
  // the keyed load can be hoisted.
  DCHECK(field_offset >= 0 && field_offset < SharedFunctionInfo::kEntryLength);
  HValue* field_slot = iterator;
  if (field_offset > 0) {
    HValue* field_offset_value = Add<HConstant>(field_offset);
    field_slot = AddUncasted<HAdd>(iterator, field_offset_value);
  }
  HInstruction* field_entry = Add<HLoadKeyed>(optimized_map, field_slot,
                                              nullptr, nullptr, FAST_ELEMENTS);
  return field_entry;
}
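
// Why the [<hvalue> + constant] shape helps: every field of an entry is
// addressed off the same loop variable, so the common base is visible to
// hoisting. Per iteration the caller ends up with loads of the form
//
//   optimized_map[iterator + kContextOffset]
//   optimized_map[iterator + kOsrAstIdOffset]
//   optimized_map[iterator + kCachedCodeOffset]
//   optimized_map[iterator + kLiteralsOffset]
//
// using the SharedFunctionInfo::k*Offset constants passed in above.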


void CodeStubGraphBuilderBase::BuildInstallFromOptimizedCodeMap(
    HValue* js_function,
    HValue* shared_info,
    HValue* native_context) {
  Counters* counters = isolate()->counters();
  Factory* factory = isolate()->factory();
  IfBuilder is_optimized(this);
  HInstruction* optimized_map = Add<HLoadNamedField>(
      shared_info, nullptr, HObjectAccess::ForOptimizedCodeMap());
  HValue* null_constant = Add<HConstant>(0);
  is_optimized.If<HCompareObjectEqAndBranch>(optimized_map, null_constant);
  is_optimized.Then();
  {
    BuildInstallCode(js_function, shared_info);
  }
  is_optimized.Else();
  {
    AddIncrementCounter(counters->fast_new_closure_try_optimized());
    // The {optimized_map} points to a fixed array of 4-element entries:
    //   (native context, optimized code, literals, ast-id).
    // Iterate through the {optimized_map} backwards. After the loop, if no
    // matching optimized code was found, install unoptimized code.
    //   for (i = map.length() - SharedFunctionInfo::kEntryLength;
    //        i >= SharedFunctionInfo::kEntriesStart;
    //        i -= SharedFunctionInfo::kEntryLength) { ... }
    HValue* first_entry_index =
        Add<HConstant>(SharedFunctionInfo::kEntriesStart);
    HValue* shared_function_entry_length =
        Add<HConstant>(SharedFunctionInfo::kEntryLength);
    LoopBuilder loop_builder(this, context(), LoopBuilder::kPostDecrement,
                             shared_function_entry_length);
    HValue* array_length = Add<HLoadNamedField>(
        optimized_map, nullptr, HObjectAccess::ForFixedArrayLength());
    HValue* start_pos =
        AddUncasted<HSub>(array_length, shared_function_entry_length);
    HValue* slot_iterator =
        loop_builder.BeginBody(start_pos, first_entry_index, Token::GTE);
    {
      IfBuilder done_check(this);
      BuildCheckAndInstallOptimizedCode(js_function, native_context,
                                        &done_check, optimized_map,
                                        slot_iterator);
      // Fall out of the loop.
      loop_builder.Break();
    }
    loop_builder.EndBody();

    // If {slot_iterator} is less than the first entry index, then we failed
    // to find context-dependent code; try the context-independent code next.
    IfBuilder no_optimized_code_check(this);
    no_optimized_code_check.If<HCompareNumericAndBranch>(
        slot_iterator, first_entry_index, Token::LT);
    no_optimized_code_check.Then();
    {
      IfBuilder shared_code_check(this);
      HValue* shared_code =
          Add<HLoadNamedField>(optimized_map, nullptr,
                               HObjectAccess::ForOptimizedCodeMapSharedCode());
      shared_code = Add<HLoadNamedField>(shared_code, nullptr,
                                         HObjectAccess::ForWeakCellValue());
      shared_code_check.IfNot<HCompareObjectEqAndBranch>(
          shared_code, graph()->GetConstant0());
      shared_code_check.Then();
      {
        // Store the context-independent optimized code.
        HValue* literals = Add<HConstant>(factory->empty_fixed_array());
        BuildInstallOptimizedCode(js_function, native_context, shared_code,
                                  literals);
      }
      shared_code_check.Else();
      {
        // Store the unoptimized code.
        BuildInstallCode(js_function, shared_info);
      }
    }
  }
}
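
// Net effect: closure creation probes three tiers in order:
//   (1) a context-matching entry in the optimized code map (newest entries
//       first, since the loop walks the array backwards),
//   (2) the context-independent shared code WeakCell, installed with an
//       empty literals array, and
//   (3) the unoptimized code from the SharedFunctionInfo.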


template<>
HValue* CodeStubGraphBuilder<FastNewClosureStub>::BuildCodeStub() {
  Counters* counters = isolate()->counters();
  Factory* factory = isolate()->factory();
  HInstruction* empty_fixed_array =
      Add<HConstant>(factory->empty_fixed_array());
  HValue* shared_info = GetParameter(0);

  AddIncrementCounter(counters->fast_new_closure_total());

  // Create a new closure from the given function info in new space.
  HValue* size = Add<HConstant>(JSFunction::kSize);
  HInstruction* js_function =
      Add<HAllocate>(size, HType::JSObject(), NOT_TENURED, JS_FUNCTION_TYPE);

  int map_index = Context::FunctionMapIndex(casted_stub()->language_mode(),
                                            casted_stub()->kind());

  // Compute the function map in the current native context and set that
  // as the map of the allocated object.
  HInstruction* native_context = BuildGetNativeContext();
  HInstruction* map_slot_value = Add<HLoadNamedField>(
      native_context, nullptr, HObjectAccess::ForContextSlot(map_index));
  Add<HStoreNamedField>(js_function, HObjectAccess::ForMap(), map_slot_value);

  // Initialize the rest of the function.
  Add<HStoreNamedField>(js_function, HObjectAccess::ForPropertiesPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForElementsPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForPrototypeOrInitialMap(),
                        graph()->GetConstantHole());
  Add<HStoreNamedField>(
      js_function, HObjectAccess::ForSharedFunctionInfoPointer(), shared_info);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForFunctionContextPointer(),
                        context());

  // Initialize the code pointer in the function to be the one found in the
  // shared function info object. But first check if there is an optimized
  // version for our context.
  BuildInstallFromOptimizedCodeMap(js_function, shared_info, native_context);

  return js_function;
}


Handle<Code> FastNewClosureStub::GenerateCode() {
  return DoGenerateCode(this);
}


template<>
HValue* CodeStubGraphBuilder<FastNewContextStub>::BuildCodeStub() {
  int length = casted_stub()->slots() + Context::MIN_CONTEXT_SLOTS;

  // Get the function.
  HParameter* function = GetParameter(FastNewContextStub::kFunction);

  // Allocate the context in new space.
  HAllocate* function_context = Add<HAllocate>(
      Add<HConstant>(length * kPointerSize + FixedArray::kHeaderSize),
      HType::HeapObject(), NOT_TENURED, FIXED_ARRAY_TYPE);

  // Set up the object header.
  AddStoreMapConstant(function_context,
                      isolate()->factory()->function_context_map());
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForFixedArrayLength(),
                        Add<HConstant>(length));

  // Set up the fixed slots.
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::CLOSURE_INDEX),
                        function);
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX),
                        context());
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::EXTENSION_INDEX),
                        graph()->GetConstantHole());

  // Copy the native context from the previous context.
  HValue* native_context = Add<HLoadNamedField>(
      context(), nullptr,
      HObjectAccess::ForContextSlot(Context::NATIVE_CONTEXT_INDEX));
  Add<HStoreNamedField>(function_context, HObjectAccess::ForContextSlot(
                                              Context::NATIVE_CONTEXT_INDEX),
                        native_context);

  // Initialize the rest of the slots to undefined.
  for (int i = Context::MIN_CONTEXT_SLOTS; i < length; ++i) {
    Add<HStoreNamedField>(function_context,
                          HObjectAccess::ForContextSlot(i),
                          graph()->GetConstantUndefined());
  }

  return function_context;
}
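
// Size arithmetic for the allocation above: a context is laid out like a
// FixedArray, i.e. a header followed by one tagged slot per context slot.
// Worked example, assuming a 64-bit build (kPointerSize == 8) and a
// two-word FixedArray header of 16 bytes (both constants are assumptions
// for illustration; only the formula comes from the code): a function
// needing 4 own slots allocates
//
//   (4 + Context::MIN_CONTEXT_SLOTS) * 8 + 16  bytes.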


Handle<Code> FastNewContextStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<LoadDictionaryElementStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
  HValue* key = GetParameter(LoadDescriptor::kNameIndex);

  Add<HCheckSmi>(key);

  HValue* elements = AddLoadElements(receiver);

  HValue* hash = BuildElementIndexHash(key);

  return BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash);
}


Handle<Code> LoadDictionaryElementStub::GenerateCode() {
  return DoGenerateCode(this);
}


template<>
HValue* CodeStubGraphBuilder<RegExpConstructResultStub>::BuildCodeStub() {
  // Determine the parameters.
  HValue* length = GetParameter(RegExpConstructResultStub::kLength);
  HValue* index = GetParameter(RegExpConstructResultStub::kIndex);
  HValue* input = GetParameter(RegExpConstructResultStub::kInput);

  info()->MarkMustNotHaveEagerFrame();

  return BuildRegExpConstructResult(length, index, input);
}


Handle<Code> RegExpConstructResultStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
class CodeStubGraphBuilder<KeyedLoadGenericStub>
    : public CodeStubGraphBuilderBase {
 public:
  explicit CodeStubGraphBuilder(CompilationInfo* info, CodeStub* stub)
      : CodeStubGraphBuilderBase(info, stub) {}

 protected:
  virtual HValue* BuildCodeStub();

  void BuildElementsKindLimitCheck(HGraphBuilder::IfBuilder* if_builder,
                                   HValue* bit_field2,
                                   ElementsKind kind);

  void BuildFastElementLoad(HGraphBuilder::IfBuilder* if_builder,
                            HValue* receiver,
                            HValue* key,
                            HValue* instance_type,
                            HValue* bit_field2,
                            ElementsKind kind);

  KeyedLoadGenericStub* casted_stub() {
    return static_cast<KeyedLoadGenericStub*>(stub());
  }
};


void CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildElementsKindLimitCheck(
    HGraphBuilder::IfBuilder* if_builder, HValue* bit_field2,
    ElementsKind kind) {
  ElementsKind next_kind = static_cast<ElementsKind>(kind + 1);
  HValue* kind_limit = Add<HConstant>(
      static_cast<int>(Map::ElementsKindBits::encode(next_kind)));

  if_builder->If<HCompareNumericAndBranch>(bit_field2, kind_limit, Token::LT);
  if_builder->Then();
}
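
// Why comparing the raw bit_field2 byte works: ElementsKindBits occupies
// the most significant bits of bit_field2 (the comparison relies on that
// layout). Writing bit_field2 = (kind' << shift) | low with
// low < (1 << shift), one gets
//
//   bit_field2 < encode(kind + 1)  <=>  kind' <= kind
//
// so a single unsigned compare classifies the elements kind without
// masking off the unrelated low bits.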


void CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildFastElementLoad(
    HGraphBuilder::IfBuilder* if_builder, HValue* receiver, HValue* key,
    HValue* instance_type, HValue* bit_field2, ElementsKind kind) {
  BuildElementsKindLimitCheck(if_builder, bit_field2, kind);

  IfBuilder js_array_check(this);
  js_array_check.If<HCompareNumericAndBranch>(
      instance_type, Add<HConstant>(JS_ARRAY_TYPE), Token::EQ);
  js_array_check.Then();
  Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
                                              true, kind,
                                              LOAD, NEVER_RETURN_HOLE,
                                              STANDARD_STORE));
  js_array_check.Else();
  Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
                                              false, kind,
                                              LOAD, NEVER_RETURN_HOLE,
                                              STANDARD_STORE));
  js_array_check.End();
}


HValue* CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
  HValue* key = GetParameter(LoadDescriptor::kNameIndex);
  // Split into a smi/integer case and unique string case.
  HIfContinuation index_name_split_continuation(graph()->CreateBasicBlock(),
                                                graph()->CreateBasicBlock());

  BuildKeyedIndexCheck(key, &index_name_split_continuation);

  IfBuilder index_name_split(this, &index_name_split_continuation);
  index_name_split.Then();
  {
    // Key is an index (number).
    key = Pop();

    int bit_field_mask = (1 << Map::kIsAccessCheckNeeded) |
                         (1 << Map::kHasIndexedInterceptor);
    BuildJSObjectCheck(receiver, bit_field_mask);

    HValue* map =
        Add<HLoadNamedField>(receiver, nullptr, HObjectAccess::ForMap());

    HValue* instance_type =
        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapInstanceType());

    HValue* bit_field2 =
        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField2());

    IfBuilder kind_if(this);
    BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                         FAST_HOLEY_ELEMENTS);

    kind_if.Else();
    {
      BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                           FAST_HOLEY_DOUBLE_ELEMENTS);
    }
    kind_if.Else();

    // The DICTIONARY_ELEMENTS check generates a "kind_if.Then".
    BuildElementsKindLimitCheck(&kind_if, bit_field2, DICTIONARY_ELEMENTS);
    {
      HValue* elements = AddLoadElements(receiver);

      HValue* hash = BuildElementIndexHash(key);

      Push(BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash));
    }
    kind_if.Else();

    // The SLOW_SLOPPY_ARGUMENTS_ELEMENTS check generates a "kind_if.Then".
    STATIC_ASSERT(FAST_SLOPPY_ARGUMENTS_ELEMENTS <
                  SLOW_SLOPPY_ARGUMENTS_ELEMENTS);
    BuildElementsKindLimitCheck(&kind_if, bit_field2,
                                SLOW_SLOPPY_ARGUMENTS_ELEMENTS);
    // Non-strict elements are not handled.
    Add<HDeoptimize>(Deoptimizer::kNonStrictElementsInKeyedLoadGenericStub,
                     Deoptimizer::EAGER);
    Push(graph()->GetConstant0());

    kind_if.ElseDeopt(
        Deoptimizer::kElementsKindUnhandledInKeyedLoadGenericStub);

    kind_if.End();
  }
  index_name_split.Else();
  {
    // Key is a unique string.
    key = Pop();

    int bit_field_mask = (1 << Map::kIsAccessCheckNeeded) |
                         (1 << Map::kHasNamedInterceptor);
    BuildJSObjectCheck(receiver, bit_field_mask);

    HIfContinuation continuation;
    BuildTestForDictionaryProperties(receiver, &continuation);
    IfBuilder if_dict_properties(this, &continuation);
    if_dict_properties.Then();
    {
      // Key is string, properties are dictionary mode.
      BuildNonGlobalObjectCheck(receiver);

      HValue* properties = Add<HLoadNamedField>(
          receiver, nullptr, HObjectAccess::ForPropertiesPointer());

      HValue* hash =
          Add<HLoadNamedField>(key, nullptr, HObjectAccess::ForNameHashField());

      hash = AddUncasted<HShr>(hash, Add<HConstant>(Name::kHashShift));

      HValue* value =
          BuildUncheckedDictionaryElementLoad(receiver, properties, key, hash);
      Push(value);
    }
    if_dict_properties.Else();
    {
      // TODO(dcarney): don't use keyed lookup cache, but convert to use
      // megamorphic stub cache.
      UNREACHABLE();
      // Key is string, properties are fast mode.
      HValue* hash = BuildKeyedLookupCacheHash(receiver, key);

      ExternalReference cache_keys_ref =
          ExternalReference::keyed_lookup_cache_keys(isolate());
      HValue* cache_keys = Add<HConstant>(cache_keys_ref);

      HValue* map =
          Add<HLoadNamedField>(receiver, nullptr, HObjectAccess::ForMap());
      HValue* base_index = AddUncasted<HMul>(hash, Add<HConstant>(2));
      base_index->ClearFlag(HValue::kCanOverflow);

      HIfContinuation inline_or_runtime_continuation(
          graph()->CreateBasicBlock(), graph()->CreateBasicBlock());
      {
        IfBuilder lookup_ifs[KeyedLookupCache::kEntriesPerBucket];
        for (int probe = 0; probe < KeyedLookupCache::kEntriesPerBucket;
             ++probe) {
          IfBuilder* lookup_if = &lookup_ifs[probe];
          lookup_if->Initialize(this);
          int probe_base = probe * KeyedLookupCache::kEntryLength;
          HValue* map_index = AddUncasted<HAdd>(
              base_index,
              Add<HConstant>(probe_base + KeyedLookupCache::kMapIndex));
          map_index->ClearFlag(HValue::kCanOverflow);
          HValue* key_index = AddUncasted<HAdd>(
              base_index,
              Add<HConstant>(probe_base + KeyedLookupCache::kKeyIndex));
          key_index->ClearFlag(HValue::kCanOverflow);
          HValue* map_to_check =
              Add<HLoadKeyed>(cache_keys, map_index, nullptr, nullptr,
                              FAST_ELEMENTS, NEVER_RETURN_HOLE, 0);
          lookup_if->If<HCompareObjectEqAndBranch>(map_to_check, map);
          lookup_if->And();
          HValue* key_to_check =
              Add<HLoadKeyed>(cache_keys, key_index, nullptr, nullptr,
                              FAST_ELEMENTS, NEVER_RETURN_HOLE, 0);
          lookup_if->If<HCompareObjectEqAndBranch>(key_to_check, key);
          lookup_if->Then();
          {
            ExternalReference cache_field_offsets_ref =
                ExternalReference::keyed_lookup_cache_field_offsets(isolate());
            HValue* cache_field_offsets =
                Add<HConstant>(cache_field_offsets_ref);
            HValue* index = AddUncasted<HAdd>(hash, Add<HConstant>(probe));
            index->ClearFlag(HValue::kCanOverflow);
            HValue* property_index =
                Add<HLoadKeyed>(cache_field_offsets, index, nullptr, cache_keys,
                                INT32_ELEMENTS, NEVER_RETURN_HOLE, 0);
            Push(property_index);
          }
          lookup_if->Else();
        }
        for (int i = 0; i < KeyedLookupCache::kEntriesPerBucket; ++i) {
          lookup_ifs[i].JoinContinuation(&inline_or_runtime_continuation);
        }
      }

      IfBuilder inline_or_runtime(this, &inline_or_runtime_continuation);
      inline_or_runtime.Then();
      {
        // Found a cached index, load property inline.
        Push(Add<HLoadFieldByIndex>(receiver, Pop()));
      }
      inline_or_runtime.Else();
      {
        // KeyedLookupCache miss; call runtime.
        Add<HPushArguments>(receiver, key);
        Push(Add<HCallRuntime>(
            Runtime::FunctionForId(Runtime::kKeyedGetProperty), 2));
      }
      inline_or_runtime.End();
    }
    if_dict_properties.End();
  }
  index_name_split.End();

  return Pop();
}
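
// The cache probing arithmetic in the (currently unreachable) fast-mode arm
// above, spelled out with the KeyedLookupCache::k* constants it uses: each
// bucket holds kEntriesPerBucket (map, key) pairs stored flat, so probe p
// tests
//
//   cache_keys[hash * 2 + p * kEntryLength + kMapIndex]  // expected map
//   cache_keys[hash * 2 + p * kEntryLength + kKeyIndex]  // expected key
//
// and on a double hit loads the field offset from
// cache_field_offsets[hash + p]. The arm sits behind UNREACHABLE() pending
// the TODO above to move fast-mode name loads to the megamorphic stub cache.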


Handle<Code> KeyedLoadGenericStub::GenerateCode() {
  return DoGenerateCode(this);
}

}  // namespace internal
}  // namespace v8