blob: 461baaa4e6d71447b186938e8bba55a87ec3f7e7 [file] [log] [blame]
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001// Copyright 2012 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005#include "src/code-stubs.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +00006
7#include "src/bailout-reason.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008#include "src/crankshaft/hydrogen.h"
9#include "src/crankshaft/lithium.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000010#include "src/field-index.h"
Emily Bernierd0a1eb72015-03-24 16:35:39 -040011#include "src/ic/ic.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000012
13namespace v8 {
14namespace internal {
15
16
17static LChunk* OptimizeGraph(HGraph* graph) {
18 DisallowHeapAllocation no_allocation;
19 DisallowHandleAllocation no_handles;
20 DisallowHandleDereference no_deref;
21
22 DCHECK(graph != NULL);
23 BailoutReason bailout_reason = kNoReason;
24 if (!graph->Optimize(&bailout_reason)) {
25 FATAL(GetBailoutReason(bailout_reason));
26 }
27 LChunk* chunk = LChunk::NewChunk(graph);
28 if (chunk == NULL) {
29 FATAL(GetBailoutReason(graph->info()->bailout_reason()));
30 }
31 return chunk;
32}
33
34
// Shared base for Hydrogen graph builders that compile code stubs. It wires
// the stub's call interface descriptor into the graph builder, materializes
// HParameter instructions for all stub parameters, and offers helpers that
// concrete stub builders reuse (field loads/stores, array constructors,
// optimized-code-map installation, string conversions).
class CodeStubGraphBuilderBase : public HGraphBuilder {
 public:
  explicit CodeStubGraphBuilderBase(CompilationInfo* info, CodeStub* code_stub)
      : HGraphBuilder(info, code_stub->GetCallInterfaceDescriptor()),
        arguments_length_(NULL),
        info_(info),
        code_stub_(code_stub),
        descriptor_(code_stub),
        context_(NULL) {
    // One slot per descriptor parameter; filled in by BuildGraph().
    int parameter_count = GetParameterCount();
    parameters_.Reset(new HParameter*[parameter_count]);
  }
  // Builds the full stub graph: prologue, parameters, BuildCodeStub(), return.
  virtual bool BuildGraph();

 protected:
  // Implemented by each concrete stub builder; produces the stub's result.
  virtual HValue* BuildCodeStub() = 0;
  int GetParameterCount() const { return descriptor_.GetParameterCount(); }
  int GetRegisterParameterCount() const {
    return descriptor_.GetRegisterParameterCount();
  }
  // Returns the HParameter created by BuildGraph() for the given index.
  HParameter* GetParameter(int parameter) {
    DCHECK(parameter < GetParameterCount());
    return parameters_[parameter];
  }
  Representation GetParameterRepresentation(int parameter) {
    return RepresentationFromType(descriptor_.GetParameterType(parameter));
  }
  // True when register |index| carries the dynamic stack parameter count.
  bool IsParameterCountRegister(int index) const {
    return descriptor_.GetRegisterParameter(index)
        .is(descriptor_.stack_parameter_count());
  }
  HValue* GetArgumentsLength() {
    // This is initialized in BuildGraph()
    DCHECK(arguments_length_ != NULL);
    return arguments_length_;
  }
  CompilationInfo* info() { return info_; }
  CodeStub* stub() { return code_stub_; }
  HContext* context() { return context_; }
  Isolate* isolate() { return info_->isolate(); }

  HLoadNamedField* BuildLoadNamedField(HValue* object, FieldIndex index);
  void BuildStoreNamedField(HValue* object, HValue* value, FieldIndex index,
                            Representation representation,
                            bool transition_to_field);

  // Argument arity classes used by the array constructor helpers below.
  enum ArgumentClass {
    NONE,
    SINGLE,
    MULTIPLE
  };

  HValue* UnmappedCase(HValue* elements, HValue* key, HValue* value);
  HValue* EmitKeyedSloppyArguments(HValue* receiver, HValue* key,
                                   HValue* value);

  HValue* BuildArrayConstructor(ElementsKind kind,
                                AllocationSiteOverrideMode override_mode,
                                ArgumentClass argument_class);
  HValue* BuildInternalArrayConstructor(ElementsKind kind,
                                        ArgumentClass argument_class);

  // BuildCheckAndInstallOptimizedCode emits code to install the optimized
  // function found in the optimized code map at map_index in js_function, if
  // the function at map_index matches the given native_context. Builder is
  // left in the "Then()" state after the install.
  void BuildCheckAndInstallOptimizedCode(HValue* js_function,
                                         HValue* native_context,
                                         IfBuilder* builder,
                                         HValue* optimized_map,
                                         HValue* map_index);
  void BuildInstallOptimizedCode(HValue* js_function, HValue* native_context,
                                 HValue* code_object, HValue* literals);
  void BuildInstallCode(HValue* js_function, HValue* shared_info);

  HInstruction* LoadFromOptimizedCodeMap(HValue* optimized_map,
                                         HValue* iterator,
                                         int field_offset);
  void BuildInstallFromOptimizedCodeMap(HValue* js_function,
                                        HValue* shared_info,
                                        HValue* native_context);

  HValue* BuildToString(HValue* input, bool convert);
  HValue* BuildToPrimitive(HValue* input, HValue* input_map);

 private:
  HValue* BuildArraySingleArgumentConstructor(JSArrayBuilder* builder);
  HValue* BuildArrayNArgumentsConstructor(JSArrayBuilder* builder,
                                          ElementsKind kind);

  base::SmartArrayPointer<HParameter*> parameters_;
  HValue* arguments_length_;   // Set during BuildGraph(); see GetArgumentsLength().
  CompilationInfo* info_;
  CodeStub* code_stub_;
  CodeStubDescriptor descriptor_;
  HContext* context_;          // The HContext instruction; set in BuildGraph().
};
132
133
// Builds the complete Hydrogen graph for the stub: entry block, parameter
// instructions, context binding, the stub-specific body from BuildCodeStub(),
// and the final HReturn with the correct stack-pop count.
bool CodeStubGraphBuilderBase::BuildGraph() {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  if (FLAG_trace_hydrogen_stubs) {
    const char* name = CodeStub::MajorName(stub()->MajorKey());
    PrintF("-----------------------------------------------------------\n");
    PrintF("Compiling stub %s using hydrogen\n", name);
    isolate()->GetHTracer()->TraceCompilation(info());
  }

  int param_count = GetParameterCount();
  int register_param_count = GetRegisterParameterCount();
  HEnvironment* start_environment = graph()->start_environment();
  HBasicBlock* next_block = CreateBasicBlock(start_environment);
  Goto(next_block);
  next_block->SetJoinId(BailoutId::StubEntry());
  set_current_block(next_block);

  // Whether a stack parameter count is supplied at runtime in a register.
  bool runtime_stack_params = descriptor_.stack_parameter_count().is_valid();
  HInstruction* stack_parameter_count = NULL;
  for (int i = 0; i < param_count; ++i) {
    Representation r = GetParameterRepresentation(i);
    HParameter* param;
    if (i >= register_param_count) {
      // Parameters beyond the register set live on the stack.
      param = Add<HParameter>(i - register_param_count,
                              HParameter::STACK_PARAMETER, r);
    } else {
      param = Add<HParameter>(i, HParameter::REGISTER_PARAMETER, r);
      start_environment->Bind(i, param);
    }
    parameters_[i] = param;
    // Remember the register that carries the dynamic argument count, if any.
    if (i < register_param_count && IsParameterCountRegister(i)) {
      param->set_type(HType::Smi());
      stack_parameter_count = param;
      arguments_length_ = stack_parameter_count;
    }
  }

  DCHECK(!runtime_stack_params || arguments_length_ != NULL);
  if (!runtime_stack_params) {
    // No runtime count: the stack parameter count is a compile-time constant
    // (excluding the receiver, hence the -1).
    stack_parameter_count =
        Add<HConstant>(param_count - register_param_count - 1);
    // graph()->GetConstantMinus1();
    arguments_length_ = graph()->GetConstant0();
  }

  context_ = Add<HContext>();
  start_environment->BindContext(context_);
  // The context occupies the environment slot right after the parameters.
  start_environment->Bind(param_count, context_);

  Add<HSimulate>(BailoutId::StubEntry());

  NoObservableSideEffectsScope no_effects(this);

  HValue* return_value = BuildCodeStub();

  // We might have extra expressions to pop from the stack in addition to the
  // arguments above.
  HInstruction* stack_pop_count = stack_parameter_count;
  if (descriptor_.function_mode() == JS_FUNCTION_STUB_MODE) {
    if (!stack_parameter_count->IsConstant() &&
        descriptor_.hint_stack_parameter_count() < 0) {
      // Dynamic count: pop count + 1 to account for the receiver.
      HInstruction* constant_one = graph()->GetConstant1();
      stack_pop_count = AddUncasted<HAdd>(stack_parameter_count, constant_one);
      stack_pop_count->ClearFlag(HValue::kCanOverflow);
      // TODO(mvstanton): verify that stack_parameter_count+1 really fits in a
      // smi.
    } else {
      int count = descriptor_.hint_stack_parameter_count();
      stack_pop_count = Add<HConstant>(count);
    }
  }

  // current_block() may be NULL if BuildCodeStub() ended with a deopt.
  if (current_block() != NULL) {
    HReturn* hreturn_instruction = New<HReturn>(return_value,
                                                stack_pop_count);
    FinishCurrentBlock(hreturn_instruction);
  }
  return true;
}
215
216
// Concrete, per-stub-type graph builder. Dispatches between an initialized
// stub body (provided by a template specialization of BuildCodeStub or
// BuildCodeInitializedStub) and a generic "uninitialized" body that simply
// deopts into the runtime.
template <class Stub>
class CodeStubGraphBuilder: public CodeStubGraphBuilderBase {
 public:
  explicit CodeStubGraphBuilder(CompilationInfo* info, CodeStub* stub)
      : CodeStubGraphBuilderBase(info, stub) {}

 protected:
  virtual HValue* BuildCodeStub() {
    if (casted_stub()->IsUninitialized()) {
      return BuildCodeUninitializedStub();
    } else {
      return BuildCodeInitializedStub();
    }
  }

  // Must be overridden (or BuildCodeStub specialized) for stubs that are
  // compiled in the initialized state.
  virtual HValue* BuildCodeInitializedStub() {
    UNIMPLEMENTED();
    return NULL;
  }

  virtual HValue* BuildCodeUninitializedStub() {
    // Force a deopt that falls back to the runtime.
    // The condition undefined != undefined is always false, so control always
    // reaches the ElseDeopt.
    HValue* undefined = graph()->GetConstantUndefined();
    IfBuilder builder(this);
    builder.IfNot<HCompareObjectEqAndBranch, HValue*>(undefined, undefined);
    builder.Then();
    builder.ElseDeopt(Deoptimizer::kForcedDeoptToRuntime);
    return undefined;
  }

  // Downcasts the generic CodeStub* to the concrete stub type.
  Stub* casted_stub() { return static_cast<Stub*>(stub()); }
};
249
250
// Assembles a minimal "miss" stub that tail-calls the given runtime miss
// handler directly, bypassing the full Hydrogen pipeline. Used for
// uninitialized stubs where entering the runtime fast is all that is needed.
Handle<Code> HydrogenCodeStub::GenerateLightweightMissCode(
    ExternalReference miss) {
  Factory* factory = isolate()->factory();

  // Generate the new code.
  MacroAssembler masm(isolate(), NULL, 256, CodeObjectRequired::kYes);

  {
    // Update the static counter each time a new code stub is generated.
    isolate()->counters()->code_stubs()->Increment();

    // Generate the code for the stub.
    masm.set_generating_stub(true);
    // TODO(yangguo): remove this once we can serialize IC stubs.
    masm.enable_serializer();
    NoCurrentFrameScope scope(&masm);
    GenerateLightweightMiss(&masm, miss);
  }

  // Create the code object.
  CodeDesc desc;
  masm.GetCode(&desc);

  // Copy the generated code into a heap object.
  Handle<Code> new_object = factory->NewCode(
      desc, GetCodeFlags(), masm.CodeObject(), NeedsImmovableCode());
  return new_object;
}
279
280
// Compiles |stub| to machine code. Uninitialized stubs with a miss handler
// take the lightweight path; everything else goes through the full
// Hydrogen -> Lithium pipeline (graph build, optimize, codegen).
template <class Stub>
static Handle<Code> DoGenerateCode(Stub* stub) {
  Isolate* isolate = stub->isolate();
  CodeStubDescriptor descriptor(stub);

  // If we are uninitialized we can use a light-weight stub to enter
  // the runtime that is significantly faster than using the standard
  // stub-failure deopt mechanism.
  if (stub->IsUninitialized() && descriptor.has_miss_handler()) {
    DCHECK(!descriptor.stack_parameter_count().is_valid());
    return stub->GenerateLightweightMissCode(descriptor.miss_handler());
  }
  base::ElapsedTimer timer;
  if (FLAG_profile_hydrogen_code_stub_compilation) {
    timer.Start();
  }
  Zone zone;
  CompilationInfo info(CodeStub::MajorName(stub->MajorKey()), isolate, &zone,
                       stub->GetCodeFlags());
  // Parameter count is number of stack parameters.
  int parameter_count = descriptor.GetStackParameterCount();
  if (descriptor.function_mode() == NOT_JS_FUNCTION_STUB_MODE) {
    parameter_count--;
  }
  info.set_parameter_count(parameter_count);
  CodeStubGraphBuilder<Stub> builder(&info, stub);
  LChunk* chunk = OptimizeGraph(builder.CreateGraph());
  Handle<Code> code = chunk->Codegen();
  if (FLAG_profile_hydrogen_code_stub_compilation) {
    OFStream os(stdout);
    os << "[Lazy compilation of " << stub << " took "
       << timer.Elapsed().InMillisecondsF() << " ms]" << std::endl;
  }
  return code;
}
316
317
318template <>
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000319HValue* CodeStubGraphBuilder<NumberToStringStub>::BuildCodeStub() {
320 info()->MarkAsSavesCallerDoubles();
321 HValue* number = GetParameter(NumberToStringStub::kNumber);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100322 return BuildNumberToString(number, Type::Number());
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000323}
324
325
326Handle<Code> NumberToStringStub::GenerateCode() {
327 return DoGenerateCode(this);
328}
329
330
// Returns the type string of a value; see ECMA-262, 11.4.3 (p 47).
// The checks cascade from cheapest to most involved: smi -> heap number ->
// string -> oddball -> symbol -> callable -> SIMD types -> undetectable ->
// generic object. Each branch pushes the result string; the merged value is
// popped at the end.
template <>
HValue* CodeStubGraphBuilder<TypeofStub>::BuildCodeStub() {
  Factory* factory = isolate()->factory();
  HConstant* number_string = Add<HConstant>(factory->number_string());
  HValue* object = GetParameter(TypeofStub::kObject);

  IfBuilder is_smi(this);
  HValue* smi_check = is_smi.If<HIsSmiAndBranch>(object);
  is_smi.Then();
  { Push(number_string); }
  is_smi.Else();
  {
    // Heap numbers are also "number".
    IfBuilder is_number(this);
    is_number.If<HCompareMap>(object, isolate()->factory()->heap_number_map());
    is_number.Then();
    { Push(number_string); }
    is_number.Else();
    {
      HValue* map = AddLoadMap(object, smi_check);
      HValue* instance_type = Add<HLoadNamedField>(
          map, nullptr, HObjectAccess::ForMapInstanceType());
      // All string instance types sort below FIRST_NONSTRING_TYPE.
      IfBuilder is_string(this);
      is_string.If<HCompareNumericAndBranch>(
          instance_type, Add<HConstant>(FIRST_NONSTRING_TYPE), Token::LT);
      is_string.Then();
      { Push(Add<HConstant>(factory->string_string())); }
      is_string.Else();
      {
        HConstant* object_string = Add<HConstant>(factory->object_string());
        // Oddballs (undefined, null, booleans) carry their typeof string
        // in a dedicated field.
        IfBuilder is_oddball(this);
        is_oddball.If<HCompareNumericAndBranch>(
            instance_type, Add<HConstant>(ODDBALL_TYPE), Token::EQ);
        is_oddball.Then();
        {
          Push(Add<HLoadNamedField>(object, nullptr,
                                    HObjectAccess::ForOddballTypeOf()));
        }
        is_oddball.Else();
        {
          IfBuilder is_symbol(this);
          is_symbol.If<HCompareNumericAndBranch>(
              instance_type, Add<HConstant>(SYMBOL_TYPE), Token::EQ);
          is_symbol.Then();
          { Push(Add<HConstant>(factory->symbol_string())); }
          is_symbol.Else();
          {
            // "function" requires callable and NOT undetectable, so mask
            // both bits and compare against callable-only.
            HValue* bit_field = Add<HLoadNamedField>(
                map, nullptr, HObjectAccess::ForMapBitField());
            HValue* bit_field_masked = AddUncasted<HBitwise>(
                Token::BIT_AND, bit_field,
                Add<HConstant>((1 << Map::kIsCallable) |
                               (1 << Map::kIsUndetectable)));
            IfBuilder is_function(this);
            is_function.If<HCompareNumericAndBranch>(
                bit_field_masked, Add<HConstant>(1 << Map::kIsCallable),
                Token::EQ);
            is_function.Then();
            { Push(Add<HConstant>(factory->function_string())); }
            is_function.Else();
            {
// Each SIMD type gets its own map check returning its own type string;
// the Else branches nest and are closed by SIMD128_BUILDER_CLOSE below.
#define SIMD128_BUILDER_OPEN(TYPE, Type, type, lane_count, lane_type) \
  IfBuilder is_##type(this);                                          \
  is_##type.If<HCompareObjectEqAndBranch>(                            \
      map, Add<HConstant>(factory->type##_map()));                    \
  is_##type.Then();                                                   \
  { Push(Add<HConstant>(factory->type##_string())); }                 \
  is_##type.Else(); {
              SIMD128_TYPES(SIMD128_BUILDER_OPEN)
#undef SIMD128_BUILDER_OPEN
              // Is it an undetectable object?
              IfBuilder is_undetectable(this);
              is_undetectable.If<HCompareNumericAndBranch>(
                  bit_field_masked, graph()->GetConstant0(), Token::NE);
              is_undetectable.Then();
              {
                // typeof an undetectable object is 'undefined'.
                Push(Add<HConstant>(factory->undefined_string()));
              }
              is_undetectable.Else();
              {
                // For any kind of object not handled above, the spec rule for
                // host objects gives that it is okay to return "object".
                Push(object_string);
              }
#define SIMD128_BUILDER_CLOSE(TYPE, Type, type, lane_count, lane_type) }
              SIMD128_TYPES(SIMD128_BUILDER_CLOSE)
#undef SIMD128_BUILDER_CLOSE
            }
            is_function.End();
          }
          is_symbol.End();
        }
        is_oddball.End();
      }
      is_string.End();
    }
    is_number.End();
  }
  is_smi.End();

  return environment()->Pop();
}
434
435
436Handle<Code> TypeofStub::GenerateCode() { return DoGenerateCode(this); }
437
438
// Fast path for cloning a RegExp literal: loads the boilerplate from the
// closure's literals array and shallow-copies it field by field. Deopts to
// the runtime if the boilerplate slot is still undefined (uninitialized).
template <>
HValue* CodeStubGraphBuilder<FastCloneRegExpStub>::BuildCodeStub() {
  HValue* closure = GetParameter(0);
  HValue* literal_index = GetParameter(1);

  // This stub is very performance sensitive, the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();

  HValue* literals_array = Add<HLoadNamedField>(
      closure, nullptr, HObjectAccess::ForLiteralsPointer());
  HInstruction* boilerplate = Add<HLoadKeyed>(
      literals_array, literal_index, nullptr, nullptr, FAST_ELEMENTS,
      NEVER_RETURN_HOLE, LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag);

  IfBuilder if_notundefined(this);
  if_notundefined.IfNot<HCompareObjectEqAndBranch>(
      boilerplate, graph()->GetConstantUndefined());
  if_notundefined.Then();
  {
    // Allocate a fresh JSRegExp and copy map, properties and elements
    // pointers, then the remaining in-object fields word by word.
    int result_size =
        JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
    HValue* result =
        Add<HAllocate>(Add<HConstant>(result_size), HType::JSObject(),
                       NOT_TENURED, JS_REGEXP_TYPE);
    Add<HStoreNamedField>(
        result, HObjectAccess::ForMap(),
        Add<HLoadNamedField>(boilerplate, nullptr, HObjectAccess::ForMap()));
    Add<HStoreNamedField>(
        result, HObjectAccess::ForPropertiesPointer(),
        Add<HLoadNamedField>(boilerplate, nullptr,
                             HObjectAccess::ForPropertiesPointer()));
    Add<HStoreNamedField>(
        result, HObjectAccess::ForElementsPointer(),
        Add<HLoadNamedField>(boilerplate, nullptr,
                             HObjectAccess::ForElementsPointer()));
    for (int offset = JSObject::kHeaderSize; offset < result_size;
         offset += kPointerSize) {
      HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(offset);
      Add<HStoreNamedField>(result, access,
                            Add<HLoadNamedField>(boilerplate, nullptr, access));
    }
    Push(result);
  }
  if_notundefined.ElseDeopt(Deoptimizer::kUninitializedBoilerplateInFastClone);
  if_notundefined.End();

  return Pop();
}
488
489
490Handle<Code> FastCloneRegExpStub::GenerateCode() {
491 return DoGenerateCode(this);
492}
493
494
// Fast path for cloning a shallow array literal. Looks up the allocation
// site for the literal, then dispatches on the boilerplate's elements:
// empty, copy-on-write, FAST_ELEMENTS, or FAST_DOUBLE_ELEMENTS. Deopts if
// the allocation site slot is still uninitialized (undefined).
template <>
HValue* CodeStubGraphBuilder<FastCloneShallowArrayStub>::BuildCodeStub() {
  Factory* factory = isolate()->factory();
  HValue* undefined = graph()->GetConstantUndefined();
  AllocationSiteMode alloc_site_mode = casted_stub()->allocation_site_mode();
  HValue* closure = GetParameter(0);
  HValue* literal_index = GetParameter(1);

  // This stub is very performance sensitive, the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();

  HValue* literals_array = Add<HLoadNamedField>(
      closure, nullptr, HObjectAccess::ForLiteralsPointer());

  HInstruction* allocation_site = Add<HLoadKeyed>(
      literals_array, literal_index, nullptr, nullptr, FAST_ELEMENTS,
      NEVER_RETURN_HOLE, LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag);
  IfBuilder checker(this);
  checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
                                                    undefined);
  checker.Then();

  // The boilerplate array lives in the site's transition-info slot.
  HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
      AllocationSite::kTransitionInfoOffset);
  HInstruction* boilerplate =
      Add<HLoadNamedField>(allocation_site, nullptr, access);
  HValue* elements = AddLoadElements(boilerplate);
  HValue* capacity = AddLoadFixedArrayLength(elements);
  IfBuilder zero_capacity(this);
  zero_capacity.If<HCompareNumericAndBranch>(capacity, graph()->GetConstant0(),
                                             Token::EQ);
  zero_capacity.Then();
  Push(BuildCloneShallowArrayEmpty(boilerplate,
                                   allocation_site,
                                   alloc_site_mode));
  zero_capacity.Else();
  // COW elements can be shared instead of copied.
  IfBuilder if_fixed_cow(this);
  if_fixed_cow.If<HCompareMap>(elements, factory->fixed_cow_array_map());
  if_fixed_cow.Then();
  Push(BuildCloneShallowArrayCow(boilerplate,
                                 allocation_site,
                                 alloc_site_mode,
                                 FAST_ELEMENTS));
  if_fixed_cow.Else();
  // Otherwise distinguish tagged elements from unboxed doubles.
  IfBuilder if_fixed(this);
  if_fixed.If<HCompareMap>(elements, factory->fixed_array_map());
  if_fixed.Then();
  Push(BuildCloneShallowArrayNonEmpty(boilerplate,
                                      allocation_site,
                                      alloc_site_mode,
                                      FAST_ELEMENTS));

  if_fixed.Else();
  Push(BuildCloneShallowArrayNonEmpty(boilerplate,
                                      allocation_site,
                                      alloc_site_mode,
                                      FAST_DOUBLE_ELEMENTS));
  if_fixed.End();
  if_fixed_cow.End();
  zero_capacity.End();

  checker.ElseDeopt(Deoptimizer::kUninitializedBoilerplateLiterals);
  checker.End();

  return environment()->Pop();
}
562
563
564Handle<Code> FastCloneShallowArrayStub::GenerateCode() {
565 return DoGenerateCode(this);
566}
567
568
// Fast path for cloning a shallow object literal. Guards on (1) the
// allocation site being initialized and (2) the boilerplate's instance size
// matching the stub's expected size, then does a word-by-word copy.
// Optionally appends an allocation memento for pretenuring feedback.
template <>
HValue* CodeStubGraphBuilder<FastCloneShallowObjectStub>::BuildCodeStub() {
  HValue* undefined = graph()->GetConstantUndefined();
  HValue* closure = GetParameter(0);
  HValue* literal_index = GetParameter(1);

  HValue* literals_array = Add<HLoadNamedField>(
      closure, nullptr, HObjectAccess::ForLiteralsPointer());

  HInstruction* allocation_site = Add<HLoadKeyed>(
      literals_array, literal_index, nullptr, nullptr, FAST_ELEMENTS,
      NEVER_RETURN_HOLE, LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag);

  IfBuilder checker(this);
  checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
                                                    undefined);
  checker.And();

  HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
      AllocationSite::kTransitionInfoOffset);
  HInstruction* boilerplate =
      Add<HLoadNamedField>(allocation_site, nullptr, access);

  int length = casted_stub()->length();
  if (length == 0) {
    // Empty objects have some slack added to them.
    length = JSObject::kInitialGlobalObjectUnusedPropertiesCount;
  }
  // object_size is the copied payload; size additionally covers the optional
  // allocation memento appended after the object.
  int size = JSObject::kHeaderSize + length * kPointerSize;
  int object_size = size;
  if (FLAG_allocation_site_pretenuring) {
    size += AllocationMemento::kSize;
  }

  // Second guard: the boilerplate's instance size must match what this stub
  // was specialized for.
  HValue* boilerplate_map =
      Add<HLoadNamedField>(boilerplate, nullptr, HObjectAccess::ForMap());
  HValue* boilerplate_size = Add<HLoadNamedField>(
      boilerplate_map, nullptr, HObjectAccess::ForMapInstanceSize());
  HValue* size_in_words = Add<HConstant>(object_size >> kPointerSizeLog2);
  checker.If<HCompareNumericAndBranch>(boilerplate_size,
                                       size_in_words, Token::EQ);
  checker.Then();

  HValue* size_in_bytes = Add<HConstant>(size);

  HInstruction* object = Add<HAllocate>(size_in_bytes, HType::JSObject(),
                                        NOT_TENURED, JS_OBJECT_TYPE);

  // Shallow copy: every in-object word of the boilerplate.
  for (int i = 0; i < object_size; i += kPointerSize) {
    HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(i);
    Add<HStoreNamedField>(object, access,
                          Add<HLoadNamedField>(boilerplate, nullptr, access));
  }

  DCHECK(FLAG_allocation_site_pretenuring || (size == object_size));
  if (FLAG_allocation_site_pretenuring) {
    BuildCreateAllocationMemento(
        object, Add<HConstant>(object_size), allocation_site);
  }

  environment()->Push(object);
  checker.ElseDeopt(Deoptimizer::kUninitializedBoilerplateInFastClone);
  checker.End();

  return environment()->Pop();
}
635
636
637Handle<Code> FastCloneShallowObjectStub::GenerateCode() {
638 return DoGenerateCode(this);
639}
640
641
// Allocates and initializes a new AllocationSite in old space, links it into
// the isolate's allocation-site list, and records it in the feedback vector
// slot passed as parameters (0 = vector, 1 = slot).
template <>
HValue* CodeStubGraphBuilder<CreateAllocationSiteStub>::BuildCodeStub() {
  // This stub is performance sensitive, the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();

  HValue* size = Add<HConstant>(AllocationSite::kSize);
  HInstruction* object = Add<HAllocate>(size, HType::JSObject(), TENURED,
                                        JS_OBJECT_TYPE);

  // Store the map
  Handle<Map> allocation_site_map = isolate()->factory()->allocation_site_map();
  AddStoreMapConstant(object, allocation_site_map);

  // Store the payload (smi elements kind)
  HValue* initial_elements_kind = Add<HConstant>(GetInitialFastElementsKind());
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kTransitionInfoOffset),
                        initial_elements_kind);

  // Unlike literals, constructed arrays don't have nested sites
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kNestedSiteOffset),
                        graph()->GetConstant0());

  // Pretenuring calculation field.
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kPretenureDataOffset),
                        graph()->GetConstant0());

  // Pretenuring memento creation count field.
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kPretenureCreateCountOffset),
                        graph()->GetConstant0());

  // Store an empty fixed array for the code dependency.
  HConstant* empty_fixed_array =
      Add<HConstant>(isolate()->factory()->empty_fixed_array());
  Add<HStoreNamedField>(
      object,
      HObjectAccess::ForAllocationSiteOffset(
          AllocationSite::kDependentCodeOffset),
      empty_fixed_array);

  // Link the object to the allocation site list
  HValue* site_list = Add<HConstant>(
      ExternalReference::allocation_sites_list_address(isolate()));
  HValue* site = Add<HLoadNamedField>(site_list, nullptr,
                                      HObjectAccess::ForAllocationSiteList());
  // TODO(mvstanton): This is a store to a weak pointer, which we may want to
  // mark as such in order to skip the write barrier, once we have a unified
  // system for weakness. For now we decided to keep it like this because having
  // an initial write barrier backed store makes this pointer strong until the
  // next GC, and allocation sites are designed to survive several GCs anyway.
  Add<HStoreNamedField>(
      object,
      HObjectAccess::ForAllocationSiteOffset(AllocationSite::kWeakNextOffset),
      site);
  Add<HStoreNamedField>(site_list, HObjectAccess::ForAllocationSiteList(),
                        object);

  // Record the new site in the feedback vector and return the vector.
  HInstruction* feedback_vector = GetParameter(0);
  HInstruction* slot = GetParameter(1);
  Add<HStoreKeyed>(feedback_vector, slot, object, nullptr, FAST_ELEMENTS,
                   INITIALIZING_STORE);
  return feedback_vector;
}
713
714
715Handle<Code> CreateAllocationSiteStub::GenerateCode() {
716 return DoGenerateCode(this);
717}
718
719
720template <>
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000721HValue* CodeStubGraphBuilder<CreateWeakCellStub>::BuildCodeStub() {
722 // This stub is performance sensitive, the generated code must be tuned
723 // so that it doesn't build an eager frame.
724 info()->MarkMustNotHaveEagerFrame();
725
726 HValue* size = Add<HConstant>(WeakCell::kSize);
727 HInstruction* object =
728 Add<HAllocate>(size, HType::JSObject(), TENURED, JS_OBJECT_TYPE);
729
730 Handle<Map> weak_cell_map = isolate()->factory()->weak_cell_map();
731 AddStoreMapConstant(object, weak_cell_map);
732
733 HInstruction* value = GetParameter(CreateWeakCellDescriptor::kValueIndex);
734 Add<HStoreNamedField>(object, HObjectAccess::ForWeakCellValue(), value);
735 Add<HStoreNamedField>(object, HObjectAccess::ForWeakCellNext(),
736 graph()->GetConstantHole());
737
738 HInstruction* feedback_vector =
739 GetParameter(CreateWeakCellDescriptor::kVectorIndex);
740 HInstruction* slot = GetParameter(CreateWeakCellDescriptor::kSlotIndex);
741 Add<HStoreKeyed>(feedback_vector, slot, object, nullptr, FAST_ELEMENTS,
742 INITIALIZING_STORE);
743 return graph()->GetConstant0();
744}
745
746
747Handle<Code> CreateWeakCellStub::GenerateCode() { return DoGenerateCode(this); }
748
749
750template <>
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400751HValue* CodeStubGraphBuilder<LoadScriptContextFieldStub>::BuildCodeStub() {
752 int context_index = casted_stub()->context_index();
753 int slot_index = casted_stub()->slot_index();
754
755 HValue* script_context = BuildGetScriptContext(context_index);
756 return Add<HLoadNamedField>(script_context, nullptr,
757 HObjectAccess::ForContextSlot(slot_index));
758}
759
760
761Handle<Code> LoadScriptContextFieldStub::GenerateCode() {
762 return DoGenerateCode(this);
763}
764
765
766template <>
767HValue* CodeStubGraphBuilder<StoreScriptContextFieldStub>::BuildCodeStub() {
768 int context_index = casted_stub()->context_index();
769 int slot_index = casted_stub()->slot_index();
770
771 HValue* script_context = BuildGetScriptContext(context_index);
772 Add<HStoreNamedField>(script_context,
773 HObjectAccess::ForContextSlot(slot_index),
774 GetParameter(2), STORE_TO_INITIALIZED_ENTRY);
775 return GetParameter(2);
776}
777
778
// Compile this stub through the shared Hydrogen stub pipeline.
Handle<Code> StoreScriptContextFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}
782
783
784template <>
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000785HValue* CodeStubGraphBuilder<GrowArrayElementsStub>::BuildCodeStub() {
786 ElementsKind kind = casted_stub()->elements_kind();
787 if (IsFastDoubleElementsKind(kind)) {
788 info()->MarkAsSavesCallerDoubles();
789 }
790
791 HValue* object = GetParameter(GrowArrayElementsDescriptor::kObjectIndex);
792 HValue* key = GetParameter(GrowArrayElementsDescriptor::kKeyIndex);
793
794 HValue* elements = AddLoadElements(object);
795 HValue* current_capacity = Add<HLoadNamedField>(
796 elements, nullptr, HObjectAccess::ForFixedArrayLength());
797
798 HValue* length =
799 casted_stub()->is_js_array()
800 ? Add<HLoadNamedField>(object, static_cast<HValue*>(NULL),
801 HObjectAccess::ForArrayLength(kind))
802 : current_capacity;
803
804 return BuildCheckAndGrowElementsCapacity(object, elements, kind, length,
805 current_capacity, key);
806}
807
808
// Compile this stub through the shared Hydrogen stub pipeline.
Handle<Code> GrowArrayElementsStub::GenerateCode() {
  return DoGenerateCode(this);
}
812
813
814template <>
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000815HValue* CodeStubGraphBuilder<LoadFastElementStub>::BuildCodeStub() {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000816 LoadKeyedHoleMode hole_mode = casted_stub()->convert_hole_to_undefined()
817 ? CONVERT_HOLE_TO_UNDEFINED
818 : NEVER_RETURN_HOLE;
819
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000820 HInstruction* load = BuildUncheckedMonomorphicElementAccess(
821 GetParameter(LoadDescriptor::kReceiverIndex),
822 GetParameter(LoadDescriptor::kNameIndex), NULL,
823 casted_stub()->is_js_array(), casted_stub()->elements_kind(), LOAD,
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000824 hole_mode, STANDARD_STORE);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000825 return load;
826}
827
828
// Compile this stub through the shared Hydrogen stub pipeline.
Handle<Code> LoadFastElementStub::GenerateCode() {
  return DoGenerateCode(this);
}
832
833
834HLoadNamedField* CodeStubGraphBuilderBase::BuildLoadNamedField(
835 HValue* object, FieldIndex index) {
836 Representation representation = index.is_double()
837 ? Representation::Double()
838 : Representation::Tagged();
839 int offset = index.offset();
840 HObjectAccess access = index.is_inobject()
841 ? HObjectAccess::ForObservableJSObjectOffset(offset, representation)
842 : HObjectAccess::ForBackingStoreOffset(offset, representation);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400843 if (index.is_double() &&
844 (!FLAG_unbox_double_fields || !index.is_inobject())) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000845 // Load the heap number.
846 object = Add<HLoadNamedField>(
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400847 object, nullptr, access.WithRepresentation(Representation::Tagged()));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000848 // Load the double value from it.
849 access = HObjectAccess::ForHeapNumberValue();
850 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400851 return Add<HLoadNamedField>(object, nullptr, access);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000852}
853
854
855template<>
856HValue* CodeStubGraphBuilder<LoadFieldStub>::BuildCodeStub() {
857 return BuildLoadNamedField(GetParameter(0), casted_stub()->index());
858}
859
860
// Compile this stub through the shared Hydrogen stub pipeline.
Handle<Code> LoadFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}
864
865
866template <>
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000867HValue* CodeStubGraphBuilder<ArrayBufferViewLoadFieldStub>::BuildCodeStub() {
868 return BuildArrayBufferViewFieldAccessor(GetParameter(0), nullptr,
869 casted_stub()->index());
870}
871
872
// Compile this stub through the shared Hydrogen stub pipeline.
Handle<Code> ArrayBufferViewLoadFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}
876
877
878template <>
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000879HValue* CodeStubGraphBuilder<LoadConstantStub>::BuildCodeStub() {
880 HValue* map = AddLoadMap(GetParameter(0), NULL);
881 HObjectAccess descriptors_access = HObjectAccess::ForObservableJSObjectOffset(
882 Map::kDescriptorsOffset, Representation::Tagged());
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400883 HValue* descriptors = Add<HLoadNamedField>(map, nullptr, descriptors_access);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000884 HObjectAccess value_access = HObjectAccess::ForObservableJSObjectOffset(
885 DescriptorArray::GetValueOffset(casted_stub()->constant_index()));
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400886 return Add<HLoadNamedField>(descriptors, nullptr, value_access);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000887}
888
889
// Compile this stub through the shared Hydrogen stub pipeline.
Handle<Code> LoadConstantStub::GenerateCode() { return DoGenerateCode(this); }
891
892
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000893HValue* CodeStubGraphBuilderBase::UnmappedCase(HValue* elements, HValue* key,
894 HValue* value) {
895 HValue* result = NULL;
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400896 HInstruction* backing_store =
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000897 Add<HLoadKeyed>(elements, graph()->GetConstant1(), nullptr, nullptr,
898 FAST_ELEMENTS, ALLOW_RETURN_HOLE);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000899 Add<HCheckMaps>(backing_store, isolate()->factory()->fixed_array_map());
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400900 HValue* backing_store_length = Add<HLoadNamedField>(
901 backing_store, nullptr, HObjectAccess::ForFixedArrayLength());
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000902 IfBuilder in_unmapped_range(this);
903 in_unmapped_range.If<HCompareNumericAndBranch>(key, backing_store_length,
904 Token::LT);
905 in_unmapped_range.Then();
906 {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000907 if (value == NULL) {
908 result = Add<HLoadKeyed>(backing_store, key, nullptr, nullptr,
909 FAST_HOLEY_ELEMENTS, NEVER_RETURN_HOLE);
910 } else {
911 Add<HStoreKeyed>(backing_store, key, value, nullptr, FAST_HOLEY_ELEMENTS);
912 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000913 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000914 in_unmapped_range.ElseDeopt(Deoptimizer::kOutsideOfRange);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000915 in_unmapped_range.End();
916 return result;
917}
918
919
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000920HValue* CodeStubGraphBuilderBase::EmitKeyedSloppyArguments(HValue* receiver,
921 HValue* key,
922 HValue* value) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000923 // Mapped arguments are actual arguments. Unmapped arguments are values added
924 // to the arguments object after it was created for the call. Mapped arguments
925 // are stored in the context at indexes given by elements[key + 2]. Unmapped
926 // arguments are stored as regular indexed properties in the arguments array,
927 // held at elements[1]. See NewSloppyArguments() in runtime.cc for a detailed
928 // look at argument object construction.
929 //
930 // The sloppy arguments elements array has a special format:
931 //
932 // 0: context
933 // 1: unmapped arguments array
934 // 2: mapped_index0,
935 // 3: mapped_index1,
936 // ...
937 //
938 // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments).
939 // If key + 2 >= elements.length then attempt to look in the unmapped
940 // arguments array (given by elements[1]) and return the value at key, missing
941 // to the runtime if the unmapped arguments array is not a fixed array or if
942 // key >= unmapped_arguments_array.length.
943 //
944 // Otherwise, t = elements[key + 2]. If t is the hole, then look up the value
945 // in the unmapped arguments array, as described above. Otherwise, t is a Smi
946 // index into the context array given at elements[0]. Return the value at
947 // context[t].
948
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000949 bool is_load = value == NULL;
950
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000951 key = AddUncasted<HForceRepresentation>(key, Representation::Smi());
952 IfBuilder positive_smi(this);
953 positive_smi.If<HCompareNumericAndBranch>(key, graph()->GetConstant0(),
954 Token::LT);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000955 positive_smi.ThenDeopt(Deoptimizer::kKeyIsNegative);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000956 positive_smi.End();
957
958 HValue* constant_two = Add<HConstant>(2);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400959 HValue* elements = AddLoadElements(receiver, nullptr);
960 HValue* elements_length = Add<HLoadNamedField>(
961 elements, nullptr, HObjectAccess::ForFixedArrayLength());
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000962 HValue* adjusted_length = AddUncasted<HSub>(elements_length, constant_two);
963 IfBuilder in_range(this);
964 in_range.If<HCompareNumericAndBranch>(key, adjusted_length, Token::LT);
965 in_range.Then();
966 {
967 HValue* index = AddUncasted<HAdd>(key, constant_two);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000968 HInstruction* mapped_index =
969 Add<HLoadKeyed>(elements, index, nullptr, nullptr, FAST_HOLEY_ELEMENTS,
970 ALLOW_RETURN_HOLE);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000971
972 IfBuilder is_valid(this);
973 is_valid.IfNot<HCompareObjectEqAndBranch>(mapped_index,
974 graph()->GetConstantHole());
975 is_valid.Then();
976 {
977 // TODO(mvstanton): I'd like to assert from this point, that if the
978 // mapped_index is not the hole that it is indeed, a smi. An unnecessary
979 // smi check is being emitted.
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400980 HValue* the_context = Add<HLoadKeyed>(elements, graph()->GetConstant0(),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000981 nullptr, nullptr, FAST_ELEMENTS);
982 STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize);
983 if (is_load) {
984 HValue* result =
985 Add<HLoadKeyed>(the_context, mapped_index, nullptr, nullptr,
986 FAST_ELEMENTS, ALLOW_RETURN_HOLE);
987 environment()->Push(result);
988 } else {
989 DCHECK(value != NULL);
990 Add<HStoreKeyed>(the_context, mapped_index, value, nullptr,
991 FAST_ELEMENTS);
992 environment()->Push(value);
993 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000994 }
995 is_valid.Else();
996 {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000997 HValue* result = UnmappedCase(elements, key, value);
998 environment()->Push(is_load ? result : value);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000999 }
1000 is_valid.End();
1001 }
1002 in_range.Else();
1003 {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001004 HValue* result = UnmappedCase(elements, key, value);
1005 environment()->Push(is_load ? result : value);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001006 }
1007 in_range.End();
1008
1009 return environment()->Pop();
1010}
1011
1012
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001013template <>
1014HValue* CodeStubGraphBuilder<KeyedLoadSloppyArgumentsStub>::BuildCodeStub() {
1015 HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
1016 HValue* key = GetParameter(LoadDescriptor::kNameIndex);
1017
1018 return EmitKeyedSloppyArguments(receiver, key, NULL);
1019}
1020
1021
// Compile this stub through the shared Hydrogen stub pipeline.
Handle<Code> KeyedLoadSloppyArgumentsStub::GenerateCode() {
  return DoGenerateCode(this);
}
1025
1026
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001027template <>
1028HValue* CodeStubGraphBuilder<KeyedStoreSloppyArgumentsStub>::BuildCodeStub() {
1029 HValue* receiver = GetParameter(StoreDescriptor::kReceiverIndex);
1030 HValue* key = GetParameter(StoreDescriptor::kNameIndex);
1031 HValue* value = GetParameter(StoreDescriptor::kValueIndex);
1032
1033 return EmitKeyedSloppyArguments(receiver, key, value);
1034}
1035
1036
// Compile this stub through the shared Hydrogen stub pipeline.
Handle<Code> KeyedStoreSloppyArgumentsStub::GenerateCode() {
  return DoGenerateCode(this);
}
1040
1041
// Emits a store of |value| into the field described by |index|. Double fields
// that are not stored unboxed go through a HeapNumber box: when transitioning
// to the field a fresh mutable HeapNumber is allocated to hold the value;
// otherwise the existing box is loaded and its payload overwritten. Values
// with heap-object representation get a heap-object check first.
void CodeStubGraphBuilderBase::BuildStoreNamedField(
    HValue* object, HValue* value, FieldIndex index,
    Representation representation, bool transition_to_field) {
  DCHECK(!index.is_double() || representation.IsDouble());
  int offset = index.offset();
  HObjectAccess access =
      index.is_inobject()
          ? HObjectAccess::ForObservableJSObjectOffset(offset, representation)
          : HObjectAccess::ForBackingStoreOffset(offset, representation);

  if (representation.IsDouble()) {
    if (!FLAG_unbox_double_fields || !index.is_inobject()) {
      // The box itself is stored as a tagged pointer at the field's offset.
      HObjectAccess heap_number_access =
          access.WithRepresentation(Representation::Tagged());
      if (transition_to_field) {
        // The store requires a mutable HeapNumber to be allocated.
        NoObservableSideEffectsScope no_side_effects(this);
        HInstruction* heap_number_size = Add<HConstant>(HeapNumber::kSize);

        // TODO(hpayer): Allocation site pretenuring support.
        HInstruction* heap_number =
            Add<HAllocate>(heap_number_size, HType::HeapObject(), NOT_TENURED,
                           MUTABLE_HEAP_NUMBER_TYPE);
        AddStoreMapConstant(heap_number,
                            isolate()->factory()->mutable_heap_number_map());
        Add<HStoreNamedField>(heap_number, HObjectAccess::ForHeapNumberValue(),
                              value);
        // Store the new mutable heap number into the object.
        access = heap_number_access;
        value = heap_number;
      } else {
        // Load the heap number.
        object = Add<HLoadNamedField>(object, nullptr, heap_number_access);
        // Store the double value into it.
        access = HObjectAccess::ForHeapNumberValue();
      }
    }
  } else if (representation.IsHeapObject()) {
    BuildCheckHeapObject(value);
  }

  Add<HStoreNamedField>(object, access, value, INITIALIZING_STORE);
}
1085
1086
1087template <>
1088HValue* CodeStubGraphBuilder<StoreFieldStub>::BuildCodeStub() {
1089 BuildStoreNamedField(GetParameter(0), GetParameter(2), casted_stub()->index(),
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001090 casted_stub()->representation(), false);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001091 return GetParameter(2);
1092}
1093
1094
// Compile this stub through the shared Hydrogen stub pipeline.
Handle<Code> StoreFieldStub::GenerateCode() { return DoGenerateCode(this); }
1096
1097
// Builds the map-transitioning store. Depending on the stub's store mode this
// (1) grows the out-of-object properties backing store, (2) stores the value
// into the (possibly extended) object, and (3) installs the new map. The
// switch cases fall through deliberately so that later steps also run for
// the more work-intensive earlier modes.
template <>
HValue* CodeStubGraphBuilder<StoreTransitionStub>::BuildCodeStub() {
  HValue* object = GetParameter(StoreTransitionHelper::ReceiverIndex());

  switch (casted_stub()->store_mode()) {
    case StoreTransitionStub::ExtendStorageAndStoreMapAndValue: {
      HValue* properties = Add<HLoadNamedField>(
          object, nullptr, HObjectAccess::ForPropertiesPointer());
      HValue* length = AddLoadFixedArrayLength(properties);
      HValue* delta =
          Add<HConstant>(static_cast<int32_t>(JSObject::kFieldsAdded));
      HValue* new_capacity = AddUncasted<HAdd>(length, delta);

      // Grow properties array.
      ElementsKind kind = FAST_ELEMENTS;
      // Bail out (bounds check) if the grown store would exceed the largest
      // regular heap object.
      Add<HBoundsCheck>(new_capacity,
                        Add<HConstant>((Page::kMaxRegularHeapObjectSize -
                                        FixedArray::kHeaderSize) >>
                                       ElementsKindToShiftSize(kind)));

      // Reuse this code for properties backing store allocation.
      HValue* new_properties =
          BuildAllocateAndInitializeArray(kind, new_capacity);

      BuildCopyProperties(properties, new_properties, length, new_capacity);

      Add<HStoreNamedField>(object, HObjectAccess::ForPropertiesPointer(),
                            new_properties);
    }
    // Fall through.
    case StoreTransitionStub::StoreMapAndValue:
      // Store the new value into the "extended" object.
      BuildStoreNamedField(
          object, GetParameter(StoreTransitionHelper::ValueIndex()),
          casted_stub()->index(), casted_stub()->representation(), true);
      // Fall through.

    case StoreTransitionStub::StoreMapOnly:
      // And finally update the map.
      Add<HStoreNamedField>(object, HObjectAccess::ForMap(),
                            GetParameter(StoreTransitionHelper::MapIndex()));
      break;
  }
  // A store stub returns the stored value.
  return GetParameter(StoreTransitionHelper::ValueIndex());
}
1143
1144
// Compile this stub through the shared Hydrogen stub pipeline.
Handle<Code> StoreTransitionStub::GenerateCode() {
  return DoGenerateCode(this);
}
1148
1149
1150template <>
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001151HValue* CodeStubGraphBuilder<StoreFastElementStub>::BuildCodeStub() {
1152 BuildUncheckedMonomorphicElementAccess(
1153 GetParameter(StoreDescriptor::kReceiverIndex),
1154 GetParameter(StoreDescriptor::kNameIndex),
1155 GetParameter(StoreDescriptor::kValueIndex), casted_stub()->is_js_array(),
1156 casted_stub()->elements_kind(), STORE, NEVER_RETURN_HOLE,
1157 casted_stub()->store_mode());
1158
1159 return GetParameter(2);
1160}
1161
1162
// Compile this stub through the shared Hydrogen stub pipeline.
Handle<Code> StoreFastElementStub::GenerateCode() {
  return DoGenerateCode(this);
}
1166
1167
1168template <>
1169HValue* CodeStubGraphBuilder<TransitionElementsKindStub>::BuildCodeStub() {
1170 info()->MarkAsSavesCallerDoubles();
1171
1172 BuildTransitionElementsKind(GetParameter(0),
1173 GetParameter(1),
1174 casted_stub()->from_kind(),
1175 casted_stub()->to_kind(),
1176 casted_stub()->is_js_array());
1177
1178 return GetParameter(0);
1179}
1180
1181
// Compile this stub through the shared Hydrogen stub pipeline.
Handle<Code> TransitionElementsKindStub::GenerateCode() {
  return DoGenerateCode(this);
}
1185
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001186
1187template <>
1188HValue* CodeStubGraphBuilder<AllocateHeapNumberStub>::BuildCodeStub() {
1189 HValue* result =
1190 Add<HAllocate>(Add<HConstant>(HeapNumber::kSize), HType::HeapNumber(),
1191 NOT_TENURED, HEAP_NUMBER_TYPE);
1192 AddStoreMapConstant(result, isolate()->factory()->heap_number_map());
1193 return result;
1194}
1195
1196
// Compile this stub through the shared Hydrogen stub pipeline.
Handle<Code> AllocateHeapNumberStub::GenerateCode() {
  return DoGenerateCode(this);
}
1200
1201
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001202template <>
1203HValue* CodeStubGraphBuilder<AllocateMutableHeapNumberStub>::BuildCodeStub() {
1204 HValue* result =
1205 Add<HAllocate>(Add<HConstant>(HeapNumber::kSize), HType::HeapObject(),
1206 NOT_TENURED, MUTABLE_HEAP_NUMBER_TYPE);
1207 AddStoreMapConstant(result, isolate()->factory()->mutable_heap_number_map());
1208 return result;
1209}
1210
1211
// Compile this stub through the shared Hydrogen stub pipeline.
Handle<Code> AllocateMutableHeapNumberStub::GenerateCode() {
  return DoGenerateCode(this);
}
1215
1216
1217template <>
1218HValue* CodeStubGraphBuilder<AllocateInNewSpaceStub>::BuildCodeStub() {
1219 HValue* result = Add<HAllocate>(GetParameter(0), HType::Tagged(), NOT_TENURED,
1220 JS_OBJECT_TYPE);
1221 return result;
1222}
1223
1224
// Compile this stub through the shared Hydrogen stub pipeline.
Handle<Code> AllocateInNewSpaceStub::GenerateCode() {
  return DoGenerateCode(this);
}
1228
1229
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001230HValue* CodeStubGraphBuilderBase::BuildArrayConstructor(
1231 ElementsKind kind,
1232 AllocationSiteOverrideMode override_mode,
1233 ArgumentClass argument_class) {
1234 HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor);
1235 HValue* alloc_site = GetParameter(ArrayConstructorStubBase::kAllocationSite);
1236 JSArrayBuilder array_builder(this, kind, alloc_site, constructor,
1237 override_mode);
1238 HValue* result = NULL;
1239 switch (argument_class) {
1240 case NONE:
1241 // This stub is very performance sensitive, the generated code must be
1242 // tuned so that it doesn't build and eager frame.
1243 info()->MarkMustNotHaveEagerFrame();
1244 result = array_builder.AllocateEmptyArray();
1245 break;
1246 case SINGLE:
1247 result = BuildArraySingleArgumentConstructor(&array_builder);
1248 break;
1249 case MULTIPLE:
1250 result = BuildArrayNArgumentsConstructor(&array_builder, kind);
1251 break;
1252 }
1253
1254 return result;
1255}
1256
1257
1258HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor(
1259 ElementsKind kind, ArgumentClass argument_class) {
1260 HValue* constructor = GetParameter(
1261 InternalArrayConstructorStubBase::kConstructor);
1262 JSArrayBuilder array_builder(this, kind, constructor);
1263
1264 HValue* result = NULL;
1265 switch (argument_class) {
1266 case NONE:
1267 // This stub is very performance sensitive, the generated code must be
1268 // tuned so that it doesn't build and eager frame.
1269 info()->MarkMustNotHaveEagerFrame();
1270 result = array_builder.AllocateEmptyArray();
1271 break;
1272 case SINGLE:
1273 result = BuildArraySingleArgumentConstructor(&array_builder);
1274 break;
1275 case MULTIPLE:
1276 result = BuildArrayNArgumentsConstructor(&array_builder, kind);
1277 break;
1278 }
1279 return result;
1280}
1281
1282
1283HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor(
1284 JSArrayBuilder* array_builder) {
1285 // Smi check and range check on the input arg.
1286 HValue* constant_one = graph()->GetConstant1();
1287 HValue* constant_zero = graph()->GetConstant0();
1288
1289 HInstruction* elements = Add<HArgumentsElements>(false);
1290 HInstruction* argument = Add<HAccessArgumentsAt>(
1291 elements, constant_one, constant_zero);
1292
1293 return BuildAllocateArrayFromLength(array_builder, argument);
1294}
1295
1296
1297HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor(
1298 JSArrayBuilder* array_builder, ElementsKind kind) {
1299 // Insert a bounds check because the number of arguments might exceed
1300 // the kInitialMaxFastElementArray limit. This cannot happen for code
1301 // that was parsed, but calling via Array.apply(thisArg, [...]) might
1302 // trigger it.
1303 HValue* length = GetArgumentsLength();
1304 HConstant* max_alloc_length =
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001305 Add<HConstant>(JSArray::kInitialMaxFastElementArray);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001306 HValue* checked_length = Add<HBoundsCheck>(length, max_alloc_length);
1307
1308 // We need to fill with the hole if it's a smi array in the multi-argument
1309 // case because we might have to bail out while copying arguments into
1310 // the array because they aren't compatible with a smi array.
1311 // If it's a double array, no problem, and if it's fast then no
1312 // problem either because doubles are boxed.
1313 //
1314 // TODO(mvstanton): consider an instruction to memset fill the array
1315 // with zero in this case instead.
1316 JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind)
1317 ? JSArrayBuilder::FILL_WITH_HOLE
1318 : JSArrayBuilder::DONT_FILL_WITH_HOLE;
1319 HValue* new_object = array_builder->AllocateArray(checked_length,
1320 max_alloc_length,
1321 checked_length,
1322 fill_mode);
1323 HValue* elements = array_builder->GetElementsLocation();
1324 DCHECK(elements != NULL);
1325
1326 // Now populate the elements correctly.
1327 LoopBuilder builder(this,
1328 context(),
1329 LoopBuilder::kPostIncrement);
1330 HValue* start = graph()->GetConstant0();
1331 HValue* key = builder.BeginBody(start, checked_length, Token::LT);
1332 HInstruction* argument_elements = Add<HArgumentsElements>(false);
1333 HInstruction* argument = Add<HAccessArgumentsAt>(
1334 argument_elements, checked_length, key);
1335
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001336 Add<HStoreKeyed>(elements, key, argument, nullptr, kind);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001337 builder.EndBody();
1338 return new_object;
1339}
1340
1341
1342template <>
1343HValue* CodeStubGraphBuilder<ArrayNoArgumentConstructorStub>::BuildCodeStub() {
1344 ElementsKind kind = casted_stub()->elements_kind();
1345 AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
1346 return BuildArrayConstructor(kind, override_mode, NONE);
1347}
1348
1349
// Compile this stub through the shared Hydrogen stub pipeline.
Handle<Code> ArrayNoArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}
1353
1354
1355template <>
1356HValue* CodeStubGraphBuilder<ArraySingleArgumentConstructorStub>::
1357 BuildCodeStub() {
1358 ElementsKind kind = casted_stub()->elements_kind();
1359 AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
1360 return BuildArrayConstructor(kind, override_mode, SINGLE);
1361}
1362
1363
// Compile this stub through the shared Hydrogen stub pipeline.
Handle<Code> ArraySingleArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}
1367
1368
1369template <>
1370HValue* CodeStubGraphBuilder<ArrayNArgumentsConstructorStub>::BuildCodeStub() {
1371 ElementsKind kind = casted_stub()->elements_kind();
1372 AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
1373 return BuildArrayConstructor(kind, override_mode, MULTIPLE);
1374}
1375
1376
// Compile this stub through the shared Hydrogen stub pipeline.
Handle<Code> ArrayNArgumentsConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}
1380
1381
1382template <>
1383HValue* CodeStubGraphBuilder<InternalArrayNoArgumentConstructorStub>::
1384 BuildCodeStub() {
1385 ElementsKind kind = casted_stub()->elements_kind();
1386 return BuildInternalArrayConstructor(kind, NONE);
1387}
1388
1389
// Compile this stub through the shared Hydrogen stub pipeline.
Handle<Code> InternalArrayNoArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}
1393
1394
1395template <>
1396HValue* CodeStubGraphBuilder<InternalArraySingleArgumentConstructorStub>::
1397 BuildCodeStub() {
1398 ElementsKind kind = casted_stub()->elements_kind();
1399 return BuildInternalArrayConstructor(kind, SINGLE);
1400}
1401
1402
// Compile this stub through the shared Hydrogen stub pipeline.
Handle<Code> InternalArraySingleArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}
1406
1407
1408template <>
1409HValue* CodeStubGraphBuilder<InternalArrayNArgumentsConstructorStub>::
1410 BuildCodeStub() {
1411 ElementsKind kind = casted_stub()->elements_kind();
1412 return BuildInternalArrayConstructor(kind, MULTIPLE);
1413}
1414
1415
// Compile this stub through the shared Hydrogen stub pipeline.
Handle<Code> InternalArrayNArgumentsConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}
1419
1420
// Compares parameter 0 against nil using the type information recorded in the
// CompareNilICStub's state. Returns the true constant when the true branch is
// reachable (undefined otherwise); the false branch, when reachable, returns
// false directly.
template <>
HValue* CodeStubGraphBuilder<CompareNilICStub>::BuildCodeInitializedStub() {
  Isolate* isolate = graph()->isolate();
  CompareNilICStub* stub = casted_stub();
  HIfContinuation continuation;
  Handle<Map> sentinel_map(isolate->heap()->meta_map());
  Type* type = stub->GetType(zone(), sentinel_map);
  BuildCompareNil(GetParameter(0), type, &continuation, kEmbedMapsViaWeakCells);
  IfBuilder if_nil(this, &continuation);
  if_nil.Then();
  if (continuation.IsFalseReachable()) {
    if_nil.Else();
    if_nil.Return(graph()->GetConstantFalse());
  }
  if_nil.End();
  return continuation.IsTrueReachable() ? graph()->GetConstantTrue()
                                        : graph()->GetConstantUndefined();
}
1439
1440
// Compile this stub through the shared Hydrogen stub pipeline.
Handle<Code> CompareNilICStub::GenerateCode() {
  return DoGenerateCode(this);
}
1444
1445
1446template <>
1447HValue* CodeStubGraphBuilder<BinaryOpICStub>::BuildCodeInitializedStub() {
1448 BinaryOpICState state = casted_stub()->state();
1449
1450 HValue* left = GetParameter(BinaryOpICStub::kLeft);
1451 HValue* right = GetParameter(BinaryOpICStub::kRight);
1452
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001453 Type* left_type = state.GetLeftType();
1454 Type* right_type = state.GetRightType();
1455 Type* result_type = state.GetResultType();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001456
1457 DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) &&
1458 (state.HasSideEffects() || !result_type->Is(Type::None())));
1459
1460 HValue* result = NULL;
1461 HAllocationMode allocation_mode(NOT_TENURED);
1462 if (state.op() == Token::ADD &&
1463 (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) &&
1464 !left_type->Is(Type::String()) && !right_type->Is(Type::String())) {
1465 // For the generic add stub a fast case for string addition is performance
1466 // critical.
1467 if (left_type->Maybe(Type::String())) {
1468 IfBuilder if_leftisstring(this);
1469 if_leftisstring.If<HIsStringAndBranch>(left);
1470 if_leftisstring.Then();
1471 {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001472 Push(BuildBinaryOperation(state.op(), left, right, Type::String(),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001473 right_type, result_type,
Ben Murdoch097c5b22016-05-18 11:27:45 +01001474 state.fixed_right_arg(), allocation_mode));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001475 }
1476 if_leftisstring.Else();
1477 {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001478 Push(BuildBinaryOperation(state.op(), left, right, left_type,
1479 right_type, result_type,
1480 state.fixed_right_arg(), allocation_mode));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001481 }
1482 if_leftisstring.End();
1483 result = Pop();
1484 } else {
1485 IfBuilder if_rightisstring(this);
1486 if_rightisstring.If<HIsStringAndBranch>(right);
1487 if_rightisstring.Then();
1488 {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001489 Push(BuildBinaryOperation(state.op(), left, right, left_type,
Ben Murdoch097c5b22016-05-18 11:27:45 +01001490 Type::String(), result_type,
1491 state.fixed_right_arg(), allocation_mode));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001492 }
1493 if_rightisstring.Else();
1494 {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001495 Push(BuildBinaryOperation(state.op(), left, right, left_type,
1496 right_type, result_type,
1497 state.fixed_right_arg(), allocation_mode));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001498 }
1499 if_rightisstring.End();
1500 result = Pop();
1501 }
1502 } else {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001503 result = BuildBinaryOperation(state.op(), left, right, left_type,
1504 right_type, result_type,
1505 state.fixed_right_arg(), allocation_mode);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001506 }
1507
1508 // If we encounter a generic argument, the number conversion is
1509 // observable, thus we cannot afford to bail out after the fact.
1510 if (!state.HasSideEffects()) {
1511 result = EnforceNumberType(result, result_type);
1512 }
1513
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001514 return result;
1515}
1516
1517
// Compile this stub through the shared Hydrogen stub pipeline.
Handle<Code> BinaryOpICStub::GenerateCode() {
  return DoGenerateCode(this);
}
1521
1522
1523template <>
1524HValue* CodeStubGraphBuilder<BinaryOpWithAllocationSiteStub>::BuildCodeStub() {
1525 BinaryOpICState state = casted_stub()->state();
1526
1527 HValue* allocation_site = GetParameter(
1528 BinaryOpWithAllocationSiteStub::kAllocationSite);
1529 HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft);
1530 HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight);
1531
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001532 Type* left_type = state.GetLeftType();
1533 Type* right_type = state.GetRightType();
1534 Type* result_type = state.GetResultType();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001535 HAllocationMode allocation_mode(allocation_site);
1536
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001537 return BuildBinaryOperation(state.op(), left, right, left_type, right_type,
1538 result_type, state.fixed_right_arg(),
Ben Murdoch097c5b22016-05-18 11:27:45 +01001539 allocation_mode);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001540}
1541
1542
// Compile this stub through the shared Hydrogen stub pipeline.
Handle<Code> BinaryOpWithAllocationSiteStub::GenerateCode() {
  return DoGenerateCode(this);
}
1546
1547
// Converts |input| to a string. With |convert| false this only emits a string
// check. Otherwise: smis go through number-to-string conversion, strings pass
// through unchanged, and everything else is reduced to a primitive (objects
// via BuildToPrimitive) and then handed to the ToString stub.
HValue* CodeStubGraphBuilderBase::BuildToString(HValue* input, bool convert) {
  if (!convert) return BuildCheckString(input);
  IfBuilder if_inputissmi(this);
  HValue* inputissmi = if_inputissmi.If<HIsSmiAndBranch>(input);
  if_inputissmi.Then();
  {
    // Convert the input smi to a string.
    Push(BuildNumberToString(input, Type::SignedSmall()));
  }
  if_inputissmi.Else();
  {
    HValue* input_map =
        Add<HLoadNamedField>(input, inputissmi, HObjectAccess::ForMap());
    HValue* input_instance_type = Add<HLoadNamedField>(
        input_map, inputissmi, HObjectAccess::ForMapInstanceType());
    IfBuilder if_inputisstring(this);
    if_inputisstring.If<HCompareNumericAndBranch>(
        input_instance_type, Add<HConstant>(FIRST_NONSTRING_TYPE), Token::LT);
    if_inputisstring.Then();
    {
      // The input is already a string.
      Push(input);
    }
    if_inputisstring.Else();
    {
      // Convert to primitive first (if necessary), see
      // ES6 section 12.7.3 The Addition operator.
      IfBuilder if_inputisprimitive(this);
      STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE);
      if_inputisprimitive.If<HCompareNumericAndBranch>(
          input_instance_type, Add<HConstant>(LAST_PRIMITIVE_TYPE), Token::LTE);
      if_inputisprimitive.Then();
      {
        // The input is already a primitive.
        Push(input);
      }
      if_inputisprimitive.Else();
      {
        // Convert the input to a primitive.
        Push(BuildToPrimitive(input, input_map));
      }
      if_inputisprimitive.End();
      // Convert the primitive to a string value.
      ToStringDescriptor descriptor(isolate());
      ToStringStub stub(isolate());
      HValue* values[] = {context(), Pop()};
      Push(AddUncasted<HCallWithDescriptor>(
          Add<HConstant>(stub.GetCode()), 0, descriptor,
          Vector<HValue*>(values, arraysize(values))));
    }
    if_inputisstring.End();
  }
  if_inputissmi.End();
  return Pop();
}
1603
1604
// Emits ToPrimitive for |input| (already known not to be a primitive), with
// a fast path for unmodified String wrapper objects whose [[StringData]]
// value can be returned directly. Everything else falls back to the
// %ToPrimitive runtime function.
HValue* CodeStubGraphBuilderBase::BuildToPrimitive(HValue* input,
                                                   HValue* input_map) {
  // Get the native context of the caller.
  HValue* native_context = BuildGetNativeContext();

  // Determine the initial map of the %ObjectPrototype%.
  HValue* object_function_prototype_map =
      Add<HLoadNamedField>(native_context, nullptr,
                           HObjectAccess::ForContextSlot(
                               Context::OBJECT_FUNCTION_PROTOTYPE_MAP_INDEX));

  // Determine the initial map of the %StringPrototype%.
  HValue* string_function_prototype_map =
      Add<HLoadNamedField>(native_context, nullptr,
                           HObjectAccess::ForContextSlot(
                               Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));

  // Determine the initial map of the String function.
  HValue* string_function = Add<HLoadNamedField>(
      native_context, nullptr,
      HObjectAccess::ForContextSlot(Context::STRING_FUNCTION_INDEX));
  HValue* string_function_initial_map = Add<HLoadNamedField>(
      string_function, nullptr, HObjectAccess::ForPrototypeOrInitialMap());

  // Determine the map of the [[Prototype]] of {input}.
  HValue* input_prototype =
      Add<HLoadNamedField>(input_map, nullptr, HObjectAccess::ForPrototype());
  HValue* input_prototype_map =
      Add<HLoadNamedField>(input_prototype, nullptr, HObjectAccess::ForMap());

  // For string wrappers (JSValue instances with [[StringData]] internal
  // fields), we can shortcirciut the ToPrimitive if
  //
  //  (a) the {input} map matches the initial map of the String function,
  //  (b) the {input} [[Prototype]] is the unmodified %StringPrototype% (i.e.
  //      no one monkey-patched toString, @@toPrimitive or valueOf), and
  //  (c) the %ObjectPrototype% (i.e. the [[Prototype]] of the
  //      %StringPrototype%) is also unmodified, that is no one sneaked a
  //      @@toPrimitive into the %ObjectPrototype%.
  //
  // If all these assumptions hold, we can just take the [[StringData]] value
  // and return it.
  // TODO(bmeurer): This just repairs a regression introduced by removing the
  // weird (and broken) intrinsic %_IsStringWrapperSafeForDefaultValue, which
  // was intendend to something similar to this, although less efficient and
  // wrong in the presence of @@toPrimitive. Long-term we might want to move
  // into the direction of having a ToPrimitiveStub that can do common cases
  // while staying in JavaScript land (i.e. not going to C++).
  IfBuilder if_inputisstringwrapper(this);
  if_inputisstringwrapper.If<HCompareObjectEqAndBranch>(
      input_map, string_function_initial_map);
  if_inputisstringwrapper.And();
  if_inputisstringwrapper.If<HCompareObjectEqAndBranch>(
      input_prototype_map, string_function_prototype_map);
  if_inputisstringwrapper.And();
  if_inputisstringwrapper.If<HCompareObjectEqAndBranch>(
      Add<HLoadNamedField>(Add<HLoadNamedField>(input_prototype_map, nullptr,
                                                HObjectAccess::ForPrototype()),
                           nullptr, HObjectAccess::ForMap()),
      object_function_prototype_map);
  if_inputisstringwrapper.Then();
  {
    // Fast case: return the wrapped string value directly.
    Push(BuildLoadNamedField(
        input, FieldIndex::ForInObjectOffset(JSValue::kValueOffset)));
  }
  if_inputisstringwrapper.Else();
  {
    // TODO(bmeurer): Add support for fast ToPrimitive conversion using
    // a dedicated ToPrimitiveStub.
    Add<HPushArguments>(input);
    Push(Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kToPrimitive), 1));
  }
  if_inputisstringwrapper.End();
  return Pop();
}
1680
1681
// Builds the string-addition stub. Depending on the stub's flags, either
// operand may be checked to be a string or fully converted (via
// BuildToString, optionally including a ToPrimitive step).
template <>
HValue* CodeStubGraphBuilder<StringAddStub>::BuildCodeInitializedStub() {
  StringAddStub* stub = casted_stub();
  StringAddFlags flags = stub->flags();
  PretenureFlag pretenure_flag = stub->pretenure_flag();

  HValue* left = GetParameter(StringAddStub::kLeft);
  HValue* right = GetParameter(StringAddStub::kRight);

  // Make sure that both arguments are strings if not known in advance.
  if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) {
    left =
        BuildToString(left, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT);
  }
  if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) {
    right = BuildToString(right,
                          (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT);
  }

  return BuildStringAdd(left, right, HAllocationMode(pretenure_flag));
}
1703
1704
// Compiles this stub through the Hydrogen/Lithium pipeline.
Handle<Code> StringAddStub::GenerateCode() {
  return DoGenerateCode(this);
}
1708
1709
// Builds the ToBoolean stub: branch on the truthiness of parameter 0 and
// return the true/false constants. The branch is restricted to the type set
// recorded in the stub's feedback (stub->types()).
template <>
HValue* CodeStubGraphBuilder<ToBooleanStub>::BuildCodeInitializedStub() {
  ToBooleanStub* stub = casted_stub();
  IfBuilder if_true(this);
  if_true.If<HBranch>(GetParameter(0), stub->types());
  if_true.Then();
  if_true.Return(graph()->GetConstantTrue());
  if_true.Else();
  if_true.End();
  // Only the false branch falls through to here; the true branch returned
  // inside the IfBuilder above.
  return graph()->GetConstantFalse();
}
1721
1722
// Compiles this stub through the Hydrogen/Lithium pipeline.
Handle<Code> ToBooleanStub::GenerateCode() {
  return DoGenerateCode(this);
}
1726
1727
// Builds the store-global stub: stores |value| into a global PropertyCell,
// deoptimizing whenever the cell's observed state no longer matches what
// this stub was specialized for. The weak cells created from placeholder
// objects below are patched with the real global map / property cell when
// the stub code is instantiated for a concrete global.
template <>
HValue* CodeStubGraphBuilder<StoreGlobalStub>::BuildCodeInitializedStub() {
  StoreGlobalStub* stub = casted_stub();
  HParameter* value = GetParameter(StoreDescriptor::kValueIndex);
  if (stub->check_global()) {
    // Check that the map of the global has not changed: use a placeholder map
    // that will be replaced later with the global object's map.
    HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex);
    HValue* proxy_map =
        Add<HLoadNamedField>(proxy, nullptr, HObjectAccess::ForMap());
    HValue* global =
        Add<HLoadNamedField>(proxy_map, nullptr, HObjectAccess::ForPrototype());
    HValue* map_cell = Add<HConstant>(isolate()->factory()->NewWeakCell(
        StoreGlobalStub::global_map_placeholder(isolate())));
    HValue* expected_map = Add<HLoadNamedField>(
        map_cell, nullptr, HObjectAccess::ForWeakCellValue());
    HValue* map =
        Add<HLoadNamedField>(global, nullptr, HObjectAccess::ForMap());
    IfBuilder map_check(this);
    map_check.IfNot<HCompareObjectEqAndBranch>(expected_map, map);
    map_check.ThenDeopt(Deoptimizer::kUnknownMap);
    map_check.End();
  }

  HValue* weak_cell = Add<HConstant>(isolate()->factory()->NewWeakCell(
      StoreGlobalStub::property_cell_placeholder(isolate())));
  HValue* cell = Add<HLoadNamedField>(weak_cell, nullptr,
                                      HObjectAccess::ForWeakCellValue());
  Add<HCheckHeapObject>(cell);
  HObjectAccess access = HObjectAccess::ForPropertyCellValue();
  // Load the payload of the global parameter cell. A hole indicates that the
  // cell has been invalidated and that the store must be handled by the
  // runtime.
  HValue* cell_contents = Add<HLoadNamedField>(cell, nullptr, access);

  auto cell_type = stub->cell_type();
  if (cell_type == PropertyCellType::kConstant ||
      cell_type == PropertyCellType::kUndefined) {
    // This is always valid for all states a cell can be in.
    // Storing is a no-op if the new value equals the current contents;
    // anything else deopts to the runtime.
    IfBuilder builder(this);
    builder.If<HCompareObjectEqAndBranch>(cell_contents, value);
    builder.Then();
    builder.ElseDeopt(
        Deoptimizer::kUnexpectedCellContentsInConstantGlobalStore);
    builder.End();
  } else {
    IfBuilder builder(this);
    HValue* hole_value = graph()->GetConstantHole();
    builder.If<HCompareObjectEqAndBranch>(cell_contents, hole_value);
    builder.Then();
    builder.Deopt(Deoptimizer::kUnexpectedCellContentsInGlobalStore);
    builder.Else();
    // When dealing with constant types, the type may be allowed to change, as
    // long as optimized code remains valid.
    if (cell_type == PropertyCellType::kConstantType) {
      switch (stub->constant_type()) {
        case PropertyCellConstantType::kSmi:
          access = access.WithRepresentation(Representation::Smi());
          break;
        case PropertyCellConstantType::kStableMap: {
          // It is sufficient here to check that the value and cell contents
          // have identical maps, no matter if they are stable or not or if they
          // are the maps that were originally in the cell or not. If optimized
          // code will deopt when a cell has a unstable map and if it has a
          // dependency on a stable map, it will deopt if the map destabilizes.
          Add<HCheckHeapObject>(value);
          Add<HCheckHeapObject>(cell_contents);
          HValue* expected_map = Add<HLoadNamedField>(cell_contents, nullptr,
                                                      HObjectAccess::ForMap());
          HValue* map =
              Add<HLoadNamedField>(value, nullptr, HObjectAccess::ForMap());
          IfBuilder map_check(this);
          map_check.IfNot<HCompareObjectEqAndBranch>(expected_map, map);
          map_check.ThenDeopt(Deoptimizer::kUnknownMap);
          map_check.End();
          access = access.WithRepresentation(Representation::HeapObject());
          break;
        }
      }
    }
    Add<HStoreNamedField>(cell, access, value);
    builder.End();
  }

  return value;
}
1814
1815
// Compiles this stub through the Hydrogen/Lithium pipeline.
Handle<Code> StoreGlobalStub::GenerateCode() {
  return DoGenerateCode(this);
}
1819
1820
// Builds the combined elements-kind-transition-and-store stub: transitions
// the receiver's elements backing store to the target kind, then performs
// the keyed store. With --trace-elements-transitions this deopts eagerly so
// the runtime can do (and trace) the work instead.
template <>
HValue* CodeStubGraphBuilder<ElementsTransitionAndStoreStub>::BuildCodeStub() {
  HValue* object = GetParameter(StoreTransitionHelper::ReceiverIndex());
  HValue* key = GetParameter(StoreTransitionHelper::NameIndex());
  HValue* value = GetParameter(StoreTransitionHelper::ValueIndex());
  HValue* map = GetParameter(StoreTransitionHelper::MapIndex());

  if (FLAG_trace_elements_transitions) {
    // Tracing elements transitions is the job of the runtime.
    Add<HDeoptimize>(Deoptimizer::kTracingElementsTransitions,
                     Deoptimizer::EAGER);
  } else {
    // NOTE(review): presumably set because the transition/store path can
    // clobber double registers — confirm against the transition builder.
    info()->MarkAsSavesCallerDoubles();

    BuildTransitionElementsKind(object, map,
                                casted_stub()->from_kind(),
                                casted_stub()->to_kind(),
                                casted_stub()->is_jsarray());

    BuildUncheckedMonomorphicElementAccess(object, key, value,
                                           casted_stub()->is_jsarray(),
                                           casted_stub()->to_kind(),
                                           STORE, ALLOW_RETURN_HOLE,
                                           casted_stub()->store_mode());
  }

  // The stored value is the stub's result in both branches.
  return value;
}
1849
1850
// Compiles this stub through the Hydrogen/Lithium pipeline.
Handle<Code> ElementsTransitionAndStoreStub::GenerateCode() {
  return DoGenerateCode(this);
}
1854
1855
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001856template <>
1857HValue* CodeStubGraphBuilder<ToObjectStub>::BuildCodeStub() {
1858 HValue* receiver = GetParameter(ToObjectDescriptor::kReceiverIndex);
1859 return BuildToObject(receiver);
1860}
1861
1862
// Compiles this stub through the Hydrogen/Lithium pipeline.
Handle<Code> ToObjectStub::GenerateCode() { return DoGenerateCode(this); }
1864
1865
// Checks one optimized-code-map entry (at |map_index|) against
// |native_context| with no OSR ast id, and if it matches and the cached code
// is still alive, installs that code and its literals into |js_function|.
// The conditions are emitted into the caller-owned |builder|; on return the
// caller is positioned in the builder's "then" branch.
void CodeStubGraphBuilderBase::BuildCheckAndInstallOptimizedCode(
    HValue* js_function,
    HValue* native_context,
    IfBuilder* builder,
    HValue* optimized_map,
    HValue* map_index) {
  HValue* osr_ast_id_none = Add<HConstant>(BailoutId::None().ToInt());
  HValue* context_slot = LoadFromOptimizedCodeMap(
      optimized_map, map_index, SharedFunctionInfo::kContextOffset);
  // Map entries hold weak cells; unwrap to get the referenced value.
  context_slot = Add<HLoadNamedField>(context_slot, nullptr,
                                      HObjectAccess::ForWeakCellValue());
  HValue* osr_ast_slot = LoadFromOptimizedCodeMap(
      optimized_map, map_index, SharedFunctionInfo::kOsrAstIdOffset);
  HValue* code_object = LoadFromOptimizedCodeMap(
      optimized_map, map_index, SharedFunctionInfo::kCachedCodeOffset);
  code_object = Add<HLoadNamedField>(code_object, nullptr,
                                     HObjectAccess::ForWeakCellValue());
  builder->If<HCompareObjectEqAndBranch>(native_context,
                                         context_slot);
  builder->AndIf<HCompareObjectEqAndBranch>(osr_ast_slot, osr_ast_id_none);
  builder->And();
  // A code weak cell that unwraps to 0 has been cleared by the GC; only
  // install still-live code.
  builder->IfNot<HCompareObjectEqAndBranch>(code_object,
                                            graph()->GetConstant0());
  builder->Then();
  HValue* literals = LoadFromOptimizedCodeMap(optimized_map,
      map_index, SharedFunctionInfo::kLiteralsOffset);
  literals = Add<HLoadNamedField>(literals, nullptr,
                                  HObjectAccess::ForWeakCellValue());
  // If the literals were collected we cannot use the cached code; deopt.
  IfBuilder maybe_deopt(this);
  maybe_deopt.If<HCompareObjectEqAndBranch>(literals, graph()->GetConstant0());
  maybe_deopt.ThenDeopt(Deoptimizer::kLiteralsWereDisposed);
  maybe_deopt.End();

  BuildInstallOptimizedCode(js_function, native_context, code_object, literals);

  // The builder continues in the "then" after this function.
}
1903
1904
// Installs |code_object| and |literals| into |js_function| and links the
// function into the native context's list of optimized functions.
void CodeStubGraphBuilderBase::BuildInstallOptimizedCode(HValue* js_function,
                                                         HValue* native_context,
                                                         HValue* code_object,
                                                         HValue* literals) {
  Counters* counters = isolate()->counters();
  AddIncrementCounter(counters->fast_new_closure_install_optimized());

  // TODO(fschneider): Idea: store proper code pointers in the optimized code
  // map and either unmangle them on marking or do nothing as the whole map is
  // discarded on major GC anyway.
  Add<HStoreCodeEntry>(js_function, code_object);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(),
                        literals);

  // Now link a function into a list of optimized functions.
  HValue* optimized_functions_list = Add<HLoadNamedField>(
      native_context, nullptr,
      HObjectAccess::ForContextSlot(Context::OPTIMIZED_FUNCTIONS_LIST));
  Add<HStoreNamedField>(js_function,
                        HObjectAccess::ForNextFunctionLinkPointer(),
                        optimized_functions_list);

  // This store is the only one that should have a write barrier.
  Add<HStoreNamedField>(native_context,
      HObjectAccess::ForContextSlot(Context::OPTIMIZED_FUNCTIONS_LIST),
      js_function);
}
1932
1933
// Installs the (unoptimized) code from |shared_info| into |js_function| and
// clears the function's next-function link.
void CodeStubGraphBuilderBase::BuildInstallCode(HValue* js_function,
                                                HValue* shared_info) {
  Add<HStoreNamedField>(js_function,
                        HObjectAccess::ForNextFunctionLinkPointer(),
                        graph()->GetConstantUndefined());
  HValue* code_object = Add<HLoadNamedField>(shared_info, nullptr,
                                             HObjectAccess::ForCodeOffset());
  Add<HStoreCodeEntry>(js_function, code_object);
}
1943
1944
1945HInstruction* CodeStubGraphBuilderBase::LoadFromOptimizedCodeMap(
1946 HValue* optimized_map,
1947 HValue* iterator,
1948 int field_offset) {
1949 // By making sure to express these loads in the form [<hvalue> + constant]
1950 // the keyed load can be hoisted.
1951 DCHECK(field_offset >= 0 && field_offset < SharedFunctionInfo::kEntryLength);
1952 HValue* field_slot = iterator;
1953 if (field_offset > 0) {
1954 HValue* field_offset_value = Add<HConstant>(field_offset);
1955 field_slot = AddUncasted<HAdd>(iterator, field_offset_value);
1956 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001957 HInstruction* field_entry = Add<HLoadKeyed>(optimized_map, field_slot,
1958 nullptr, nullptr, FAST_ELEMENTS);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001959 return field_entry;
1960}
1961
1962
// Chooses and installs code for a freshly created |js_function|: searches
// the shared function info's optimized code map for an entry matching
// |native_context|, falls back to context-independent optimized code, and
// finally to the shared (unoptimized) code.
void CodeStubGraphBuilderBase::BuildInstallFromOptimizedCodeMap(
    HValue* js_function,
    HValue* shared_info,
    HValue* native_context) {
  Counters* counters = isolate()->counters();
  Factory* factory = isolate()->factory();
  IfBuilder is_optimized(this);
  HInstruction* optimized_map = Add<HLoadNamedField>(
      shared_info, nullptr, HObjectAccess::ForOptimizedCodeMap());
  // NOTE(review): an absent optimized code map appears to be represented by
  // Smi 0 (hence the comparison below) — confirm against SharedFunctionInfo.
  HValue* null_constant = Add<HConstant>(0);
  is_optimized.If<HCompareObjectEqAndBranch>(optimized_map, null_constant);
  is_optimized.Then();
  {
    // No code map at all: install the shared (unoptimized) code.
    BuildInstallCode(js_function, shared_info);
  }
  is_optimized.Else();
  {
    AddIncrementCounter(counters->fast_new_closure_try_optimized());
    // The {optimized_map} points to fixed array of 4-element entries:
    //   (native context, optimized code, literals, ast-id).
    // Iterate through the {optimized_map} backwards. After the loop, if no
    // matching optimized code was found, install unoptimized code.
    //   for(i = map.length() - SharedFunctionInfo::kEntryLength;
    //       i >= SharedFunctionInfo::kEntriesStart;
    //       i -= SharedFunctionInfo::kEntryLength) { ... }
    HValue* first_entry_index =
        Add<HConstant>(SharedFunctionInfo::kEntriesStart);
    HValue* shared_function_entry_length =
        Add<HConstant>(SharedFunctionInfo::kEntryLength);
    LoopBuilder loop_builder(this, context(), LoopBuilder::kPostDecrement,
                             shared_function_entry_length);
    HValue* array_length = Add<HLoadNamedField>(
        optimized_map, nullptr, HObjectAccess::ForFixedArrayLength());
    HValue* start_pos =
        AddUncasted<HSub>(array_length, shared_function_entry_length);
    HValue* slot_iterator =
        loop_builder.BeginBody(start_pos, first_entry_index, Token::GTE);
    {
      IfBuilder done_check(this);
      BuildCheckAndInstallOptimizedCode(js_function, native_context,
                                        &done_check, optimized_map,
                                        slot_iterator);
      // Fall out of the loop
      loop_builder.Break();
    }
    loop_builder.EndBody();

    // If {slot_iterator} is less than the first entry index, then we failed to
    // find a context-dependent code and try context-independent code next.
    IfBuilder no_optimized_code_check(this);
    no_optimized_code_check.If<HCompareNumericAndBranch>(
        slot_iterator, first_entry_index, Token::LT);
    no_optimized_code_check.Then();
    {
      IfBuilder shared_code_check(this);
      HValue* shared_code =
          Add<HLoadNamedField>(optimized_map, nullptr,
                               HObjectAccess::ForOptimizedCodeMapSharedCode());
      shared_code = Add<HLoadNamedField>(shared_code, nullptr,
                                         HObjectAccess::ForWeakCellValue());
      // A cleared weak cell unwraps to 0, i.e. no shared code is cached.
      shared_code_check.IfNot<HCompareObjectEqAndBranch>(
          shared_code, graph()->GetConstant0());
      shared_code_check.Then();
      {
        // Store the context-independent optimized code.
        HValue* literals = Add<HConstant>(factory->empty_fixed_array());
        BuildInstallOptimizedCode(js_function, native_context, shared_code,
                                  literals);
      }
      shared_code_check.Else();
      {
        // Store the unoptimized code.
        BuildInstallCode(js_function, shared_info);
      }
    }
  }
}
2040
2041
// Builds the fast-new-closure stub: allocates a JSFunction in new space,
// initializes its fields from the given SharedFunctionInfo and the current
// context, and installs code (preferring cached optimized code from the
// optimized code map).
template<>
HValue* CodeStubGraphBuilder<FastNewClosureStub>::BuildCodeStub() {
  Counters* counters = isolate()->counters();
  Factory* factory = isolate()->factory();
  HInstruction* empty_fixed_array =
      Add<HConstant>(factory->empty_fixed_array());
  HValue* shared_info = GetParameter(0);

  AddIncrementCounter(counters->fast_new_closure_total());

  // Create a new closure from the given function info in new space
  HValue* size = Add<HConstant>(JSFunction::kSize);
  HInstruction* js_function =
      Add<HAllocate>(size, HType::JSObject(), NOT_TENURED, JS_FUNCTION_TYPE);

  int map_index = Context::FunctionMapIndex(casted_stub()->language_mode(),
                                            casted_stub()->kind());

  // Compute the function map in the current native context and set that
  // as the map of the allocated object.
  HInstruction* native_context = BuildGetNativeContext();
  HInstruction* map_slot_value = Add<HLoadNamedField>(
      native_context, nullptr, HObjectAccess::ForContextSlot(map_index));
  Add<HStoreNamedField>(js_function, HObjectAccess::ForMap(), map_slot_value);

  // Initialize the rest of the function.
  Add<HStoreNamedField>(js_function, HObjectAccess::ForPropertiesPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForElementsPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForPrototypeOrInitialMap(),
                        graph()->GetConstantHole());
  Add<HStoreNamedField>(
      js_function, HObjectAccess::ForSharedFunctionInfoPointer(), shared_info);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForFunctionContextPointer(),
                        context());

  // Initialize the code pointer in the function to be the one found in the
  // shared function info object. But first check if there is an optimized
  // version for our context.
  BuildInstallFromOptimizedCodeMap(js_function, shared_info, native_context);

  return js_function;
}
2088
2089
// Compiles this stub through the Hydrogen/Lithium pipeline.
Handle<Code> FastNewClosureStub::GenerateCode() {
  return DoGenerateCode(this);
}
2093
2094
// Builds the fast-new-context stub: allocates a function context of
// MIN_CONTEXT_SLOTS plus the stub's declared slot count in new space, then
// initializes its header, the fixed slots, and all remaining slots (to
// undefined).
template<>
HValue* CodeStubGraphBuilder<FastNewContextStub>::BuildCodeStub() {
  int length = casted_stub()->slots() + Context::MIN_CONTEXT_SLOTS;

  // Get the function.
  HParameter* function = GetParameter(FastNewContextStub::kFunction);

  // Allocate the context in new space.
  HAllocate* function_context = Add<HAllocate>(
      Add<HConstant>(length * kPointerSize + FixedArray::kHeaderSize),
      HType::HeapObject(), NOT_TENURED, FIXED_ARRAY_TYPE);

  // Set up the object header.
  AddStoreMapConstant(function_context,
                      isolate()->factory()->function_context_map());
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForFixedArrayLength(),
                        Add<HConstant>(length));

  // Set up the fixed slots.
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::CLOSURE_INDEX),
                        function);
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX),
                        context());
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::EXTENSION_INDEX),
                        graph()->GetConstantHole());

  // Copy the native context from the previous context.
  HValue* native_context = Add<HLoadNamedField>(
      context(), nullptr,
      HObjectAccess::ForContextSlot(Context::NATIVE_CONTEXT_INDEX));
  Add<HStoreNamedField>(function_context, HObjectAccess::ForContextSlot(
                                              Context::NATIVE_CONTEXT_INDEX),
                        native_context);

  // Initialize the rest of the slots to undefined.
  for (int i = Context::MIN_CONTEXT_SLOTS; i < length; ++i) {
    Add<HStoreNamedField>(function_context,
                          HObjectAccess::ForContextSlot(i),
                          graph()->GetConstantUndefined());
  }

  return function_context;
}
2142
2143
// Compiles this stub through the Hydrogen/Lithium pipeline.
Handle<Code> FastNewContextStub::GenerateCode() {
  return DoGenerateCode(this);
}
2147
2148
2149template <>
2150HValue* CodeStubGraphBuilder<LoadDictionaryElementStub>::BuildCodeStub() {
2151 HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
2152 HValue* key = GetParameter(LoadDescriptor::kNameIndex);
2153
2154 Add<HCheckSmi>(key);
2155
2156 HValue* elements = AddLoadElements(receiver);
2157
2158 HValue* hash = BuildElementIndexHash(key);
2159
Ben Murdoch097c5b22016-05-18 11:27:45 +01002160 return BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002161}
2162
2163
// Compiles this stub through the Hydrogen/Lithium pipeline.
Handle<Code> LoadDictionaryElementStub::GenerateCode() {
  return DoGenerateCode(this);
}
2167
2168
2169template<>
2170HValue* CodeStubGraphBuilder<RegExpConstructResultStub>::BuildCodeStub() {
2171 // Determine the parameters.
2172 HValue* length = GetParameter(RegExpConstructResultStub::kLength);
2173 HValue* index = GetParameter(RegExpConstructResultStub::kIndex);
2174 HValue* input = GetParameter(RegExpConstructResultStub::kInput);
2175
2176 info()->MarkMustNotHaveEagerFrame();
2177
2178 return BuildRegExpConstructResult(length, index, input);
2179}
2180
2181
// Compiles this stub through the Hydrogen/Lithium pipeline.
Handle<Code> RegExpConstructResultStub::GenerateCode() {
  return DoGenerateCode(this);
}
2185
2186
// KeyedLoadGenericStub gets a fully specialized graph-builder class (rather
// than just a BuildCodeStub specialization) because it needs helper methods
// to emit the per-elements-kind dispatch.
template <>
class CodeStubGraphBuilder<KeyedLoadGenericStub>
    : public CodeStubGraphBuilderBase {
 public:
  explicit CodeStubGraphBuilder(CompilationInfo* info, CodeStub* stub)
      : CodeStubGraphBuilderBase(info, stub) {}

 protected:
  virtual HValue* BuildCodeStub();

  // Emits an upper-bound check on the elements kind encoded in |bit_field2|
  // and opens the corresponding Then() branch on |if_builder|.
  void BuildElementsKindLimitCheck(HGraphBuilder::IfBuilder* if_builder,
                                   HValue* bit_field2,
                                   ElementsKind kind);

  // Emits the fast-elements load for |kind|, handling both JSArray and
  // non-JSArray receivers.
  void BuildFastElementLoad(HGraphBuilder::IfBuilder* if_builder,
                            HValue* receiver,
                            HValue* key,
                            HValue* instance_type,
                            HValue* bit_field2,
                            ElementsKind kind);

  KeyedLoadGenericStub* casted_stub() {
    return static_cast<KeyedLoadGenericStub*>(stub());
  }
};
2212
2213
2214void CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildElementsKindLimitCheck(
2215 HGraphBuilder::IfBuilder* if_builder, HValue* bit_field2,
2216 ElementsKind kind) {
2217 ElementsKind next_kind = static_cast<ElementsKind>(kind + 1);
2218 HValue* kind_limit = Add<HConstant>(
2219 static_cast<int>(Map::ElementsKindBits::encode(next_kind)));
2220
2221 if_builder->If<HCompareNumericAndBranch>(bit_field2, kind_limit, Token::LT);
2222 if_builder->Then();
2223}
2224
2225
// Emits the fast-elements load path for |kind|: first narrows on the
// elements kind (BuildElementsKindLimitCheck opens a Then() branch on
// |if_builder|), then splits on whether the receiver is a JSArray, pushing
// the loaded element in either case.
void CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildFastElementLoad(
    HGraphBuilder::IfBuilder* if_builder, HValue* receiver, HValue* key,
    HValue* instance_type, HValue* bit_field2, ElementsKind kind) {
  BuildElementsKindLimitCheck(if_builder, bit_field2, kind);

  IfBuilder js_array_check(this);
  js_array_check.If<HCompareNumericAndBranch>(
      instance_type, Add<HConstant>(JS_ARRAY_TYPE), Token::EQ);
  js_array_check.Then();
  // JSArray receiver: length comes from the array, not the backing store.
  Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
                                              true, kind,
                                              LOAD, NEVER_RETURN_HOLE,
                                              STANDARD_STORE));
  js_array_check.Else();
  // Non-JSArray receiver.
  Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
                                              false, kind,
                                              LOAD, NEVER_RETURN_HOLE,
                                              STANDARD_STORE));
  js_array_check.End();
}
2246
2247
// Builds the Hydrogen graph for the generic keyed load stub: receiver[key].
// Dispatches first on the kind of key (integer index vs. unique string), then
// on the receiver's elements/properties representation.  Each branch pushes
// exactly one value on the environment stack; the joined result is popped and
// returned at the end.  Unhandled representations deoptimize.
HValue* CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
  HValue* key = GetParameter(LoadDescriptor::kNameIndex);
  // Split into a smi/integer case and unique string case.
  HIfContinuation index_name_split_continuation(graph()->CreateBasicBlock(),
                                                graph()->CreateBasicBlock());

  BuildKeyedIndexCheck(key, &index_name_split_continuation);

  IfBuilder index_name_split(this, &index_name_split_continuation);
  index_name_split.Then();
  {
    // Key is an index (number).
    // BuildKeyedIndexCheck left the (possibly canonicalized) key on the
    // environment stack; reload it here.
    key = Pop();

    // Bail out (inside BuildJSObjectCheck) for receivers that need access
    // checks or have an indexed interceptor — the stub cannot handle those.
    int bit_field_mask = (1 << Map::kIsAccessCheckNeeded) |
                         (1 << Map::kHasIndexedInterceptor);
    BuildJSObjectCheck(receiver, bit_field_mask);

    HValue* map =
        Add<HLoadNamedField>(receiver, nullptr, HObjectAccess::ForMap());

    HValue* instance_type =
        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapInstanceType());

    // bit_field2 holds the elements kind; the branches below test it in
    // increasing ElementsKind order.
    HValue* bit_field2 =
        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField2());

    IfBuilder kind_if(this);
    // Fast (packed or holey) SMI/object elements.
    BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                         FAST_HOLEY_ELEMENTS);

    kind_if.Else();
    {
      // Fast double elements.
      BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                           FAST_HOLEY_DOUBLE_ELEMENTS);
    }
    kind_if.Else();

    // The DICTIONARY_ELEMENTS check generates a "kind_if.Then".
    BuildElementsKindLimitCheck(&kind_if, bit_field2, DICTIONARY_ELEMENTS);
    {
      // Slow elements: probe the element dictionary by hashed index.
      HValue* elements = AddLoadElements(receiver);

      HValue* hash = BuildElementIndexHash(key);

      Push(BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash));
    }
    kind_if.Else();

    // The SLOW_SLOPPY_ARGUMENTS_ELEMENTS check generates a "kind_if.Then".
    STATIC_ASSERT(FAST_SLOPPY_ARGUMENTS_ELEMENTS <
                  SLOW_SLOPPY_ARGUMENTS_ELEMENTS);
    BuildElementsKindLimitCheck(&kind_if, bit_field2,
                                SLOW_SLOPPY_ARGUMENTS_ELEMENTS);
    // Non-strict elements are not handled.
    Add<HDeoptimize>(Deoptimizer::kNonStrictElementsInKeyedLoadGenericStub,
                     Deoptimizer::EAGER);
    // The deopt above never returns, but every branch must still balance the
    // environment stack with one pushed value.
    Push(graph()->GetConstant0());

    // Any remaining elements kind (e.g. fixed typed arrays) deoptimizes.
    kind_if.ElseDeopt(
        Deoptimizer::kElementsKindUnhandledInKeyedLoadGenericStub);

    kind_if.End();
  }
  index_name_split.Else();
  {
    // Key is a unique string.
    key = Pop();

    // As above: refuse access-checked receivers and named interceptors.
    int bit_field_mask = (1 << Map::kIsAccessCheckNeeded) |
                         (1 << Map::kHasNamedInterceptor);
    BuildJSObjectCheck(receiver, bit_field_mask);

    HIfContinuation continuation;
    BuildTestForDictionaryProperties(receiver, &continuation);
    IfBuilder if_dict_properties(this, &continuation);
    if_dict_properties.Then();
    {
      // Key is string, properties are dictionary mode.
      BuildNonGlobalObjectCheck(receiver);

      HValue* properties = Add<HLoadNamedField>(
          receiver, nullptr, HObjectAccess::ForPropertiesPointer());

      HValue* hash =
          Add<HLoadNamedField>(key, nullptr, HObjectAccess::ForNameHashField());

      // Strip the non-hash bits of the hash field to get the raw hash value.
      hash = AddUncasted<HShr>(hash, Add<HConstant>(Name::kHashShift));

      HValue* value =
          BuildUncheckedDictionaryElementLoad(receiver, properties, key, hash);
      Push(value);
    }
    if_dict_properties.Else();
    {
      // TODO(dcarney): don't use keyed lookup cache, but convert to use
      // megamorphic stub cache.
      // NOTE(review): this branch is dead — UNREACHABLE() aborts before the
      // keyed-lookup-cache code below can execute.
      UNREACHABLE();
      // Key is string, properties are fast mode.
      HValue* hash = BuildKeyedLookupCacheHash(receiver, key);

      ExternalReference cache_keys_ref =
          ExternalReference::keyed_lookup_cache_keys(isolate());
      HValue* cache_keys = Add<HConstant>(cache_keys_ref);

      HValue* map =
          Add<HLoadNamedField>(receiver, nullptr, HObjectAccess::ForMap());
      // Each cache bucket entry is a (map, key) pair, hence hash * 2.
      HValue* base_index = AddUncasted<HMul>(hash, Add<HConstant>(2));
      base_index->ClearFlag(HValue::kCanOverflow);

      HIfContinuation inline_or_runtime_continuation(
          graph()->CreateBasicBlock(), graph()->CreateBasicBlock());
      {
        // Probe every entry of the bucket; each probe is an IfBuilder whose
        // Then-branch pushes the cached field offset.
        IfBuilder lookup_ifs[KeyedLookupCache::kEntriesPerBucket];
        for (int probe = 0; probe < KeyedLookupCache::kEntriesPerBucket;
             ++probe) {
          IfBuilder* lookup_if = &lookup_ifs[probe];
          lookup_if->Initialize(this);
          int probe_base = probe * KeyedLookupCache::kEntryLength;
          HValue* map_index = AddUncasted<HAdd>(
              base_index,
              Add<HConstant>(probe_base + KeyedLookupCache::kMapIndex));
          map_index->ClearFlag(HValue::kCanOverflow);
          HValue* key_index = AddUncasted<HAdd>(
              base_index,
              Add<HConstant>(probe_base + KeyedLookupCache::kKeyIndex));
          key_index->ClearFlag(HValue::kCanOverflow);
          // Cache hit requires both the map and the key to match.
          HValue* map_to_check =
              Add<HLoadKeyed>(cache_keys, map_index, nullptr, nullptr,
                              FAST_ELEMENTS, NEVER_RETURN_HOLE, 0);
          lookup_if->If<HCompareObjectEqAndBranch>(map_to_check, map);
          lookup_if->And();
          HValue* key_to_check =
              Add<HLoadKeyed>(cache_keys, key_index, nullptr, nullptr,
                              FAST_ELEMENTS, NEVER_RETURN_HOLE, 0);
          lookup_if->If<HCompareObjectEqAndBranch>(key_to_check, key);
          lookup_if->Then();
          {
            // Matched: load the cached field offset for this probe.
            ExternalReference cache_field_offsets_ref =
                ExternalReference::keyed_lookup_cache_field_offsets(isolate());
            HValue* cache_field_offsets =
                Add<HConstant>(cache_field_offsets_ref);
            HValue* index = AddUncasted<HAdd>(hash, Add<HConstant>(probe));
            index->ClearFlag(HValue::kCanOverflow);
            HValue* property_index =
                Add<HLoadKeyed>(cache_field_offsets, index, nullptr, cache_keys,
                                INT32_ELEMENTS, NEVER_RETURN_HOLE, 0);
            Push(property_index);
          }
          lookup_if->Else();
        }
        // Join all probes into a single hit/miss continuation.
        for (int i = 0; i < KeyedLookupCache::kEntriesPerBucket; ++i) {
          lookup_ifs[i].JoinContinuation(&inline_or_runtime_continuation);
        }
      }

      IfBuilder inline_or_runtime(this, &inline_or_runtime_continuation);
      inline_or_runtime.Then();
      {
        // Found a cached index, load property inline.
        Push(Add<HLoadFieldByIndex>(receiver, Pop()));
      }
      inline_or_runtime.Else();
      {
        // KeyedLookupCache miss; call runtime.
        Add<HPushArguments>(receiver, key);
        Push(Add<HCallRuntime>(
            Runtime::FunctionForId(Runtime::kKeyedGetProperty), 2));
      }
      inline_or_runtime.End();
    }
    if_dict_properties.End();
  }
  index_name_split.End();

  // Both arms of the split pushed exactly one result; return the join.
  return Pop();
}
2426
2427
2428Handle<Code> KeyedLoadGenericStub::GenerateCode() {
2429 return DoGenerateCode(this);
2430}
2431
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002432} // namespace internal
2433} // namespace v8