// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/code-stubs.h"

#include "src/bailout-reason.h"
#include "src/crankshaft/hydrogen.h"
#include "src/crankshaft/lithium.h"
#include "src/field-index.h"
#include "src/ic/ic.h"

namespace v8 {
namespace internal {

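// Runs the Hydrogen optimizer over a stub graph and lowers it to a Lithium
// chunk for code generation; bailouts are treated as fatal since a code stub
// has no unoptimized code to fall back to.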
static LChunk* OptimizeGraph(HGraph* graph) {
  DisallowHeapAllocation no_allocation;
  DisallowHandleAllocation no_handles;
  DisallowHandleDereference no_deref;

  DCHECK(graph != NULL);
  BailoutReason bailout_reason = kNoReason;
  if (!graph->Optimize(&bailout_reason)) {
    FATAL(GetBailoutReason(bailout_reason));
  }
  LChunk* chunk = LChunk::NewChunk(graph);
  if (chunk == NULL) {
    FATAL(GetBailoutReason(graph->info()->bailout_reason()));
  }
  return chunk;
}

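// Common base for the Hydrogen-based stub builders below: it wires up the
// stub's parameters (register and stack) from its call interface descriptor
// and provides shared graph-building helpers; concrete stubs only override
// BuildCodeStub().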
class CodeStubGraphBuilderBase : public HGraphBuilder {
 public:
  explicit CodeStubGraphBuilderBase(CompilationInfo* info, CodeStub* code_stub)
      : HGraphBuilder(info, code_stub->GetCallInterfaceDescriptor()),
        arguments_length_(NULL),
        info_(info),
        code_stub_(code_stub),
        descriptor_(code_stub),
        context_(NULL) {
    int parameter_count = GetParameterCount();
    parameters_.Reset(new HParameter*[parameter_count]);
  }
  virtual bool BuildGraph();

 protected:
  virtual HValue* BuildCodeStub() = 0;
  int GetParameterCount() const { return descriptor_.GetParameterCount(); }
  int GetRegisterParameterCount() const {
    return descriptor_.GetRegisterParameterCount();
  }
  HParameter* GetParameter(int parameter) {
    DCHECK(parameter < GetParameterCount());
    return parameters_[parameter];
  }
  Representation GetParameterRepresentation(int parameter) {
    return RepresentationFromType(descriptor_.GetParameterType(parameter));
  }
  bool IsParameterCountRegister(int index) const {
    return descriptor_.GetRegisterParameter(index)
        .is(descriptor_.stack_parameter_count());
  }
  HValue* GetArgumentsLength() {
    // This is initialized in BuildGraph()
    DCHECK(arguments_length_ != NULL);
    return arguments_length_;
  }
  CompilationInfo* info() { return info_; }
  CodeStub* stub() { return code_stub_; }
  HContext* context() { return context_; }
  Isolate* isolate() { return info_->isolate(); }

  HLoadNamedField* BuildLoadNamedField(HValue* object, FieldIndex index);
  void BuildStoreNamedField(HValue* object, HValue* value, FieldIndex index,
                            Representation representation,
                            bool transition_to_field);

  HValue* BuildPushElement(HValue* object, HValue* argc,
                           HValue* argument_elements, ElementsKind kind);

  HValue* UnmappedCase(HValue* elements, HValue* key, HValue* value);
  HValue* EmitKeyedSloppyArguments(HValue* receiver, HValue* key,
                                   HValue* value);

  HValue* BuildToString(HValue* input, bool convert);
  HValue* BuildToPrimitive(HValue* input, HValue* input_map);

 private:
  base::SmartArrayPointer<HParameter*> parameters_;
  HValue* arguments_length_;
  CompilationInfo* info_;
  CodeStub* code_stub_;
  CodeStubDescriptor descriptor_;
  HContext* context_;
};


bool CodeStubGraphBuilderBase::BuildGraph() {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  if (FLAG_trace_hydrogen_stubs) {
    const char* name = CodeStub::MajorName(stub()->MajorKey());
    PrintF("-----------------------------------------------------------\n");
    PrintF("Compiling stub %s using hydrogen\n", name);
    isolate()->GetHTracer()->TraceCompilation(info());
  }

  int param_count = GetParameterCount();
  int register_param_count = GetRegisterParameterCount();
  HEnvironment* start_environment = graph()->start_environment();
  HBasicBlock* next_block = CreateBasicBlock(start_environment);
  Goto(next_block);
  next_block->SetJoinId(BailoutId::StubEntry());
  set_current_block(next_block);

  bool runtime_stack_params = descriptor_.stack_parameter_count().is_valid();
  HInstruction* stack_parameter_count = NULL;
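  // Bind each descriptor parameter: register parameters go into the start
  // environment, stack parameters are addressed relative to the frame, and a
  // register that carries the dynamic stack parameter count (if any) doubles
  // as the arguments length.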
  for (int i = 0; i < param_count; ++i) {
    Representation r = GetParameterRepresentation(i);
    HParameter* param;
    if (i >= register_param_count) {
      param = Add<HParameter>(i - register_param_count,
                              HParameter::STACK_PARAMETER, r);
    } else {
      param = Add<HParameter>(i, HParameter::REGISTER_PARAMETER, r);
      start_environment->Bind(i, param);
    }
    parameters_[i] = param;
    if (i < register_param_count && IsParameterCountRegister(i)) {
      param->set_type(HType::Smi());
      stack_parameter_count = param;
      arguments_length_ = stack_parameter_count;
    }
  }

  DCHECK(!runtime_stack_params || arguments_length_ != NULL);
  if (!runtime_stack_params) {
    stack_parameter_count =
        Add<HConstant>(param_count - register_param_count - 1);
    // graph()->GetConstantMinus1();
    arguments_length_ = graph()->GetConstant0();
  }

  context_ = Add<HContext>();
  start_environment->BindContext(context_);
  start_environment->Bind(param_count, context_);

  Add<HSimulate>(BailoutId::StubEntry());

  NoObservableSideEffectsScope no_effects(this);

  HValue* return_value = BuildCodeStub();

  // We might have extra expressions to pop from the stack in addition to the
  // arguments above.
  HInstruction* stack_pop_count = stack_parameter_count;
  if (descriptor_.function_mode() == JS_FUNCTION_STUB_MODE) {
    if (!stack_parameter_count->IsConstant() &&
        descriptor_.hint_stack_parameter_count() < 0) {
      HInstruction* constant_one = graph()->GetConstant1();
      stack_pop_count = AddUncasted<HAdd>(stack_parameter_count, constant_one);
      stack_pop_count->ClearFlag(HValue::kCanOverflow);
      // TODO(mvstanton): verify that stack_parameter_count+1 really fits in a
      // smi.
    } else {
      int count = descriptor_.hint_stack_parameter_count();
      stack_pop_count = Add<HConstant>(count);
    }
  }

  if (current_block() != NULL) {
    HReturn* hreturn_instruction = New<HReturn>(return_value,
                                                stack_pop_count);
    FinishCurrentBlock(hreturn_instruction);
  }
  return true;
}


template <class Stub>
class CodeStubGraphBuilder: public CodeStubGraphBuilderBase {
 public:
  explicit CodeStubGraphBuilder(CompilationInfo* info, CodeStub* stub)
      : CodeStubGraphBuilderBase(info, stub) {}

 protected:
  virtual HValue* BuildCodeStub() {
    if (casted_stub()->IsUninitialized()) {
      return BuildCodeUninitializedStub();
    } else {
      return BuildCodeInitializedStub();
    }
  }

  virtual HValue* BuildCodeInitializedStub() {
    UNIMPLEMENTED();
    return NULL;
  }

  virtual HValue* BuildCodeUninitializedStub() {
    // Force a deopt that falls back to the runtime.
    HValue* undefined = graph()->GetConstantUndefined();
    IfBuilder builder(this);
    builder.IfNot<HCompareObjectEqAndBranch, HValue*>(undefined, undefined);
    builder.Then();
    builder.ElseDeopt(Deoptimizer::kForcedDeoptToRuntime);
    return undefined;
  }

  Stub* casted_stub() { return static_cast<Stub*>(stub()); }
};


Handle<Code> HydrogenCodeStub::GenerateLightweightMissCode(
    ExternalReference miss) {
  Factory* factory = isolate()->factory();

  // Generate the new code.
  MacroAssembler masm(isolate(), NULL, 256, CodeObjectRequired::kYes);

  {
    // Update the static counter each time a new code stub is generated.
    isolate()->counters()->code_stubs()->Increment();

    // Generate the code for the stub.
    masm.set_generating_stub(true);
    // TODO(yangguo): remove this once we can serialize IC stubs.
    masm.enable_serializer();
    NoCurrentFrameScope scope(&masm);
    GenerateLightweightMiss(&masm, miss);
  }

  // Create the code object.
  CodeDesc desc;
  masm.GetCode(&desc);

  // Copy the generated code into a heap object.
  Handle<Code> new_object = factory->NewCode(
      desc, GetCodeFlags(), masm.CodeObject(), NeedsImmovableCode());
  return new_object;
}


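// Shared driver for all Hydrogen stubs: either emits a lightweight miss
// handler for uninitialized stubs or builds the Hydrogen graph, optimizes it,
// and generates code through the Lithium backend.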
template <class Stub>
static Handle<Code> DoGenerateCode(Stub* stub) {
  Isolate* isolate = stub->isolate();
  CodeStubDescriptor descriptor(stub);

  // If we are uninitialized we can use a light-weight stub to enter
  // the runtime that is significantly faster than using the standard
  // stub-failure deopt mechanism.
  if (stub->IsUninitialized() && descriptor.has_miss_handler()) {
    DCHECK(!descriptor.stack_parameter_count().is_valid());
    return stub->GenerateLightweightMissCode(descriptor.miss_handler());
  }
  base::ElapsedTimer timer;
  if (FLAG_profile_hydrogen_code_stub_compilation) {
    timer.Start();
  }
  Zone zone(isolate->allocator());
  CompilationInfo info(CStrVector(CodeStub::MajorName(stub->MajorKey())),
                       isolate, &zone, stub->GetCodeFlags());
  // Parameter count is number of stack parameters.
  int parameter_count = descriptor.GetStackParameterCount();
  if (descriptor.function_mode() == NOT_JS_FUNCTION_STUB_MODE) {
    parameter_count--;
  }
  info.set_parameter_count(parameter_count);
  CodeStubGraphBuilder<Stub> builder(&info, stub);
  LChunk* chunk = OptimizeGraph(builder.CreateGraph());
  Handle<Code> code = chunk->Codegen();
  if (FLAG_profile_hydrogen_code_stub_compilation) {
    OFStream os(stdout);
    os << "[Lazy compilation of " << stub << " took "
       << timer.Elapsed().InMillisecondsF() << " ms]" << std::endl;
  }
  return code;
}


template <>
HValue* CodeStubGraphBuilder<NumberToStringStub>::BuildCodeStub() {
  info()->MarkAsSavesCallerDoubles();
  HValue* number = GetParameter(NumberToStringStub::kNumber);
  return BuildNumberToString(number, Type::Number());
}


Handle<Code> NumberToStringStub::GenerateCode() {
  return DoGenerateCode(this);
}


// Returns the type string of a value; see ECMA-262, 11.4.3 (p 47).
template <>
HValue* CodeStubGraphBuilder<TypeofStub>::BuildCodeStub() {
  Factory* factory = isolate()->factory();
  HConstant* number_string = Add<HConstant>(factory->number_string());
  HValue* object = GetParameter(TypeofStub::kObject);

  IfBuilder is_smi(this);
  HValue* smi_check = is_smi.If<HIsSmiAndBranch>(object);
  is_smi.Then();
  { Push(number_string); }
  is_smi.Else();
  {
    IfBuilder is_number(this);
    is_number.If<HCompareMap>(object, isolate()->factory()->heap_number_map());
    is_number.Then();
    { Push(number_string); }
    is_number.Else();
    {
      HValue* map = AddLoadMap(object, smi_check);
      HValue* instance_type = Add<HLoadNamedField>(
          map, nullptr, HObjectAccess::ForMapInstanceType());
      IfBuilder is_string(this);
      is_string.If<HCompareNumericAndBranch>(
          instance_type, Add<HConstant>(FIRST_NONSTRING_TYPE), Token::LT);
      is_string.Then();
      { Push(Add<HConstant>(factory->string_string())); }
      is_string.Else();
      {
        HConstant* object_string = Add<HConstant>(factory->object_string());
        IfBuilder is_oddball(this);
        is_oddball.If<HCompareNumericAndBranch>(
            instance_type, Add<HConstant>(ODDBALL_TYPE), Token::EQ);
        is_oddball.Then();
        {
          Push(Add<HLoadNamedField>(object, nullptr,
                                    HObjectAccess::ForOddballTypeOf()));
        }
        is_oddball.Else();
        {
          IfBuilder is_symbol(this);
          is_symbol.If<HCompareNumericAndBranch>(
              instance_type, Add<HConstant>(SYMBOL_TYPE), Token::EQ);
          is_symbol.Then();
          { Push(Add<HConstant>(factory->symbol_string())); }
          is_symbol.Else();
          {
            HValue* bit_field = Add<HLoadNamedField>(
                map, nullptr, HObjectAccess::ForMapBitField());
            HValue* bit_field_masked = AddUncasted<HBitwise>(
                Token::BIT_AND, bit_field,
                Add<HConstant>((1 << Map::kIsCallable) |
                               (1 << Map::kIsUndetectable)));
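            // A map with the kIsCallable bit set and kIsUndetectable clear
            // corresponds to "function"; undetectable objects fall through to
            // the "undefined" case below.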
            IfBuilder is_function(this);
            is_function.If<HCompareNumericAndBranch>(
                bit_field_masked, Add<HConstant>(1 << Map::kIsCallable),
                Token::EQ);
            is_function.Then();
            { Push(Add<HConstant>(factory->function_string())); }
            is_function.Else();
            {
#define SIMD128_BUILDER_OPEN(TYPE, Type, type, lane_count, lane_type) \
  IfBuilder is_##type(this);                                          \
  is_##type.If<HCompareObjectEqAndBranch>(                            \
      map, Add<HConstant>(factory->type##_map()));                    \
  is_##type.Then();                                                   \
  { Push(Add<HConstant>(factory->type##_string())); }                 \
  is_##type.Else(); {
              SIMD128_TYPES(SIMD128_BUILDER_OPEN)
#undef SIMD128_BUILDER_OPEN
              // Is it an undetectable object?
              IfBuilder is_undetectable(this);
              is_undetectable.If<HCompareNumericAndBranch>(
                  bit_field_masked, graph()->GetConstant0(), Token::NE);
              is_undetectable.Then();
              {
                // typeof an undetectable object is 'undefined'.
                Push(Add<HConstant>(factory->undefined_string()));
              }
              is_undetectable.Else();
              {
                // For any kind of object not handled above, the spec rule for
                // host objects gives that it is okay to return "object".
                Push(object_string);
              }
#define SIMD128_BUILDER_CLOSE(TYPE, Type, type, lane_count, lane_type) }
              SIMD128_TYPES(SIMD128_BUILDER_CLOSE)
#undef SIMD128_BUILDER_CLOSE
            }
            is_function.End();
          }
          is_symbol.End();
        }
        is_oddball.End();
      }
      is_string.End();
    }
    is_number.End();
  }
  is_smi.End();

  return environment()->Pop();
}


Handle<Code> TypeofStub::GenerateCode() { return DoGenerateCode(this); }


template <>
HValue* CodeStubGraphBuilder<FastCloneRegExpStub>::BuildCodeStub() {
  HValue* closure = GetParameter(0);
  HValue* literal_index = GetParameter(1);

  // This stub is very performance sensitive; the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();

  HValue* literals_array = Add<HLoadNamedField>(
      closure, nullptr, HObjectAccess::ForLiteralsPointer());
  HInstruction* boilerplate = Add<HLoadKeyed>(
      literals_array, literal_index, nullptr, nullptr, FAST_ELEMENTS,
      NEVER_RETURN_HOLE, LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag);

  IfBuilder if_notundefined(this);
  if_notundefined.IfNot<HCompareObjectEqAndBranch>(
      boilerplate, graph()->GetConstantUndefined());
  if_notundefined.Then();
  {
    int result_size =
        JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
    HValue* result =
        Add<HAllocate>(Add<HConstant>(result_size), HType::JSObject(),
                       NOT_TENURED, JS_REGEXP_TYPE, graph()->GetConstant0());
    Add<HStoreNamedField>(
        result, HObjectAccess::ForMap(),
        Add<HLoadNamedField>(boilerplate, nullptr, HObjectAccess::ForMap()));
    Add<HStoreNamedField>(
        result, HObjectAccess::ForPropertiesPointer(),
        Add<HLoadNamedField>(boilerplate, nullptr,
                             HObjectAccess::ForPropertiesPointer()));
    Add<HStoreNamedField>(
        result, HObjectAccess::ForElementsPointer(),
        Add<HLoadNamedField>(boilerplate, nullptr,
                             HObjectAccess::ForElementsPointer()));
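    // Copy the remaining in-object fields of the boilerplate into the clone.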
    for (int offset = JSObject::kHeaderSize; offset < result_size;
         offset += kPointerSize) {
      HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(offset);
      Add<HStoreNamedField>(result, access,
                            Add<HLoadNamedField>(boilerplate, nullptr, access));
    }
    Push(result);
  }
  if_notundefined.ElseDeopt(Deoptimizer::kUninitializedBoilerplateInFastClone);
  if_notundefined.End();

  return Pop();
}


Handle<Code> FastCloneRegExpStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<FastCloneShallowArrayStub>::BuildCodeStub() {
  Factory* factory = isolate()->factory();
  HValue* undefined = graph()->GetConstantUndefined();
  AllocationSiteMode alloc_site_mode = casted_stub()->allocation_site_mode();
  HValue* closure = GetParameter(0);
  HValue* literal_index = GetParameter(1);

  // TODO(turbofan): This codestub has regressed to need a frame on ia32 at
  // some point and wasn't caught since it wasn't built in the snapshot. We
  // should probably just replace with a TurboFan stub rather than fixing it.
#if !(V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X87)
  // This stub is very performance sensitive; the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();
#endif

  HValue* literals_array = Add<HLoadNamedField>(
      closure, nullptr, HObjectAccess::ForLiteralsPointer());

  HInstruction* allocation_site = Add<HLoadKeyed>(
      literals_array, literal_index, nullptr, nullptr, FAST_ELEMENTS,
      NEVER_RETURN_HOLE, LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag);
  IfBuilder checker(this);
  checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
                                                    undefined);
  checker.Then();

  HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
      AllocationSite::kTransitionInfoOffset);
  HInstruction* boilerplate =
      Add<HLoadNamedField>(allocation_site, nullptr, access);
  HValue* elements = AddLoadElements(boilerplate);
  HValue* capacity = AddLoadFixedArrayLength(elements);
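  // Pick the cloning strategy from the boilerplate's backing store: empty,
  // copy-on-write, plain FAST_ELEMENTS, or FAST_DOUBLE_ELEMENTS.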
  IfBuilder zero_capacity(this);
  zero_capacity.If<HCompareNumericAndBranch>(capacity, graph()->GetConstant0(),
                                             Token::EQ);
  zero_capacity.Then();
  Push(BuildCloneShallowArrayEmpty(boilerplate,
                                   allocation_site,
                                   alloc_site_mode));
  zero_capacity.Else();
  IfBuilder if_fixed_cow(this);
  if_fixed_cow.If<HCompareMap>(elements, factory->fixed_cow_array_map());
  if_fixed_cow.Then();
  Push(BuildCloneShallowArrayCow(boilerplate,
                                 allocation_site,
                                 alloc_site_mode,
                                 FAST_ELEMENTS));
  if_fixed_cow.Else();
  IfBuilder if_fixed(this);
  if_fixed.If<HCompareMap>(elements, factory->fixed_array_map());
  if_fixed.Then();
  Push(BuildCloneShallowArrayNonEmpty(boilerplate,
                                      allocation_site,
                                      alloc_site_mode,
                                      FAST_ELEMENTS));

  if_fixed.Else();
  Push(BuildCloneShallowArrayNonEmpty(boilerplate,
                                      allocation_site,
                                      alloc_site_mode,
                                      FAST_DOUBLE_ELEMENTS));
  if_fixed.End();
  if_fixed_cow.End();
  zero_capacity.End();

  checker.ElseDeopt(Deoptimizer::kUninitializedBoilerplateLiterals);
  checker.End();

  return environment()->Pop();
}


Handle<Code> FastCloneShallowArrayStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<CreateAllocationSiteStub>::BuildCodeStub() {
  // This stub is performance sensitive, the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();

  HValue* size = Add<HConstant>(AllocationSite::kSize);
  HInstruction* object =
      Add<HAllocate>(size, HType::JSObject(), TENURED, JS_OBJECT_TYPE,
                     graph()->GetConstant0());

  // Store the map
  Handle<Map> allocation_site_map = isolate()->factory()->allocation_site_map();
  AddStoreMapConstant(object, allocation_site_map);

  // Store the payload (smi elements kind)
  HValue* initial_elements_kind = Add<HConstant>(GetInitialFastElementsKind());
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kTransitionInfoOffset),
                        initial_elements_kind);

  // Unlike literals, constructed arrays don't have nested sites
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kNestedSiteOffset),
                        graph()->GetConstant0());

  // Pretenuring calculation field.
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kPretenureDataOffset),
                        graph()->GetConstant0());

  // Pretenuring memento creation count field.
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kPretenureCreateCountOffset),
                        graph()->GetConstant0());

  // Store an empty fixed array for the code dependency.
  HConstant* empty_fixed_array =
      Add<HConstant>(isolate()->factory()->empty_fixed_array());
  Add<HStoreNamedField>(
      object,
      HObjectAccess::ForAllocationSiteOffset(
          AllocationSite::kDependentCodeOffset),
      empty_fixed_array);

  // Link the object to the allocation site list
  HValue* site_list = Add<HConstant>(
      ExternalReference::allocation_sites_list_address(isolate()));
  HValue* site = Add<HLoadNamedField>(site_list, nullptr,
                                      HObjectAccess::ForAllocationSiteList());
  // TODO(mvstanton): This is a store to a weak pointer, which we may want to
  // mark as such in order to skip the write barrier, once we have a unified
  // system for weakness. For now we decided to keep it like this because having
  // an initial write barrier backed store makes this pointer strong until the
  // next GC, and allocation sites are designed to survive several GCs anyway.
  Add<HStoreNamedField>(
      object,
      HObjectAccess::ForAllocationSiteOffset(AllocationSite::kWeakNextOffset),
      site);
  Add<HStoreNamedField>(site_list, HObjectAccess::ForAllocationSiteList(),
                        object);

  HInstruction* feedback_vector = GetParameter(0);
  HInstruction* slot = GetParameter(1);
  Add<HStoreKeyed>(feedback_vector, slot, object, nullptr, FAST_ELEMENTS,
                   INITIALIZING_STORE);
  return feedback_vector;
}


Handle<Code> CreateAllocationSiteStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<CreateWeakCellStub>::BuildCodeStub() {
  // This stub is performance sensitive, the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();

  HValue* size = Add<HConstant>(WeakCell::kSize);
  HInstruction* object =
      Add<HAllocate>(size, HType::JSObject(), TENURED, JS_OBJECT_TYPE,
                     graph()->GetConstant0());

  Handle<Map> weak_cell_map = isolate()->factory()->weak_cell_map();
  AddStoreMapConstant(object, weak_cell_map);

  HInstruction* value = GetParameter(CreateWeakCellDescriptor::kValueIndex);
  Add<HStoreNamedField>(object, HObjectAccess::ForWeakCellValue(), value);
  Add<HStoreNamedField>(object, HObjectAccess::ForWeakCellNext(),
                        graph()->GetConstantHole());

  HInstruction* feedback_vector =
      GetParameter(CreateWeakCellDescriptor::kVectorIndex);
  HInstruction* slot = GetParameter(CreateWeakCellDescriptor::kSlotIndex);
  Add<HStoreKeyed>(feedback_vector, slot, object, nullptr, FAST_ELEMENTS,
                   INITIALIZING_STORE);
  return graph()->GetConstant0();
}


Handle<Code> CreateWeakCellStub::GenerateCode() { return DoGenerateCode(this); }


template <>
HValue* CodeStubGraphBuilder<LoadScriptContextFieldStub>::BuildCodeStub() {
  int context_index = casted_stub()->context_index();
  int slot_index = casted_stub()->slot_index();

  HValue* script_context = BuildGetScriptContext(context_index);
  return Add<HLoadNamedField>(script_context, nullptr,
                              HObjectAccess::ForContextSlot(slot_index));
}


Handle<Code> LoadScriptContextFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<StoreScriptContextFieldStub>::BuildCodeStub() {
  int context_index = casted_stub()->context_index();
  int slot_index = casted_stub()->slot_index();

  HValue* script_context = BuildGetScriptContext(context_index);
  Add<HStoreNamedField>(script_context,
                        HObjectAccess::ForContextSlot(slot_index),
                        GetParameter(2), STORE_TO_INITIALIZED_ENTRY);
  return GetParameter(2);
}


Handle<Code> StoreScriptContextFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}

HValue* CodeStubGraphBuilderBase::BuildPushElement(HValue* object, HValue* argc,
                                                   HValue* argument_elements,
                                                   ElementsKind kind) {
  // Precheck whether all elements fit into the array.
  if (!IsFastObjectElementsKind(kind)) {
    LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);
    HValue* start = graph()->GetConstant0();
    HValue* key = builder.BeginBody(start, argc, Token::LT);
    {
      HInstruction* argument =
          Add<HAccessArgumentsAt>(argument_elements, argc, key);
      IfBuilder can_store(this);
      can_store.IfNot<HIsSmiAndBranch>(argument);
      if (IsFastDoubleElementsKind(kind)) {
        can_store.And();
        can_store.IfNot<HCompareMap>(argument,
                                     isolate()->factory()->heap_number_map());
      }
      can_store.ThenDeopt(Deoptimizer::kFastPathFailed);
      can_store.End();
    }
    builder.EndBody();
  }

  HValue* length = Add<HLoadNamedField>(object, nullptr,
                                        HObjectAccess::ForArrayLength(kind));
  HValue* new_length = AddUncasted<HAdd>(length, argc);
  HValue* max_key = AddUncasted<HSub>(new_length, graph()->GetConstant1());

  HValue* elements = Add<HLoadNamedField>(object, nullptr,
                                          HObjectAccess::ForElementsPointer());
  elements = BuildCheckForCapacityGrow(object, elements, kind, length, max_key,
                                       true, STORE);

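  // Append the incoming arguments behind the existing elements, now that the
  // backing store is known to be large enough.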
  LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);
  HValue* start = graph()->GetConstant0();
  HValue* key = builder.BeginBody(start, argc, Token::LT);
  {
    HValue* argument = Add<HAccessArgumentsAt>(argument_elements, argc, key);
    HValue* index = AddUncasted<HAdd>(key, length);
    AddElementAccess(elements, index, argument, object, nullptr, kind, STORE);
  }
  builder.EndBody();
  return new_length;
}

template <>
HValue* CodeStubGraphBuilder<FastArrayPushStub>::BuildCodeStub() {
  // TODO(verwaest): Fix deoptimizer messages.
  HValue* argc = GetArgumentsLength();
  HInstruction* argument_elements = Add<HArgumentsElements>(false, false);
  HInstruction* object = Add<HAccessArgumentsAt>(argument_elements, argc,
                                                 graph()->GetConstantMinus1());
  BuildCheckHeapObject(object);
  HValue* map = Add<HLoadNamedField>(object, nullptr, HObjectAccess::ForMap());
  Add<HCheckInstanceType>(object, HCheckInstanceType::IS_JS_ARRAY);

  // Disallow pushing onto prototypes. It might be the JSArray prototype.
  // Disallow pushing onto non-extensible objects.
  {
    HValue* bit_field2 =
        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField2());
    HValue* mask =
        Add<HConstant>(static_cast<int>(Map::IsPrototypeMapBits::kMask) |
                       (1 << Map::kIsExtensible));
    HValue* bits = AddUncasted<HBitwise>(Token::BIT_AND, bit_field2, mask);
    IfBuilder check(this);
    check.If<HCompareNumericAndBranch>(
        bits, Add<HConstant>(1 << Map::kIsExtensible), Token::NE);
    check.ThenDeopt(Deoptimizer::kFastPathFailed);
    check.End();
  }

  // Disallow pushing onto arrays in dictionary named property mode. We need to
  // figure out whether the length property is still writable.
  {
    HValue* bit_field3 =
        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField3());
    HValue* mask = Add<HConstant>(static_cast<int>(Map::DictionaryMap::kMask));
    HValue* bit = AddUncasted<HBitwise>(Token::BIT_AND, bit_field3, mask);
    IfBuilder check(this);
    check.If<HCompareNumericAndBranch>(bit, mask, Token::EQ);
    check.ThenDeopt(Deoptimizer::kFastPathFailed);
    check.End();
  }

  // Check whether the length property is writable. The length property is the
  // only default named property on arrays. It's nonconfigurable, hence is
  // guaranteed to stay the first property.
  {
    HValue* descriptors =
        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapDescriptors());
    HValue* details = Add<HLoadKeyed>(
        descriptors, Add<HConstant>(DescriptorArray::ToDetailsIndex(0)),
        nullptr, nullptr, FAST_SMI_ELEMENTS);
    HValue* mask =
        Add<HConstant>(READ_ONLY << PropertyDetails::AttributesField::kShift);
    HValue* bit = AddUncasted<HBitwise>(Token::BIT_AND, details, mask);
    IfBuilder readonly(this);
    readonly.If<HCompareNumericAndBranch>(bit, mask, Token::EQ);
    readonly.ThenDeopt(Deoptimizer::kFastPathFailed);
    readonly.End();
  }

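  // Walk the prototype chain up to null; the fast path only applies when every
  // prototype has empty elements and no custom element handling, otherwise
  // deopt to the runtime.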
  HValue* null = Add<HLoadRoot>(Heap::kNullValueRootIndex);
  HValue* empty = Add<HLoadRoot>(Heap::kEmptyFixedArrayRootIndex);
  environment()->Push(map);
  LoopBuilder check_prototypes(this);
  check_prototypes.BeginBody(1);
  {
    HValue* parent_map = environment()->Pop();
    HValue* prototype = Add<HLoadNamedField>(parent_map, nullptr,
                                             HObjectAccess::ForPrototype());

    IfBuilder is_null(this);
    is_null.If<HCompareObjectEqAndBranch>(prototype, null);
    is_null.Then();
    check_prototypes.Break();
    is_null.End();

    HValue* prototype_map =
        Add<HLoadNamedField>(prototype, nullptr, HObjectAccess::ForMap());
    HValue* instance_type = Add<HLoadNamedField>(
        prototype_map, nullptr, HObjectAccess::ForMapInstanceType());
    IfBuilder check_instance_type(this);
    check_instance_type.If<HCompareNumericAndBranch>(
        instance_type, Add<HConstant>(LAST_CUSTOM_ELEMENTS_RECEIVER),
        Token::LTE);
    check_instance_type.ThenDeopt(Deoptimizer::kFastPathFailed);
    check_instance_type.End();

    HValue* elements = Add<HLoadNamedField>(
        prototype, nullptr, HObjectAccess::ForElementsPointer());
    IfBuilder no_elements(this);
    no_elements.IfNot<HCompareObjectEqAndBranch>(elements, empty);
    no_elements.ThenDeopt(Deoptimizer::kFastPathFailed);
    no_elements.End();

    environment()->Push(prototype_map);
  }
  check_prototypes.EndBody();

  HValue* bit_field2 =
      Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField2());
  HValue* kind = BuildDecodeField<Map::ElementsKindBits>(bit_field2);

  // Below we only check the upper bound of the relevant ranges to include both
  // holey and non-holey versions. We check them in order smi, object, double
  // since smi < object < double.
  STATIC_ASSERT(FAST_SMI_ELEMENTS < FAST_HOLEY_SMI_ELEMENTS);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS < FAST_HOLEY_ELEMENTS);
  STATIC_ASSERT(FAST_ELEMENTS < FAST_HOLEY_ELEMENTS);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS < FAST_HOLEY_DOUBLE_ELEMENTS);
  STATIC_ASSERT(FAST_DOUBLE_ELEMENTS < FAST_HOLEY_DOUBLE_ELEMENTS);
  IfBuilder has_smi_elements(this);
  has_smi_elements.If<HCompareNumericAndBranch>(
      kind, Add<HConstant>(FAST_HOLEY_SMI_ELEMENTS), Token::LTE);
  has_smi_elements.Then();
  {
    HValue* new_length = BuildPushElement(object, argc, argument_elements,
                                          FAST_HOLEY_SMI_ELEMENTS);
    environment()->Push(new_length);
  }
  has_smi_elements.Else();
  {
    IfBuilder has_object_elements(this);
    has_object_elements.If<HCompareNumericAndBranch>(
        kind, Add<HConstant>(FAST_HOLEY_ELEMENTS), Token::LTE);
    has_object_elements.Then();
    {
      HValue* new_length = BuildPushElement(object, argc, argument_elements,
                                            FAST_HOLEY_ELEMENTS);
      environment()->Push(new_length);
    }
    has_object_elements.Else();
    {
      IfBuilder has_double_elements(this);
      has_double_elements.If<HCompareNumericAndBranch>(
          kind, Add<HConstant>(FAST_HOLEY_DOUBLE_ELEMENTS), Token::LTE);
      has_double_elements.Then();
      {
        HValue* new_length = BuildPushElement(object, argc, argument_elements,
                                              FAST_HOLEY_DOUBLE_ELEMENTS);
        environment()->Push(new_length);
      }
      has_double_elements.ElseDeopt(Deoptimizer::kFastPathFailed);
      has_double_elements.End();
    }
    has_object_elements.End();
  }
  has_smi_elements.End();

  return environment()->Pop();
}

Handle<Code> FastArrayPushStub::GenerateCode() { return DoGenerateCode(this); }

template <>
HValue* CodeStubGraphBuilder<FastFunctionBindStub>::BuildCodeStub() {
  // TODO(verwaest): Fix deoptimizer messages.
  HValue* argc = GetArgumentsLength();
  HInstruction* argument_elements = Add<HArgumentsElements>(false, false);
  HInstruction* object = Add<HAccessArgumentsAt>(argument_elements, argc,
                                                 graph()->GetConstantMinus1());
  BuildCheckHeapObject(object);
  HValue* map = Add<HLoadNamedField>(object, nullptr, HObjectAccess::ForMap());
  Add<HCheckInstanceType>(object, HCheckInstanceType::IS_JS_FUNCTION);

  // Disallow binding of slow-mode functions. We need to figure out whether the
  // length and name properties are in the original state.
  {
    HValue* bit_field3 =
        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField3());
    HValue* mask = Add<HConstant>(static_cast<int>(Map::DictionaryMap::kMask));
    HValue* bit = AddUncasted<HBitwise>(Token::BIT_AND, bit_field3, mask);
    IfBuilder check(this);
    check.If<HCompareNumericAndBranch>(bit, mask, Token::EQ);
    check.ThenDeopt(Deoptimizer::kFastPathFailed);
    check.End();
  }

  // Check whether the length and name properties are still present as
  // AccessorInfo objects. In that case, their value can be recomputed even if
  // the actual value on the object changes.
  {
    HValue* descriptors =
        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapDescriptors());

    HValue* descriptors_length = Add<HLoadNamedField>(
        descriptors, nullptr, HObjectAccess::ForFixedArrayLength());
    IfBuilder range(this);
    range.If<HCompareNumericAndBranch>(descriptors_length,
                                       graph()->GetConstant1(), Token::LTE);
    range.ThenDeopt(Deoptimizer::kFastPathFailed);
    range.End();

    // Verify .length.
    const int length_index = JSFunction::kLengthDescriptorIndex;
    HValue* maybe_length = Add<HLoadKeyed>(
        descriptors, Add<HConstant>(DescriptorArray::ToKeyIndex(length_index)),
        nullptr, nullptr, FAST_ELEMENTS);
    Unique<Name> length_string = Unique<Name>::CreateUninitialized(
        isolate()->factory()->length_string());
    Add<HCheckValue>(maybe_length, length_string, false);

    HValue* maybe_length_accessor = Add<HLoadKeyed>(
        descriptors,
        Add<HConstant>(DescriptorArray::ToValueIndex(length_index)), nullptr,
        nullptr, FAST_ELEMENTS);
    BuildCheckHeapObject(maybe_length_accessor);
    Add<HCheckMaps>(maybe_length_accessor,
                    isolate()->factory()->accessor_info_map());

    // Verify .name.
    const int name_index = JSFunction::kNameDescriptorIndex;
    HValue* maybe_name = Add<HLoadKeyed>(
        descriptors, Add<HConstant>(DescriptorArray::ToKeyIndex(name_index)),
        nullptr, nullptr, FAST_ELEMENTS);
    Unique<Name> name_string =
        Unique<Name>::CreateUninitialized(isolate()->factory()->name_string());
    Add<HCheckValue>(maybe_name, name_string, false);

    HValue* maybe_name_accessor = Add<HLoadKeyed>(
        descriptors, Add<HConstant>(DescriptorArray::ToValueIndex(name_index)),
        nullptr, nullptr, FAST_ELEMENTS);
    BuildCheckHeapObject(maybe_name_accessor);
    Add<HCheckMaps>(maybe_name_accessor,
                    isolate()->factory()->accessor_info_map());
  }

  // Choose the right bound function map based on whether the target is
  // constructable.
  {
    HValue* bit_field =
        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField());
    HValue* mask = Add<HConstant>(static_cast<int>(1 << Map::kIsConstructor));
    HValue* bits = AddUncasted<HBitwise>(Token::BIT_AND, bit_field, mask);

    HValue* native_context = BuildGetNativeContext();
    IfBuilder is_constructor(this);
    is_constructor.If<HCompareNumericAndBranch>(bits, mask, Token::EQ);
    is_constructor.Then();
    {
      HValue* map = Add<HLoadNamedField>(
          native_context, nullptr,
          HObjectAccess::ForContextSlot(
              Context::BOUND_FUNCTION_WITH_CONSTRUCTOR_MAP_INDEX));
      environment()->Push(map);
    }
    is_constructor.Else();
    {
      HValue* map = Add<HLoadNamedField>(
          native_context, nullptr,
          HObjectAccess::ForContextSlot(
              Context::BOUND_FUNCTION_WITHOUT_CONSTRUCTOR_MAP_INDEX));
      environment()->Push(map);
    }
    is_constructor.End();
  }
  HValue* bound_function_map = environment()->Pop();

  // Verify that __proto__ matches that of the target bound function.
  {
    HValue* prototype =
        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForPrototype());
    HValue* expected_prototype = Add<HLoadNamedField>(
        bound_function_map, nullptr, HObjectAccess::ForPrototype());
    IfBuilder equal_prototype(this);
    equal_prototype.IfNot<HCompareObjectEqAndBranch>(prototype,
                                                     expected_prototype);
    equal_prototype.ThenDeopt(Deoptimizer::kFastPathFailed);
    equal_prototype.End();
  }

  // Allocate the arguments array.
  IfBuilder empty_args(this);
  empty_args.If<HCompareNumericAndBranch>(argc, graph()->GetConstant1(),
                                          Token::LTE);
  empty_args.Then();
  { environment()->Push(Add<HLoadRoot>(Heap::kEmptyFixedArrayRootIndex)); }
  empty_args.Else();
  {
    HValue* elements_length = AddUncasted<HSub>(argc, graph()->GetConstant1());
    HValue* elements =
        BuildAllocateAndInitializeArray(FAST_ELEMENTS, elements_length);

    LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);
    HValue* start = graph()->GetConstant1();
    HValue* key = builder.BeginBody(start, argc, Token::LT);
    {
      HValue* argument = Add<HAccessArgumentsAt>(argument_elements, argc, key);
      HValue* index = AddUncasted<HSub>(key, graph()->GetConstant1());
      AddElementAccess(elements, index, argument, elements, nullptr,
                       FAST_ELEMENTS, STORE);
    }
    builder.EndBody();
    environment()->Push(elements);
  }
  empty_args.End();
  HValue* elements = environment()->Pop();

  // Find the 'this' to bind.
  IfBuilder no_receiver(this);
  no_receiver.If<HCompareNumericAndBranch>(argc, graph()->GetConstant0(),
                                           Token::EQ);
  no_receiver.Then();
  { environment()->Push(Add<HLoadRoot>(Heap::kUndefinedValueRootIndex)); }
  no_receiver.Else();
  {
    environment()->Push(Add<HAccessArgumentsAt>(argument_elements, argc,
                                                graph()->GetConstant0()));
  }
  no_receiver.End();
  HValue* receiver = environment()->Pop();

  // Allocate the resulting bound function.
  HValue* size = Add<HConstant>(JSBoundFunction::kSize);
  HValue* bound_function =
      Add<HAllocate>(size, HType::JSObject(), NOT_TENURED,
                     JS_BOUND_FUNCTION_TYPE, graph()->GetConstant0());
  Add<HStoreNamedField>(bound_function, HObjectAccess::ForMap(),
                        bound_function_map);
  HValue* empty_fixed_array = Add<HLoadRoot>(Heap::kEmptyFixedArrayRootIndex);
  Add<HStoreNamedField>(bound_function, HObjectAccess::ForPropertiesPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(bound_function, HObjectAccess::ForElementsPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(bound_function, HObjectAccess::ForBoundTargetFunction(),
                        object);

  Add<HStoreNamedField>(bound_function, HObjectAccess::ForBoundThis(),
                        receiver);
  Add<HStoreNamedField>(bound_function, HObjectAccess::ForBoundArguments(),
                        elements);

  return bound_function;
}

Handle<Code> FastFunctionBindStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<GrowArrayElementsStub>::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
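  // Double element accesses use FP registers, so the stub has to save the
  // caller's double registers.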
  if (IsFastDoubleElementsKind(kind)) {
    info()->MarkAsSavesCallerDoubles();
  }

  HValue* object = GetParameter(GrowArrayElementsDescriptor::kObjectIndex);
  HValue* key = GetParameter(GrowArrayElementsDescriptor::kKeyIndex);

  HValue* elements = AddLoadElements(object);
  HValue* current_capacity = Add<HLoadNamedField>(
      elements, nullptr, HObjectAccess::ForFixedArrayLength());

  HValue* length =
      casted_stub()->is_js_array()
          ? Add<HLoadNamedField>(object, static_cast<HValue*>(NULL),
                                 HObjectAccess::ForArrayLength(kind))
          : current_capacity;

  return BuildCheckAndGrowElementsCapacity(object, elements, kind, length,
                                           current_capacity, key);
}


Handle<Code> GrowArrayElementsStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<LoadFastElementStub>::BuildCodeStub() {
  LoadKeyedHoleMode hole_mode = casted_stub()->convert_hole_to_undefined()
                                    ? CONVERT_HOLE_TO_UNDEFINED
                                    : NEVER_RETURN_HOLE;

  HInstruction* load = BuildUncheckedMonomorphicElementAccess(
      GetParameter(LoadDescriptor::kReceiverIndex),
      GetParameter(LoadDescriptor::kNameIndex), NULL,
      casted_stub()->is_js_array(), casted_stub()->elements_kind(), LOAD,
      hole_mode, STANDARD_STORE);
  return load;
}


Handle<Code> LoadFastElementStub::GenerateCode() {
  return DoGenerateCode(this);
}


HLoadNamedField* CodeStubGraphBuilderBase::BuildLoadNamedField(
    HValue* object, FieldIndex index) {
  Representation representation = index.is_double()
      ? Representation::Double()
      : Representation::Tagged();
  int offset = index.offset();
  HObjectAccess access = index.is_inobject()
      ? HObjectAccess::ForObservableJSObjectOffset(offset, representation)
      : HObjectAccess::ForBackingStoreOffset(offset, representation);
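  // Unless the field is an unboxed in-object double, a double value lives in a
  // separate HeapNumber, so load through that box first.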
  if (index.is_double() &&
      (!FLAG_unbox_double_fields || !index.is_inobject())) {
    // Load the heap number.
    object = Add<HLoadNamedField>(
        object, nullptr, access.WithRepresentation(Representation::Tagged()));
    // Load the double value from it.
    access = HObjectAccess::ForHeapNumberValue();
  }
  return Add<HLoadNamedField>(object, nullptr, access);
}


template<>
HValue* CodeStubGraphBuilder<LoadFieldStub>::BuildCodeStub() {
  return BuildLoadNamedField(GetParameter(0), casted_stub()->index());
}


Handle<Code> LoadFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<LoadConstantStub>::BuildCodeStub() {
  HValue* map = AddLoadMap(GetParameter(0), NULL);
  HObjectAccess descriptors_access = HObjectAccess::ForObservableJSObjectOffset(
      Map::kDescriptorsOffset, Representation::Tagged());
  HValue* descriptors = Add<HLoadNamedField>(map, nullptr, descriptors_access);
  HObjectAccess value_access = HObjectAccess::ForObservableJSObjectOffset(
      DescriptorArray::GetValueOffset(casted_stub()->constant_index()));
  return Add<HLoadNamedField>(descriptors, nullptr, value_access);
}


Handle<Code> LoadConstantStub::GenerateCode() { return DoGenerateCode(this); }


HValue* CodeStubGraphBuilderBase::UnmappedCase(HValue* elements, HValue* key,
                                               HValue* value) {
  HValue* result = NULL;
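  // The unmapped (regular) arguments live in a FixedArray stored at slot 1 of
  // the sloppy-arguments elements array.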
  HInstruction* backing_store =
      Add<HLoadKeyed>(elements, graph()->GetConstant1(), nullptr, nullptr,
                      FAST_ELEMENTS, ALLOW_RETURN_HOLE);
  Add<HCheckMaps>(backing_store, isolate()->factory()->fixed_array_map());
  HValue* backing_store_length = Add<HLoadNamedField>(
      backing_store, nullptr, HObjectAccess::ForFixedArrayLength());
  IfBuilder in_unmapped_range(this);
  in_unmapped_range.If<HCompareNumericAndBranch>(key, backing_store_length,
                                                 Token::LT);
  in_unmapped_range.Then();
  {
    if (value == NULL) {
      result = Add<HLoadKeyed>(backing_store, key, nullptr, nullptr,
                               FAST_HOLEY_ELEMENTS, NEVER_RETURN_HOLE);
    } else {
      Add<HStoreKeyed>(backing_store, key, value, nullptr, FAST_HOLEY_ELEMENTS);
    }
  }
  in_unmapped_range.ElseDeopt(Deoptimizer::kOutsideOfRange);
  in_unmapped_range.End();
  return result;
}


HValue* CodeStubGraphBuilderBase::EmitKeyedSloppyArguments(HValue* receiver,
                                                           HValue* key,
                                                           HValue* value) {
  // Mapped arguments are actual arguments. Unmapped arguments are values added
  // to the arguments object after it was created for the call. Mapped arguments
  // are stored in the context at indexes given by elements[key + 2]. Unmapped
  // arguments are stored as regular indexed properties in the arguments array,
  // held at elements[1]. See NewSloppyArguments() in runtime.cc for a detailed
  // look at argument object construction.
  //
  // The sloppy arguments elements array has a special format:
  //
  // 0: context
  // 1: unmapped arguments array
  // 2: mapped_index0,
  // 3: mapped_index1,
  // ...
  //
  // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments).
  // If key + 2 >= elements.length then attempt to look in the unmapped
  // arguments array (given by elements[1]) and return the value at key, missing
  // to the runtime if the unmapped arguments array is not a fixed array or if
  // key >= unmapped_arguments_array.length.
  //
  // Otherwise, t = elements[key + 2]. If t is the hole, then look up the value
  // in the unmapped arguments array, as described above. Otherwise, t is a Smi
  // index into the context array given at elements[0]. Return the value at
  // context[t].

  bool is_load = value == NULL;

  key = AddUncasted<HForceRepresentation>(key, Representation::Smi());
  IfBuilder positive_smi(this);
  positive_smi.If<HCompareNumericAndBranch>(key, graph()->GetConstant0(),
                                            Token::LT);
  positive_smi.ThenDeopt(Deoptimizer::kKeyIsNegative);
  positive_smi.End();

  HValue* constant_two = Add<HConstant>(2);
  HValue* elements = AddLoadElements(receiver, nullptr);
  HValue* elements_length = Add<HLoadNamedField>(
      elements, nullptr, HObjectAccess::ForFixedArrayLength());
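  // Slots 0 and 1 hold the context and the unmapped arguments array, so only
  // length - 2 entries are actually mapped.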
  HValue* adjusted_length = AddUncasted<HSub>(elements_length, constant_two);
  IfBuilder in_range(this);
  in_range.If<HCompareNumericAndBranch>(key, adjusted_length, Token::LT);
  in_range.Then();
  {
    HValue* index = AddUncasted<HAdd>(key, constant_two);
    HInstruction* mapped_index =
        Add<HLoadKeyed>(elements, index, nullptr, nullptr, FAST_HOLEY_ELEMENTS,
                        ALLOW_RETURN_HOLE);

    IfBuilder is_valid(this);
    is_valid.IfNot<HCompareObjectEqAndBranch>(mapped_index,
                                              graph()->GetConstantHole());
    is_valid.Then();
    {
      // TODO(mvstanton): I'd like to assert from this point that, if the
      // mapped_index is not the hole, it is indeed a smi. An unnecessary
      // smi check is being emitted.
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001250 HValue* the_context = Add<HLoadKeyed>(elements, graph()->GetConstant0(),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001251 nullptr, nullptr, FAST_ELEMENTS);
1252 STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize);
1253 if (is_load) {
1254 HValue* result =
1255 Add<HLoadKeyed>(the_context, mapped_index, nullptr, nullptr,
1256 FAST_ELEMENTS, ALLOW_RETURN_HOLE);
1257 environment()->Push(result);
1258 } else {
1259 DCHECK(value != NULL);
1260 Add<HStoreKeyed>(the_context, mapped_index, value, nullptr,
1261 FAST_ELEMENTS);
1262 environment()->Push(value);
1263 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001264 }
1265 is_valid.Else();
1266 {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001267 HValue* result = UnmappedCase(elements, key, value);
1268 environment()->Push(is_load ? result : value);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001269 }
1270 is_valid.End();
1271 }
1272 in_range.Else();
1273 {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001274 HValue* result = UnmappedCase(elements, key, value);
1275 environment()->Push(is_load ? result : value);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001276 }
1277 in_range.End();
1278
1279 return environment()->Pop();
1280}
1281
1282
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001283template <>
1284HValue* CodeStubGraphBuilder<KeyedLoadSloppyArgumentsStub>::BuildCodeStub() {
1285 HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
1286 HValue* key = GetParameter(LoadDescriptor::kNameIndex);
1287
1288 return EmitKeyedSloppyArguments(receiver, key, NULL);
1289}
1290
1291
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001292Handle<Code> KeyedLoadSloppyArgumentsStub::GenerateCode() {
1293 return DoGenerateCode(this);
1294}
1295
1296
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001297template <>
1298HValue* CodeStubGraphBuilder<KeyedStoreSloppyArgumentsStub>::BuildCodeStub() {
1299 HValue* receiver = GetParameter(StoreDescriptor::kReceiverIndex);
1300 HValue* key = GetParameter(StoreDescriptor::kNameIndex);
1301 HValue* value = GetParameter(StoreDescriptor::kValueIndex);
1302
1303 return EmitKeyedSloppyArguments(receiver, key, value);
1304}
1305
1306
1307Handle<Code> KeyedStoreSloppyArgumentsStub::GenerateCode() {
1308 return DoGenerateCode(this);
1309}
1310
1311
void CodeStubGraphBuilderBase::BuildStoreNamedField(
    HValue* object, HValue* value, FieldIndex index,
    Representation representation, bool transition_to_field) {
  DCHECK(!index.is_double() || representation.IsDouble());
  int offset = index.offset();
  HObjectAccess access =
      index.is_inobject()
          ? HObjectAccess::ForObservableJSObjectOffset(offset, representation)
          : HObjectAccess::ForBackingStoreOffset(offset, representation);

  if (representation.IsDouble()) {
    if (!FLAG_unbox_double_fields || !index.is_inobject()) {
      HObjectAccess heap_number_access =
          access.WithRepresentation(Representation::Tagged());
      if (transition_to_field) {
        // The store requires a mutable HeapNumber to be allocated.
        NoObservableSideEffectsScope no_side_effects(this);
        HInstruction* heap_number_size = Add<HConstant>(HeapNumber::kSize);

        // TODO(hpayer): Allocation site pretenuring support.
        HInstruction* heap_number =
            Add<HAllocate>(heap_number_size, HType::HeapObject(), NOT_TENURED,
                           MUTABLE_HEAP_NUMBER_TYPE, graph()->GetConstant0());
        AddStoreMapConstant(heap_number,
                            isolate()->factory()->mutable_heap_number_map());
        Add<HStoreNamedField>(heap_number, HObjectAccess::ForHeapNumberValue(),
                              value);
        // Store the new mutable heap number into the object.
        access = heap_number_access;
        value = heap_number;
      } else {
        // Load the heap number.
        object = Add<HLoadNamedField>(object, nullptr, heap_number_access);
        // Store the double value into it.
        access = HObjectAccess::ForHeapNumberValue();
      }
    }
  } else if (representation.IsHeapObject()) {
    BuildCheckHeapObject(value);
  }

  Add<HStoreNamedField>(object, access, value, INITIALIZING_STORE);
}


template <>
HValue* CodeStubGraphBuilder<StoreFieldStub>::BuildCodeStub() {
  BuildStoreNamedField(GetParameter(0), GetParameter(2), casted_stub()->index(),
                       casted_stub()->representation(), false);
  return GetParameter(2);
}


Handle<Code> StoreFieldStub::GenerateCode() { return DoGenerateCode(this); }


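// The transitioning store below distinguishes three modes that deliberately
// fall through in the switch: ExtendStorageAndStoreMapAndValue first grows
// the out-of-object properties backing store, StoreMapAndValue then writes
// the field value, and StoreMapOnly finally installs the transition map. Only
// the steps required by the stub's store_mode() are emitted.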
template <>
HValue* CodeStubGraphBuilder<StoreTransitionStub>::BuildCodeStub() {
  HValue* object = GetParameter(StoreTransitionHelper::ReceiverIndex());
  HValue* value = GetParameter(StoreTransitionHelper::ValueIndex());
  StoreTransitionStub::StoreMode store_mode = casted_stub()->store_mode();

  if (store_mode != StoreTransitionStub::StoreMapOnly) {
    value = GetParameter(StoreTransitionHelper::ValueIndex());
    Representation representation = casted_stub()->representation();
    if (representation.IsDouble()) {
      // In case we are storing a double, ensure that the value is a double
      // before manipulating the properties backing store. Otherwise the actual
      // store may deopt, leaving the backing store in an overallocated state.
      value = AddUncasted<HForceRepresentation>(value, representation);
    }
  }

  switch (store_mode) {
    case StoreTransitionStub::ExtendStorageAndStoreMapAndValue: {
      HValue* properties = Add<HLoadNamedField>(
          object, nullptr, HObjectAccess::ForPropertiesPointer());
      HValue* length = AddLoadFixedArrayLength(properties);
      HValue* delta =
          Add<HConstant>(static_cast<int32_t>(JSObject::kFieldsAdded));
      HValue* new_capacity = AddUncasted<HAdd>(length, delta);

      // Grow properties array.
      ElementsKind kind = FAST_ELEMENTS;
      Add<HBoundsCheck>(new_capacity,
                        Add<HConstant>((Page::kMaxRegularHeapObjectSize -
                                        FixedArray::kHeaderSize) >>
                                       ElementsKindToShiftSize(kind)));

      // Reuse this code for properties backing store allocation.
      HValue* new_properties =
          BuildAllocateAndInitializeArray(kind, new_capacity);

      BuildCopyProperties(properties, new_properties, length, new_capacity);

      Add<HStoreNamedField>(object, HObjectAccess::ForPropertiesPointer(),
                            new_properties);
    }
    // Fall through.
    case StoreTransitionStub::StoreMapAndValue:
      // Store the new value into the "extended" object.
      BuildStoreNamedField(object, value, casted_stub()->index(),
                           casted_stub()->representation(), true);
    // Fall through.

    case StoreTransitionStub::StoreMapOnly:
      // And finally update the map.
      Add<HStoreNamedField>(object, HObjectAccess::ForMap(),
                            GetParameter(StoreTransitionHelper::MapIndex()));
      break;
  }
  return value;
}


Handle<Code> StoreTransitionStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<StoreFastElementStub>::BuildCodeStub() {
  BuildUncheckedMonomorphicElementAccess(
      GetParameter(StoreDescriptor::kReceiverIndex),
      GetParameter(StoreDescriptor::kNameIndex),
      GetParameter(StoreDescriptor::kValueIndex), casted_stub()->is_js_array(),
      casted_stub()->elements_kind(), STORE, NEVER_RETURN_HOLE,
      casted_stub()->store_mode());

  return GetParameter(2);
}


Handle<Code> StoreFastElementStub::GenerateCode() {
  return DoGenerateCode(this);
}


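// Elements-kind transitions that are not simple map changes (e.g. moving from
// a smi/object kind to a double kind) must reallocate the elements backing
// store; the builder below grows the capacity in that case and always
// finishes by installing the new map. Allocation mementos are trapped first
// when the transition is tracked by allocation sites.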
template <>
HValue* CodeStubGraphBuilder<TransitionElementsKindStub>::BuildCodeStub() {
  ElementsKind const from_kind = casted_stub()->from_kind();
  ElementsKind const to_kind = casted_stub()->to_kind();
  HValue* const object = GetParameter(0);
  HValue* const map = GetParameter(1);

  // The {object} is known to be a JSObject (otherwise it wouldn't have
  // elements anyway).
  object->set_type(HType::JSObject());

  info()->MarkAsSavesCallerDoubles();

  DCHECK_IMPLIES(IsFastHoleyElementsKind(from_kind),
                 IsFastHoleyElementsKind(to_kind));

  if (AllocationSite::GetMode(from_kind, to_kind) == TRACK_ALLOCATION_SITE) {
    Add<HTrapAllocationMemento>(object);
  }

  if (!IsSimpleMapChangeTransition(from_kind, to_kind)) {
    HInstruction* elements = AddLoadElements(object);

    IfBuilder if_objecthaselements(this);
    if_objecthaselements.IfNot<HCompareObjectEqAndBranch>(
        elements, Add<HConstant>(isolate()->factory()->empty_fixed_array()));
    if_objecthaselements.Then();
    {
      // Determine the elements capacity.
      HInstruction* elements_length = AddLoadFixedArrayLength(elements);

      // Determine the effective (array) length.
      IfBuilder if_objectisarray(this);
      if_objectisarray.If<HHasInstanceTypeAndBranch>(object, JS_ARRAY_TYPE);
      if_objectisarray.Then();
      {
        // The {object} is a JSArray, load the special "length" property.
        Push(Add<HLoadNamedField>(object, nullptr,
                                  HObjectAccess::ForArrayLength(from_kind)));
      }
      if_objectisarray.Else();
      {
        // The {object} is some other JSObject.
        Push(elements_length);
      }
      if_objectisarray.End();
      HValue* length = Pop();

      BuildGrowElementsCapacity(object, elements, from_kind, to_kind, length,
                                elements_length);
    }
    if_objecthaselements.End();
  }

  Add<HStoreNamedField>(object, HObjectAccess::ForMap(), map);

  return object;
}


Handle<Code> TransitionElementsKindStub::GenerateCode() {
  return DoGenerateCode(this);
}

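// The generic binary-op IC below special-cases Token::ADD when only one side
// might be a string: it branches on the actual operand type at runtime so
// that string concatenation can take the string fast path, while the other
// branch falls back to the generic operation recorded in the BinaryOpICState.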
template <>
HValue* CodeStubGraphBuilder<BinaryOpICStub>::BuildCodeInitializedStub() {
  BinaryOpICState state = casted_stub()->state();

  HValue* left = GetParameter(BinaryOpICStub::kLeft);
  HValue* right = GetParameter(BinaryOpICStub::kRight);

  Type* left_type = state.GetLeftType();
  Type* right_type = state.GetRightType();
  Type* result_type = state.GetResultType();

  DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) &&
         (state.HasSideEffects() || !result_type->Is(Type::None())));

  HValue* result = NULL;
  HAllocationMode allocation_mode(NOT_TENURED);
  if (state.op() == Token::ADD &&
      (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) &&
      !left_type->Is(Type::String()) && !right_type->Is(Type::String())) {
    // For the generic add stub a fast case for string addition is performance
    // critical.
    if (left_type->Maybe(Type::String())) {
      IfBuilder if_leftisstring(this);
      if_leftisstring.If<HIsStringAndBranch>(left);
      if_leftisstring.Then();
      {
        Push(BuildBinaryOperation(state.op(), left, right, Type::String(),
                                  right_type, result_type,
                                  state.fixed_right_arg(), allocation_mode));
      }
      if_leftisstring.Else();
      {
        Push(BuildBinaryOperation(state.op(), left, right, left_type,
                                  right_type, result_type,
                                  state.fixed_right_arg(), allocation_mode));
      }
      if_leftisstring.End();
      result = Pop();
    } else {
      IfBuilder if_rightisstring(this);
      if_rightisstring.If<HIsStringAndBranch>(right);
      if_rightisstring.Then();
      {
        Push(BuildBinaryOperation(state.op(), left, right, left_type,
                                  Type::String(), result_type,
                                  state.fixed_right_arg(), allocation_mode));
      }
      if_rightisstring.Else();
      {
        Push(BuildBinaryOperation(state.op(), left, right, left_type,
                                  right_type, result_type,
                                  state.fixed_right_arg(), allocation_mode));
      }
      if_rightisstring.End();
      result = Pop();
    }
  } else {
    result = BuildBinaryOperation(state.op(), left, right, left_type,
                                  right_type, result_type,
                                  state.fixed_right_arg(), allocation_mode);
  }

  // If we encounter a generic argument, the number conversion is
  // observable, thus we cannot afford to bail out after the fact.
  if (!state.HasSideEffects()) {
    result = EnforceNumberType(result, result_type);
  }

  return result;
}


Handle<Code> BinaryOpICStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<BinaryOpWithAllocationSiteStub>::BuildCodeStub() {
  BinaryOpICState state = casted_stub()->state();

  HValue* allocation_site = GetParameter(
      BinaryOpWithAllocationSiteStub::kAllocationSite);
  HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft);
  HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight);

  Type* left_type = state.GetLeftType();
  Type* right_type = state.GetRightType();
  Type* result_type = state.GetResultType();
  HAllocationMode allocation_mode(allocation_site);

  return BuildBinaryOperation(state.op(), left, right, left_type, right_type,
                              result_type, state.fixed_right_arg(),
                              allocation_mode);
}


Handle<Code> BinaryOpWithAllocationSiteStub::GenerateCode() {
  return DoGenerateCode(this);
}


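// BuildToString implements the ToString conversion used by string addition:
// smis are converted with BuildNumberToString, strings pass through
// unchanged, other primitives are handed to the ToStringStub, and receivers
// are first converted with BuildToPrimitive (ES6 12.7.3) before being
// stringified.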
HValue* CodeStubGraphBuilderBase::BuildToString(HValue* input, bool convert) {
  if (!convert) return BuildCheckString(input);
  IfBuilder if_inputissmi(this);
  HValue* inputissmi = if_inputissmi.If<HIsSmiAndBranch>(input);
  if_inputissmi.Then();
  {
    // Convert the input smi to a string.
    Push(BuildNumberToString(input, Type::SignedSmall()));
  }
  if_inputissmi.Else();
  {
    HValue* input_map =
        Add<HLoadNamedField>(input, inputissmi, HObjectAccess::ForMap());
    HValue* input_instance_type = Add<HLoadNamedField>(
        input_map, inputissmi, HObjectAccess::ForMapInstanceType());
    IfBuilder if_inputisstring(this);
    if_inputisstring.If<HCompareNumericAndBranch>(
        input_instance_type, Add<HConstant>(FIRST_NONSTRING_TYPE), Token::LT);
    if_inputisstring.Then();
    {
      // The input is already a string.
      Push(input);
    }
    if_inputisstring.Else();
    {
      // Convert to primitive first (if necessary), see
      // ES6 section 12.7.3 The Addition operator.
      IfBuilder if_inputisprimitive(this);
      STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE);
      if_inputisprimitive.If<HCompareNumericAndBranch>(
          input_instance_type, Add<HConstant>(LAST_PRIMITIVE_TYPE), Token::LTE);
      if_inputisprimitive.Then();
      {
        // The input is already a primitive.
        Push(input);
      }
      if_inputisprimitive.Else();
      {
        // Convert the input to a primitive.
        Push(BuildToPrimitive(input, input_map));
      }
      if_inputisprimitive.End();
      // Convert the primitive to a string value.
      ToStringStub stub(isolate());
      HValue* values[] = {context(), Pop()};
      Push(AddUncasted<HCallWithDescriptor>(Add<HConstant>(stub.GetCode()), 0,
                                            stub.GetCallInterfaceDescriptor(),
                                            ArrayVector(values)));
    }
    if_inputisstring.End();
  }
  if_inputissmi.End();
  return Pop();
}


HValue* CodeStubGraphBuilderBase::BuildToPrimitive(HValue* input,
                                                   HValue* input_map) {
  // Get the native context of the caller.
  HValue* native_context = BuildGetNativeContext();

  // Determine the initial map of the %ObjectPrototype%.
  HValue* object_function_prototype_map =
      Add<HLoadNamedField>(native_context, nullptr,
                           HObjectAccess::ForContextSlot(
                               Context::OBJECT_FUNCTION_PROTOTYPE_MAP_INDEX));

  // Determine the initial map of the %StringPrototype%.
  HValue* string_function_prototype_map =
      Add<HLoadNamedField>(native_context, nullptr,
                           HObjectAccess::ForContextSlot(
                               Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));

  // Determine the initial map of the String function.
  HValue* string_function = Add<HLoadNamedField>(
      native_context, nullptr,
      HObjectAccess::ForContextSlot(Context::STRING_FUNCTION_INDEX));
  HValue* string_function_initial_map = Add<HLoadNamedField>(
      string_function, nullptr, HObjectAccess::ForPrototypeOrInitialMap());

  // Determine the map of the [[Prototype]] of {input}.
  HValue* input_prototype =
      Add<HLoadNamedField>(input_map, nullptr, HObjectAccess::ForPrototype());
  HValue* input_prototype_map =
      Add<HLoadNamedField>(input_prototype, nullptr, HObjectAccess::ForMap());

  // For string wrappers (JSValue instances with [[StringData]] internal
  // fields), we can short-circuit the ToPrimitive if
  //
  // (a) the {input} map matches the initial map of the String function,
  // (b) the {input} [[Prototype]] is the unmodified %StringPrototype% (i.e.
  //     no one monkey-patched toString, @@toPrimitive or valueOf), and
  // (c) the %ObjectPrototype% (i.e. the [[Prototype]] of the
  //     %StringPrototype%) is also unmodified, that is no one sneaked a
  //     @@toPrimitive into the %ObjectPrototype%.
  //
  // If all these assumptions hold, we can just take the [[StringData]] value
  // and return it.
  // TODO(bmeurer): This just repairs a regression introduced by removing the
  // weird (and broken) intrinsic %_IsStringWrapperSafeForDefaultValue, which
  // was intended to do something similar to this, although less efficient and
  // wrong in the presence of @@toPrimitive. Long-term we might want to move
  // in the direction of having a ToPrimitiveStub that can do common cases
  // while staying in JavaScript land (i.e. not going to C++).
  IfBuilder if_inputisstringwrapper(this);
  if_inputisstringwrapper.If<HCompareObjectEqAndBranch>(
      input_map, string_function_initial_map);
  if_inputisstringwrapper.And();
  if_inputisstringwrapper.If<HCompareObjectEqAndBranch>(
      input_prototype_map, string_function_prototype_map);
  if_inputisstringwrapper.And();
  if_inputisstringwrapper.If<HCompareObjectEqAndBranch>(
      Add<HLoadNamedField>(Add<HLoadNamedField>(input_prototype_map, nullptr,
                                                HObjectAccess::ForPrototype()),
                           nullptr, HObjectAccess::ForMap()),
      object_function_prototype_map);
  if_inputisstringwrapper.Then();
  {
    Push(BuildLoadNamedField(
        input, FieldIndex::ForInObjectOffset(JSValue::kValueOffset)));
  }
  if_inputisstringwrapper.Else();
  {
    // TODO(bmeurer): Add support for fast ToPrimitive conversion using
    // a dedicated ToPrimitiveStub.
    Add<HPushArguments>(input);
    Push(Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kToPrimitive), 1));
  }
  if_inputisstringwrapper.End();
  return Pop();
}


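// The StringAddStub only emits the ToString conversions that its flags ask
// for: STRING_ADD_CHECK_LEFT / STRING_ADD_CHECK_RIGHT guard the respective
// operand, and STRING_ADD_CONVERT decides whether a non-string operand is
// converted (BuildToString) or merely checked (BuildCheckString).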
template <>
HValue* CodeStubGraphBuilder<StringAddStub>::BuildCodeInitializedStub() {
  StringAddStub* stub = casted_stub();
  StringAddFlags flags = stub->flags();
  PretenureFlag pretenure_flag = stub->pretenure_flag();

  HValue* left = GetParameter(StringAddStub::kLeft);
  HValue* right = GetParameter(StringAddStub::kRight);

  // Make sure that both arguments are strings if not known in advance.
  if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) {
    left =
        BuildToString(left, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT);
  }
  if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) {
    right = BuildToString(right,
                          (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT);
  }

  return BuildStringAdd(left, right, HAllocationMode(pretenure_flag));
}


Handle<Code> StringAddStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<ToBooleanICStub>::BuildCodeInitializedStub() {
  ToBooleanICStub* stub = casted_stub();
  IfBuilder if_true(this);
  if_true.If<HBranch>(GetParameter(0), stub->types());
  if_true.Then();
  if_true.Return(graph()->GetConstantTrue());
  if_true.Else();
  if_true.End();
  return graph()->GetConstantFalse();
}

Handle<Code> ToBooleanICStub::GenerateCode() { return DoGenerateCode(this); }

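// StoreGlobalStub is specialized per property cell: the weak-cell constants
// below are created with placeholder values that are expected to be patched
// with the actual global map and property cell when the stub is instantiated
// for a concrete global. A hole in the cell means it has been invalidated, so
// the store deopts and is left to the runtime.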
template <>
HValue* CodeStubGraphBuilder<StoreGlobalStub>::BuildCodeInitializedStub() {
  StoreGlobalStub* stub = casted_stub();
  HParameter* value = GetParameter(StoreDescriptor::kValueIndex);
  if (stub->check_global()) {
    // Check that the map of the global has not changed: use a placeholder map
    // that will be replaced later with the global object's map.
    HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex);
    HValue* proxy_map =
        Add<HLoadNamedField>(proxy, nullptr, HObjectAccess::ForMap());
    HValue* global =
        Add<HLoadNamedField>(proxy_map, nullptr, HObjectAccess::ForPrototype());
    HValue* map_cell = Add<HConstant>(isolate()->factory()->NewWeakCell(
        StoreGlobalStub::global_map_placeholder(isolate())));
    HValue* expected_map = Add<HLoadNamedField>(
        map_cell, nullptr, HObjectAccess::ForWeakCellValue());
    HValue* map =
        Add<HLoadNamedField>(global, nullptr, HObjectAccess::ForMap());
    IfBuilder map_check(this);
    map_check.IfNot<HCompareObjectEqAndBranch>(expected_map, map);
    map_check.ThenDeopt(Deoptimizer::kUnknownMap);
    map_check.End();
  }

  HValue* weak_cell = Add<HConstant>(isolate()->factory()->NewWeakCell(
      StoreGlobalStub::property_cell_placeholder(isolate())));
  HValue* cell = Add<HLoadNamedField>(weak_cell, nullptr,
                                      HObjectAccess::ForWeakCellValue());
  Add<HCheckHeapObject>(cell);
  HObjectAccess access = HObjectAccess::ForPropertyCellValue();
  // Load the payload of the global parameter cell. A hole indicates that the
  // cell has been invalidated and that the store must be handled by the
  // runtime.
  HValue* cell_contents = Add<HLoadNamedField>(cell, nullptr, access);

  auto cell_type = stub->cell_type();
  if (cell_type == PropertyCellType::kConstant ||
      cell_type == PropertyCellType::kUndefined) {
    // This is always valid for all states a cell can be in.
    IfBuilder builder(this);
    builder.If<HCompareObjectEqAndBranch>(cell_contents, value);
    builder.Then();
    builder.ElseDeopt(
        Deoptimizer::kUnexpectedCellContentsInConstantGlobalStore);
    builder.End();
  } else {
    IfBuilder builder(this);
    HValue* hole_value = graph()->GetConstantHole();
    builder.If<HCompareObjectEqAndBranch>(cell_contents, hole_value);
    builder.Then();
    builder.Deopt(Deoptimizer::kUnexpectedCellContentsInGlobalStore);
    builder.Else();
    // When dealing with constant types, the type may be allowed to change, as
    // long as optimized code remains valid.
    if (cell_type == PropertyCellType::kConstantType) {
      switch (stub->constant_type()) {
        case PropertyCellConstantType::kSmi:
          access = access.WithRepresentation(Representation::Smi());
          break;
        case PropertyCellConstantType::kStableMap: {
          // It is sufficient here to check that the value and cell contents
          // have identical maps, no matter if they are stable or not or if
          // they are the maps that were originally in the cell or not.
          // Optimized code will deopt when a cell has an unstable map, and if
          // it has a dependency on a stable map, it will deopt if the map
          // destabilizes.
          Add<HCheckHeapObject>(value);
          Add<HCheckHeapObject>(cell_contents);
          HValue* expected_map = Add<HLoadNamedField>(cell_contents, nullptr,
                                                      HObjectAccess::ForMap());
          HValue* map =
              Add<HLoadNamedField>(value, nullptr, HObjectAccess::ForMap());
          IfBuilder map_check(this);
          map_check.IfNot<HCompareObjectEqAndBranch>(expected_map, map);
          map_check.ThenDeopt(Deoptimizer::kUnknownMap);
          map_check.End();
          access = access.WithRepresentation(Representation::HeapObject());
          break;
        }
      }
    }
    Add<HStoreNamedField>(cell, access, value);
    builder.End();
  }

  return value;
}


Handle<Code> StoreGlobalStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<ElementsTransitionAndStoreStub>::BuildCodeStub() {
  HValue* object = GetParameter(StoreTransitionHelper::ReceiverIndex());
  HValue* key = GetParameter(StoreTransitionHelper::NameIndex());
  HValue* value = GetParameter(StoreTransitionHelper::ValueIndex());
  HValue* map = GetParameter(StoreTransitionHelper::MapIndex());

  if (FLAG_trace_elements_transitions) {
    // Tracing elements transitions is the job of the runtime.
    Add<HDeoptimize>(Deoptimizer::kTracingElementsTransitions,
                     Deoptimizer::EAGER);
  } else {
    info()->MarkAsSavesCallerDoubles();

    BuildTransitionElementsKind(object, map,
                                casted_stub()->from_kind(),
                                casted_stub()->to_kind(),
                                casted_stub()->is_jsarray());

    BuildUncheckedMonomorphicElementAccess(object, key, value,
                                           casted_stub()->is_jsarray(),
                                           casted_stub()->to_kind(),
                                           STORE, ALLOW_RETURN_HOLE,
                                           casted_stub()->store_mode());
  }

  return value;
}


Handle<Code> ElementsTransitionAndStoreStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<ToObjectStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(TypeConversionDescriptor::kArgumentIndex);
  return BuildToObject(receiver);
}


Handle<Code> ToObjectStub::GenerateCode() { return DoGenerateCode(this); }


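// The fast-path closure allocation below creates the JSFunction in new space,
// picks the function map matching the stub's language mode and function kind
// from the native context, fills the remaining fields with their initial
// values, and wires the code entry to the CompileLazy builtin so the function
// is compiled on first call.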
template<>
HValue* CodeStubGraphBuilder<FastNewClosureStub>::BuildCodeStub() {
  Counters* counters = isolate()->counters();
  Factory* factory = isolate()->factory();
  HInstruction* empty_fixed_array =
      Add<HConstant>(factory->empty_fixed_array());
  HInstruction* empty_literals_array =
      Add<HConstant>(factory->empty_literals_array());
  HValue* shared_info = GetParameter(0);

  AddIncrementCounter(counters->fast_new_closure_total());

  // Create a new closure from the given function info in new space
  HValue* size = Add<HConstant>(JSFunction::kSize);
  HInstruction* js_function =
      Add<HAllocate>(size, HType::JSObject(), NOT_TENURED, JS_FUNCTION_TYPE,
                     graph()->GetConstant0());

  int map_index = Context::FunctionMapIndex(casted_stub()->language_mode(),
                                            casted_stub()->kind());

  // Compute the function map in the current native context and set that
  // as the map of the allocated object.
  HInstruction* native_context = BuildGetNativeContext();
  HInstruction* map_slot_value = Add<HLoadNamedField>(
      native_context, nullptr, HObjectAccess::ForContextSlot(map_index));
  Add<HStoreNamedField>(js_function, HObjectAccess::ForMap(), map_slot_value);

  // Initialize the rest of the function.
  Add<HStoreNamedField>(js_function, HObjectAccess::ForPropertiesPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForElementsPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(),
                        empty_literals_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForPrototypeOrInitialMap(),
                        graph()->GetConstantHole());
  Add<HStoreNamedField>(
      js_function, HObjectAccess::ForSharedFunctionInfoPointer(), shared_info);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForFunctionContextPointer(),
                        context());

  Handle<Code> lazy_builtin(
      isolate()->builtins()->builtin(Builtins::kCompileLazy));
  HConstant* lazy = Add<HConstant>(lazy_builtin);
  Add<HStoreCodeEntry>(js_function, lazy);
  Add<HStoreNamedField>(js_function,
                        HObjectAccess::ForNextFunctionLinkPointer(),
                        graph()->GetConstantUndefined());

  return js_function;
}


Handle<Code> FastNewClosureStub::GenerateCode() {
  return DoGenerateCode(this);
}


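// The fast-path function context allocation below reserves MIN_CONTEXT_SLOTS
// plus the requested number of slots, stores the closure, previous context
// and (hole) extension into the fixed slots, copies the native context slot
// from the current context, and initializes every remaining slot with
// undefined.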
template<>
HValue* CodeStubGraphBuilder<FastNewContextStub>::BuildCodeStub() {
  int length = casted_stub()->slots() + Context::MIN_CONTEXT_SLOTS;

  // Get the function.
  HParameter* function = GetParameter(FastNewContextStub::kFunction);

  // Allocate the context in new space.
  HAllocate* function_context = Add<HAllocate>(
      Add<HConstant>(length * kPointerSize + FixedArray::kHeaderSize),
      HType::HeapObject(), NOT_TENURED, FIXED_ARRAY_TYPE,
      graph()->GetConstant0());

  // Set up the object header.
  AddStoreMapConstant(function_context,
                      isolate()->factory()->function_context_map());
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForFixedArrayLength(),
                        Add<HConstant>(length));

  // Set up the fixed slots.
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::CLOSURE_INDEX),
                        function);
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX),
                        context());
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::EXTENSION_INDEX),
                        graph()->GetConstantHole());

  // Copy the native context from the previous context.
  HValue* native_context = Add<HLoadNamedField>(
      context(), nullptr,
      HObjectAccess::ForContextSlot(Context::NATIVE_CONTEXT_INDEX));
  Add<HStoreNamedField>(function_context, HObjectAccess::ForContextSlot(
                                              Context::NATIVE_CONTEXT_INDEX),
                        native_context);

  // Initialize the rest of the slots to undefined.
  for (int i = Context::MIN_CONTEXT_SLOTS; i < length; ++i) {
    Add<HStoreNamedField>(function_context,
                          HObjectAccess::ForContextSlot(i),
                          graph()->GetConstantUndefined());
  }

  return function_context;
}


Handle<Code> FastNewContextStub::GenerateCode() {
  return DoGenerateCode(this);
}


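// Dictionary-mode element loads hash the (smi) key with BuildElementIndexHash
// and probe the receiver's dictionary backing store via
// BuildUncheckedDictionaryElementLoad; the same helper is reused by the
// generic keyed load stub further down.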
template <>
HValue* CodeStubGraphBuilder<LoadDictionaryElementStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
  HValue* key = GetParameter(LoadDescriptor::kNameIndex);

  Add<HCheckSmi>(key);

  HValue* elements = AddLoadElements(receiver);

  HValue* hash = BuildElementIndexHash(key);

  return BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash);
}


Handle<Code> LoadDictionaryElementStub::GenerateCode() {
  return DoGenerateCode(this);
}


template<>
HValue* CodeStubGraphBuilder<RegExpConstructResultStub>::BuildCodeStub() {
  // Determine the parameters.
  HValue* length = GetParameter(RegExpConstructResultStub::kLength);
  HValue* index = GetParameter(RegExpConstructResultStub::kIndex);
  HValue* input = GetParameter(RegExpConstructResultStub::kInput);

  // TODO(turbofan): This code stub regressed at some point to needing a frame
  // on ia32, and this wasn't caught since the stub isn't built in the
  // snapshot. We should probably just replace it with a TurboFan stub rather
  // than fixing it.
#if !(V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X87)
  info()->MarkMustNotHaveEagerFrame();
#endif

  return BuildRegExpConstructResult(length, index, input);
}


Handle<Code> RegExpConstructResultStub::GenerateCode() {
  return DoGenerateCode(this);
}


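// The generic keyed load stub dispatches on the key: integer-like keys walk a
// cascade of elements-kind checks (fast, fast double, dictionary, sloppy
// arguments), unhandled kinds deoptimize, and unique-string keys are looked
// up in the receiver's property dictionary (the fast-properties branch below
// is marked UNREACHABLE and kept only until it is replaced by the megamorphic
// stub cache, per the TODO).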
template <>
class CodeStubGraphBuilder<KeyedLoadGenericStub>
    : public CodeStubGraphBuilderBase {
 public:
  explicit CodeStubGraphBuilder(CompilationInfo* info, CodeStub* stub)
      : CodeStubGraphBuilderBase(info, stub) {}

 protected:
  virtual HValue* BuildCodeStub();

  void BuildElementsKindLimitCheck(HGraphBuilder::IfBuilder* if_builder,
                                   HValue* bit_field2,
                                   ElementsKind kind);

  void BuildFastElementLoad(HGraphBuilder::IfBuilder* if_builder,
                            HValue* receiver,
                            HValue* key,
                            HValue* instance_type,
                            HValue* bit_field2,
                            ElementsKind kind);

  KeyedLoadGenericStub* casted_stub() {
    return static_cast<KeyedLoadGenericStub*>(stub());
  }
};


void CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildElementsKindLimitCheck(
    HGraphBuilder::IfBuilder* if_builder, HValue* bit_field2,
    ElementsKind kind) {
  ElementsKind next_kind = static_cast<ElementsKind>(kind + 1);
  HValue* kind_limit = Add<HConstant>(
      static_cast<int>(Map::ElementsKindBits::encode(next_kind)));

  if_builder->If<HCompareNumericAndBranch>(bit_field2, kind_limit, Token::LT);
  if_builder->Then();
}


void CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildFastElementLoad(
    HGraphBuilder::IfBuilder* if_builder, HValue* receiver, HValue* key,
    HValue* instance_type, HValue* bit_field2, ElementsKind kind) {
  BuildElementsKindLimitCheck(if_builder, bit_field2, kind);

  IfBuilder js_array_check(this);
  js_array_check.If<HCompareNumericAndBranch>(
      instance_type, Add<HConstant>(JS_ARRAY_TYPE), Token::EQ);
  js_array_check.Then();
  Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
                                              true, kind,
                                              LOAD, NEVER_RETURN_HOLE,
                                              STANDARD_STORE));
  js_array_check.Else();
  Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
                                              false, kind,
                                              LOAD, NEVER_RETURN_HOLE,
                                              STANDARD_STORE));
  js_array_check.End();
}


HValue* CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
  HValue* key = GetParameter(LoadDescriptor::kNameIndex);
  // Split into a smi/integer case and unique string case.
  HIfContinuation index_name_split_continuation(graph()->CreateBasicBlock(),
                                                graph()->CreateBasicBlock());

  BuildKeyedIndexCheck(key, &index_name_split_continuation);

  IfBuilder index_name_split(this, &index_name_split_continuation);
  index_name_split.Then();
  {
    // Key is an index (number)
    key = Pop();

    int bit_field_mask = (1 << Map::kIsAccessCheckNeeded) |
                         (1 << Map::kHasIndexedInterceptor);
    BuildJSObjectCheck(receiver, bit_field_mask);

    HValue* map =
        Add<HLoadNamedField>(receiver, nullptr, HObjectAccess::ForMap());

    HValue* instance_type =
        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapInstanceType());

    HValue* bit_field2 =
        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField2());

    IfBuilder kind_if(this);
    BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                         FAST_HOLEY_ELEMENTS);

    kind_if.Else();
    {
      BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                           FAST_HOLEY_DOUBLE_ELEMENTS);
    }
    kind_if.Else();

    // The DICTIONARY_ELEMENTS check generates a "kind_if.Then"
    BuildElementsKindLimitCheck(&kind_if, bit_field2, DICTIONARY_ELEMENTS);
    {
      HValue* elements = AddLoadElements(receiver);

      HValue* hash = BuildElementIndexHash(key);

      Push(BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash));
    }
    kind_if.Else();

    // The SLOW_SLOPPY_ARGUMENTS_ELEMENTS check generates a "kind_if.Then"
    STATIC_ASSERT(FAST_SLOPPY_ARGUMENTS_ELEMENTS <
                  SLOW_SLOPPY_ARGUMENTS_ELEMENTS);
    BuildElementsKindLimitCheck(&kind_if, bit_field2,
                                SLOW_SLOPPY_ARGUMENTS_ELEMENTS);
    // Non-strict elements are not handled.
    Add<HDeoptimize>(Deoptimizer::kNonStrictElementsInKeyedLoadGenericStub,
                     Deoptimizer::EAGER);
    Push(graph()->GetConstant0());

    kind_if.ElseDeopt(
        Deoptimizer::kElementsKindUnhandledInKeyedLoadGenericStub);

    kind_if.End();
  }
  index_name_split.Else();
  {
    // Key is a unique string.
    key = Pop();

    int bit_field_mask = (1 << Map::kIsAccessCheckNeeded) |
                         (1 << Map::kHasNamedInterceptor);
    BuildJSObjectCheck(receiver, bit_field_mask);

    HIfContinuation continuation;
    BuildTestForDictionaryProperties(receiver, &continuation);
    IfBuilder if_dict_properties(this, &continuation);
    if_dict_properties.Then();
    {
      // Key is string, properties are dictionary mode
      BuildNonGlobalObjectCheck(receiver);

      HValue* properties = Add<HLoadNamedField>(
          receiver, nullptr, HObjectAccess::ForPropertiesPointer());

      HValue* hash =
          Add<HLoadNamedField>(key, nullptr, HObjectAccess::ForNameHashField());

      hash = AddUncasted<HShr>(hash, Add<HConstant>(Name::kHashShift));

      HValue* value =
          BuildUncheckedDictionaryElementLoad(receiver, properties, key, hash);
      Push(value);
    }
    if_dict_properties.Else();
    {
      // TODO(dcarney): don't use keyed lookup cache, but convert to use
      // megamorphic stub cache.
      UNREACHABLE();
      // Key is string, properties are fast mode
      HValue* hash = BuildKeyedLookupCacheHash(receiver, key);

      ExternalReference cache_keys_ref =
          ExternalReference::keyed_lookup_cache_keys(isolate());
      HValue* cache_keys = Add<HConstant>(cache_keys_ref);

      HValue* map =
          Add<HLoadNamedField>(receiver, nullptr, HObjectAccess::ForMap());
      HValue* base_index = AddUncasted<HMul>(hash, Add<HConstant>(2));
      base_index->ClearFlag(HValue::kCanOverflow);

      HIfContinuation inline_or_runtime_continuation(
          graph()->CreateBasicBlock(), graph()->CreateBasicBlock());
      {
        IfBuilder lookup_ifs[KeyedLookupCache::kEntriesPerBucket];
        for (int probe = 0; probe < KeyedLookupCache::kEntriesPerBucket;
             ++probe) {
          IfBuilder* lookup_if = &lookup_ifs[probe];
          lookup_if->Initialize(this);
          int probe_base = probe * KeyedLookupCache::kEntryLength;
          HValue* map_index = AddUncasted<HAdd>(
              base_index,
              Add<HConstant>(probe_base + KeyedLookupCache::kMapIndex));
          map_index->ClearFlag(HValue::kCanOverflow);
          HValue* key_index = AddUncasted<HAdd>(
              base_index,
              Add<HConstant>(probe_base + KeyedLookupCache::kKeyIndex));
          key_index->ClearFlag(HValue::kCanOverflow);
          HValue* map_to_check =
              Add<HLoadKeyed>(cache_keys, map_index, nullptr, nullptr,
                              FAST_ELEMENTS, NEVER_RETURN_HOLE, 0);
          lookup_if->If<HCompareObjectEqAndBranch>(map_to_check, map);
          lookup_if->And();
          HValue* key_to_check =
              Add<HLoadKeyed>(cache_keys, key_index, nullptr, nullptr,
                              FAST_ELEMENTS, NEVER_RETURN_HOLE, 0);
          lookup_if->If<HCompareObjectEqAndBranch>(key_to_check, key);
          lookup_if->Then();
          {
            ExternalReference cache_field_offsets_ref =
                ExternalReference::keyed_lookup_cache_field_offsets(isolate());
            HValue* cache_field_offsets =
                Add<HConstant>(cache_field_offsets_ref);
            HValue* index = AddUncasted<HAdd>(hash, Add<HConstant>(probe));
            index->ClearFlag(HValue::kCanOverflow);
            HValue* property_index =
                Add<HLoadKeyed>(cache_field_offsets, index, nullptr, cache_keys,
                                INT32_ELEMENTS, NEVER_RETURN_HOLE, 0);
            Push(property_index);
          }
          lookup_if->Else();
        }
        for (int i = 0; i < KeyedLookupCache::kEntriesPerBucket; ++i) {
          lookup_ifs[i].JoinContinuation(&inline_or_runtime_continuation);
        }
      }

      IfBuilder inline_or_runtime(this, &inline_or_runtime_continuation);
      inline_or_runtime.Then();
      {
        // Found a cached index, load property inline.
        Push(Add<HLoadFieldByIndex>(receiver, Pop()));
      }
      inline_or_runtime.Else();
      {
        // KeyedLookupCache miss; call runtime.
        Add<HPushArguments>(receiver, key);
        Push(Add<HCallRuntime>(
            Runtime::FunctionForId(Runtime::kKeyedGetProperty), 2));
      }
      inline_or_runtime.End();
    }
    if_dict_properties.End();
  }
  index_name_split.End();

  return Pop();
}


Handle<Code> KeyedLoadGenericStub::GenerateCode() {
  return DoGenerateCode(this);
}

}  // namespace internal
}  // namespace v8