blob: 6680e66dc3d5fcbe897f7d8aea415b6889edb9d6 [file] [log] [blame]
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001// Copyright 2012 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005#include "src/code-stubs.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +00006
7#include "src/bailout-reason.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008#include "src/crankshaft/hydrogen.h"
9#include "src/crankshaft/lithium.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000010#include "src/field-index.h"
Emily Bernierd0a1eb72015-03-24 16:35:39 -040011#include "src/ic/ic.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000012
13namespace v8 {
14namespace internal {
15
16
17static LChunk* OptimizeGraph(HGraph* graph) {
18 DisallowHeapAllocation no_allocation;
19 DisallowHandleAllocation no_handles;
20 DisallowHandleDereference no_deref;
21
22 DCHECK(graph != NULL);
23 BailoutReason bailout_reason = kNoReason;
24 if (!graph->Optimize(&bailout_reason)) {
25 FATAL(GetBailoutReason(bailout_reason));
26 }
27 LChunk* chunk = LChunk::NewChunk(graph);
28 if (chunk == NULL) {
29 FATAL(GetBailoutReason(graph->info()->bailout_reason()));
30 }
31 return chunk;
32}
33
34
// Base Hydrogen graph builder for code stubs.  It materializes the stub's
// parameters (register- and stack-passed) as HParameter instructions from
// the stub's call interface descriptor, and provides shared graph-building
// helpers used by the CodeStubGraphBuilder<Stub> specializations below.
class CodeStubGraphBuilderBase : public HGraphBuilder {
 public:
  explicit CodeStubGraphBuilderBase(CompilationInfo* info, CodeStub* code_stub)
      : HGraphBuilder(info, code_stub->GetCallInterfaceDescriptor()),
        arguments_length_(NULL),
        info_(info),
        code_stub_(code_stub),
        descriptor_(code_stub),
        context_(NULL) {
    // One slot per declared parameter; the slots are filled by BuildGraph().
    int parameter_count = GetParameterCount();
    parameters_.Reset(new HParameter*[parameter_count]);
  }
  // Builds the complete Hydrogen graph for the stub; returns true on success.
  virtual bool BuildGraph();

 protected:
  // Emits the stub-specific body and returns the stub's result value.
  virtual HValue* BuildCodeStub() = 0;
  int GetParameterCount() const { return descriptor_.GetParameterCount(); }
  int GetRegisterParameterCount() const {
    return descriptor_.GetRegisterParameterCount();
  }
  // Returns the HParameter created by BuildGraph() for |parameter|.
  HParameter* GetParameter(int parameter) {
    DCHECK(parameter < GetParameterCount());
    return parameters_[parameter];
  }
  Representation GetParameterRepresentation(int parameter) {
    return RepresentationFromType(descriptor_.GetParameterType(parameter));
  }
  // True if the register parameter at |index| is the register the descriptor
  // designates as carrying the dynamic stack-parameter count.
  bool IsParameterCountRegister(int index) const {
    return descriptor_.GetRegisterParameter(index)
        .is(descriptor_.stack_parameter_count());
  }
  HValue* GetArgumentsLength() {
    // This is initialized in BuildGraph()
    DCHECK(arguments_length_ != NULL);
    return arguments_length_;
  }
  CompilationInfo* info() { return info_; }
  CodeStub* stub() { return code_stub_; }
  HContext* context() { return context_; }
  Isolate* isolate() { return info_->isolate(); }

  HLoadNamedField* BuildLoadNamedField(HValue* object, FieldIndex index);
  void BuildStoreNamedField(HValue* object, HValue* value, FieldIndex index,
                            Representation representation,
                            bool transition_to_field);

  // Appends |argc| arguments to |object| (elements of |kind|); returns the
  // array's new length.  Defined below.
  HValue* BuildPushElement(HValue* object, HValue* argc,
                           HValue* argument_elements, ElementsKind kind);

  // How many arguments a generated array constructor takes.
  enum ArgumentClass {
    NONE,
    SINGLE,
    MULTIPLE
  };

  HValue* UnmappedCase(HValue* elements, HValue* key, HValue* value);
  HValue* EmitKeyedSloppyArguments(HValue* receiver, HValue* key,
                                   HValue* value);

  HValue* BuildArrayConstructor(ElementsKind kind,
                                AllocationSiteOverrideMode override_mode,
                                ArgumentClass argument_class);
  HValue* BuildInternalArrayConstructor(ElementsKind kind,
                                        ArgumentClass argument_class);

  HValue* BuildToString(HValue* input, bool convert);
  HValue* BuildToPrimitive(HValue* input, HValue* input_map);

 private:
  HValue* BuildArraySingleArgumentConstructor(JSArrayBuilder* builder);
  HValue* BuildArrayNArgumentsConstructor(JSArrayBuilder* builder,
                                          ElementsKind kind);

  base::SmartArrayPointer<HParameter*> parameters_;
  HValue* arguments_length_;  // Set by BuildGraph(); see GetArgumentsLength().
  CompilationInfo* info_;
  CodeStub* code_stub_;
  CodeStubDescriptor descriptor_;
  HContext* context_;
};
115
116
// Sets up the stub's entry block, materializes all parameters and the
// context, invokes the stub-specific BuildCodeStub(), and finishes with an
// HReturn that pops the right number of stack arguments.
bool CodeStubGraphBuilderBase::BuildGraph() {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  if (FLAG_trace_hydrogen_stubs) {
    const char* name = CodeStub::MajorName(stub()->MajorKey());
    PrintF("-----------------------------------------------------------\n");
    PrintF("Compiling stub %s using hydrogen\n", name);
    isolate()->GetHTracer()->TraceCompilation(info());
  }

  int param_count = GetParameterCount();
  int register_param_count = GetRegisterParameterCount();
  HEnvironment* start_environment = graph()->start_environment();
  HBasicBlock* next_block = CreateBasicBlock(start_environment);
  Goto(next_block);
  next_block->SetJoinId(BailoutId::StubEntry());
  set_current_block(next_block);

  bool runtime_stack_params = descriptor_.stack_parameter_count().is_valid();
  HInstruction* stack_parameter_count = NULL;
  // Materialize every declared parameter.  Register parameters are also
  // bound into the start environment; the parameter designated as the
  // stack-parameter count doubles as the dynamic arguments length.
  for (int i = 0; i < param_count; ++i) {
    Representation r = GetParameterRepresentation(i);
    HParameter* param;
    if (i >= register_param_count) {
      param = Add<HParameter>(i - register_param_count,
                              HParameter::STACK_PARAMETER, r);
    } else {
      param = Add<HParameter>(i, HParameter::REGISTER_PARAMETER, r);
      start_environment->Bind(i, param);
    }
    parameters_[i] = param;
    if (i < register_param_count && IsParameterCountRegister(i)) {
      param->set_type(HType::Smi());
      stack_parameter_count = param;
      arguments_length_ = stack_parameter_count;
    }
  }

  DCHECK(!runtime_stack_params || arguments_length_ != NULL);
  if (!runtime_stack_params) {
    // Fixed argument count: derive it statically from the descriptor.
    stack_parameter_count =
        Add<HConstant>(param_count - register_param_count - 1);
    // graph()->GetConstantMinus1();
    arguments_length_ = graph()->GetConstant0();
  }

  context_ = Add<HContext>();
  start_environment->BindContext(context_);
  start_environment->Bind(param_count, context_);

  Add<HSimulate>(BailoutId::StubEntry());

  NoObservableSideEffectsScope no_effects(this);

  HValue* return_value = BuildCodeStub();

  // We might have extra expressions to pop from the stack in addition to the
  // arguments above.
  HInstruction* stack_pop_count = stack_parameter_count;
  if (descriptor_.function_mode() == JS_FUNCTION_STUB_MODE) {
    if (!stack_parameter_count->IsConstant() &&
        descriptor_.hint_stack_parameter_count() < 0) {
      HInstruction* constant_one = graph()->GetConstant1();
      stack_pop_count = AddUncasted<HAdd>(stack_parameter_count, constant_one);
      stack_pop_count->ClearFlag(HValue::kCanOverflow);
      // TODO(mvstanton): verify that stack_parameter_count+1 really fits in a
      // smi.
    } else {
      int count = descriptor_.hint_stack_parameter_count();
      stack_pop_count = Add<HConstant>(count);
    }
  }

  // current_block() may be NULL if the stub body ended in a deopt.
  if (current_block() != NULL) {
    HReturn* hreturn_instruction = New<HReturn>(return_value,
                                                stack_pop_count);
    FinishCurrentBlock(hreturn_instruction);
  }
  return true;
}
198
199
// Per-stub graph builder.  Dispatches between the initialized and the
// uninitialized code path of the stub; specializations below override
// BuildCodeStub() (or BuildCodeInitializedStub()) per stub type.
template <class Stub>
class CodeStubGraphBuilder: public CodeStubGraphBuilderBase {
 public:
  explicit CodeStubGraphBuilder(CompilationInfo* info, CodeStub* stub)
      : CodeStubGraphBuilderBase(info, stub) {}

 protected:
  virtual HValue* BuildCodeStub() {
    if (casted_stub()->IsUninitialized()) {
      return BuildCodeUninitializedStub();
    } else {
      return BuildCodeInitializedStub();
    }
  }

  virtual HValue* BuildCodeInitializedStub() {
    UNIMPLEMENTED();
    return NULL;
  }

  virtual HValue* BuildCodeUninitializedStub() {
    // Force a deopt that falls back to the runtime.
    // The condition (undefined != undefined) is always false, so the Then()
    // branch is empty and the ElseDeopt() path is always taken.
    HValue* undefined = graph()->GetConstantUndefined();
    IfBuilder builder(this);
    builder.IfNot<HCompareObjectEqAndBranch, HValue*>(undefined, undefined);
    builder.Then();
    builder.ElseDeopt(Deoptimizer::kForcedDeoptToRuntime);
    return undefined;
  }

  // Downcast to the concrete stub type; safe because this builder is only
  // ever instantiated with the matching Stub (see DoGenerateCode below).
  Stub* casted_stub() { return static_cast<Stub*>(stub()); }
};
232
233
// Assembles a minimal code object that simply tail-calls the |miss| handler
// in the runtime — much cheaper than building a full Hydrogen graph for a
// stub that is still uninitialized (see DoGenerateCode below).
Handle<Code> HydrogenCodeStub::GenerateLightweightMissCode(
    ExternalReference miss) {
  Factory* factory = isolate()->factory();

  // Generate the new code.
  MacroAssembler masm(isolate(), NULL, 256, CodeObjectRequired::kYes);

  {
    // Update the static counter each time a new code stub is generated.
    isolate()->counters()->code_stubs()->Increment();

    // Generate the code for the stub.
    masm.set_generating_stub(true);
    // TODO(yangguo): remove this once we can serialize IC stubs.
    masm.enable_serializer();
    NoCurrentFrameScope scope(&masm);
    GenerateLightweightMiss(&masm, miss);
  }

  // Create the code object.
  CodeDesc desc;
  masm.GetCode(&desc);

  // Copy the generated code into a heap object.
  Handle<Code> new_object = factory->NewCode(
      desc, GetCodeFlags(), masm.CodeObject(), NeedsImmovableCode());
  return new_object;
}
262
263
// Shared driver for all Hydrogen code stubs: either emits the light-weight
// miss stub (uninitialized case) or builds, optimizes and code-generates the
// full Hydrogen graph for |stub|.
template <class Stub>
static Handle<Code> DoGenerateCode(Stub* stub) {
  Isolate* isolate = stub->isolate();
  CodeStubDescriptor descriptor(stub);

  // If we are uninitialized we can use a light-weight stub to enter
  // the runtime that is significantly faster than using the standard
  // stub-failure deopt mechanism.
  if (stub->IsUninitialized() && descriptor.has_miss_handler()) {
    DCHECK(!descriptor.stack_parameter_count().is_valid());
    return stub->GenerateLightweightMissCode(descriptor.miss_handler());
  }
  base::ElapsedTimer timer;
  if (FLAG_profile_hydrogen_code_stub_compilation) {
    timer.Start();
  }
  // Zone and CompilationInfo must outlive graph building and codegen below.
  Zone zone(isolate->allocator());
  CompilationInfo info(CStrVector(CodeStub::MajorName(stub->MajorKey())),
                       isolate, &zone, stub->GetCodeFlags());
  // Parameter count is number of stack parameters.
  int parameter_count = descriptor.GetStackParameterCount();
  if (descriptor.function_mode() == NOT_JS_FUNCTION_STUB_MODE) {
    parameter_count--;
  }
  info.set_parameter_count(parameter_count);
  CodeStubGraphBuilder<Stub> builder(&info, stub);
  LChunk* chunk = OptimizeGraph(builder.CreateGraph());
  Handle<Code> code = chunk->Codegen();
  if (FLAG_profile_hydrogen_code_stub_compilation) {
    OFStream os(stdout);
    os << "[Lazy compilation of " << stub << " took "
       << timer.Elapsed().InMillisecondsF() << " ms]" << std::endl;
  }
  return code;
}
299
300
301template <>
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000302HValue* CodeStubGraphBuilder<NumberToStringStub>::BuildCodeStub() {
303 info()->MarkAsSavesCallerDoubles();
304 HValue* number = GetParameter(NumberToStringStub::kNumber);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100305 return BuildNumberToString(number, Type::Number());
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000306}
307
308
309Handle<Code> NumberToStringStub::GenerateCode() {
310 return DoGenerateCode(this);
311}
312
313
// Returns the type string of a value; see ECMA-262, 11.4.3 (p 47).
template <>
HValue* CodeStubGraphBuilder<TypeofStub>::BuildCodeStub() {
  Factory* factory = isolate()->factory();
  HConstant* number_string = Add<HConstant>(factory->number_string());
  HValue* object = GetParameter(TypeofStub::kObject);

  // Smis are numbers.
  IfBuilder is_smi(this);
  HValue* smi_check = is_smi.If<HIsSmiAndBranch>(object);
  is_smi.Then();
  { Push(number_string); }
  is_smi.Else();
  {
    // Heap numbers are numbers too.
    IfBuilder is_number(this);
    is_number.If<HCompareMap>(object, isolate()->factory()->heap_number_map());
    is_number.Then();
    { Push(number_string); }
    is_number.Else();
    {
      HValue* map = AddLoadMap(object, smi_check);
      HValue* instance_type = Add<HLoadNamedField>(
          map, nullptr, HObjectAccess::ForMapInstanceType());
      // Anything below FIRST_NONSTRING_TYPE is a string.
      IfBuilder is_string(this);
      is_string.If<HCompareNumericAndBranch>(
          instance_type, Add<HConstant>(FIRST_NONSTRING_TYPE), Token::LT);
      is_string.Then();
      { Push(Add<HConstant>(factory->string_string())); }
      is_string.Else();
      {
        HConstant* object_string = Add<HConstant>(factory->object_string());
        // Oddballs carry their typeof string directly on the object.
        IfBuilder is_oddball(this);
        is_oddball.If<HCompareNumericAndBranch>(
            instance_type, Add<HConstant>(ODDBALL_TYPE), Token::EQ);
        is_oddball.Then();
        {
          Push(Add<HLoadNamedField>(object, nullptr,
                                    HObjectAccess::ForOddballTypeOf()));
        }
        is_oddball.Else();
        {
          IfBuilder is_symbol(this);
          is_symbol.If<HCompareNumericAndBranch>(
              instance_type, Add<HConstant>(SYMBOL_TYPE), Token::EQ);
          is_symbol.Then();
          { Push(Add<HConstant>(factory->symbol_string())); }
          is_symbol.Else();
          {
            // "function" requires callable and NOT undetectable, so test
            // both map bits at once.
            HValue* bit_field = Add<HLoadNamedField>(
                map, nullptr, HObjectAccess::ForMapBitField());
            HValue* bit_field_masked = AddUncasted<HBitwise>(
                Token::BIT_AND, bit_field,
                Add<HConstant>((1 << Map::kIsCallable) |
                               (1 << Map::kIsUndetectable)));
            IfBuilder is_function(this);
            is_function.If<HCompareNumericAndBranch>(
                bit_field_masked, Add<HConstant>(1 << Map::kIsCallable),
                Token::EQ);
            is_function.Then();
            { Push(Add<HConstant>(factory->function_string())); }
            is_function.Else();
            {
// Opens one nested IfBuilder per SIMD128 type, each matched by a closing
// brace emitted by SIMD128_BUILDER_CLOSE below.
#define SIMD128_BUILDER_OPEN(TYPE, Type, type, lane_count, lane_type) \
  IfBuilder is_##type(this);                                          \
  is_##type.If<HCompareObjectEqAndBranch>(                            \
      map, Add<HConstant>(factory->type##_map()));                    \
  is_##type.Then();                                                   \
  { Push(Add<HConstant>(factory->type##_string())); }                 \
  is_##type.Else(); {
              SIMD128_TYPES(SIMD128_BUILDER_OPEN)
#undef SIMD128_BUILDER_OPEN
              // Is it an undetectable object?
              IfBuilder is_undetectable(this);
              is_undetectable.If<HCompareNumericAndBranch>(
                  bit_field_masked, graph()->GetConstant0(), Token::NE);
              is_undetectable.Then();
              {
                // typeof an undetectable object is 'undefined'.
                Push(Add<HConstant>(factory->undefined_string()));
              }
              is_undetectable.Else();
              {
                // For any kind of object not handled above, the spec rule for
                // host objects gives that it is okay to return "object".
                Push(object_string);
              }
#define SIMD128_BUILDER_CLOSE(TYPE, Type, type, lane_count, lane_type) }
              SIMD128_TYPES(SIMD128_BUILDER_CLOSE)
#undef SIMD128_BUILDER_CLOSE
            }
            is_function.End();
          }
          is_symbol.End();
        }
        is_oddball.End();
      }
      is_string.End();
    }
    is_number.End();
  }
  is_smi.End();

  return environment()->Pop();
}
417
418
419Handle<Code> TypeofStub::GenerateCode() { return DoGenerateCode(this); }
420
421
// Clones the regexp boilerplate stored in the closure's literals array, or
// deopts to the runtime if the boilerplate slot is still undefined.
template <>
HValue* CodeStubGraphBuilder<FastCloneRegExpStub>::BuildCodeStub() {
  HValue* closure = GetParameter(0);
  HValue* literal_index = GetParameter(1);

  // This stub is very performance sensitive, the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();

  HValue* literals_array = Add<HLoadNamedField>(
      closure, nullptr, HObjectAccess::ForLiteralsPointer());
  HInstruction* boilerplate = Add<HLoadKeyed>(
      literals_array, literal_index, nullptr, nullptr, FAST_ELEMENTS,
      NEVER_RETURN_HOLE, LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag);

  IfBuilder if_notundefined(this);
  if_notundefined.IfNot<HCompareObjectEqAndBranch>(
      boilerplate, graph()->GetConstantUndefined());
  if_notundefined.Then();
  {
    // Shallow field-by-field copy of the boilerplate JSRegExp.
    int result_size =
        JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
    HValue* result =
        Add<HAllocate>(Add<HConstant>(result_size), HType::JSObject(),
                       NOT_TENURED, JS_REGEXP_TYPE, graph()->GetConstant0());
    Add<HStoreNamedField>(
        result, HObjectAccess::ForMap(),
        Add<HLoadNamedField>(boilerplate, nullptr, HObjectAccess::ForMap()));
    Add<HStoreNamedField>(
        result, HObjectAccess::ForPropertiesPointer(),
        Add<HLoadNamedField>(boilerplate, nullptr,
                             HObjectAccess::ForPropertiesPointer()));
    Add<HStoreNamedField>(
        result, HObjectAccess::ForElementsPointer(),
        Add<HLoadNamedField>(boilerplate, nullptr,
                             HObjectAccess::ForElementsPointer()));
    // Copy the remaining in-object fields word by word.
    for (int offset = JSObject::kHeaderSize; offset < result_size;
         offset += kPointerSize) {
      HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(offset);
      Add<HStoreNamedField>(result, access,
                            Add<HLoadNamedField>(boilerplate, nullptr, access));
    }
    Push(result);
  }
  if_notundefined.ElseDeopt(Deoptimizer::kUninitializedBoilerplateInFastClone);
  if_notundefined.End();

  return Pop();
}
471
472
473Handle<Code> FastCloneRegExpStub::GenerateCode() {
474 return DoGenerateCode(this);
475}
476
477
// Clones a shallow array literal from its AllocationSite boilerplate,
// choosing the empty / copy-on-write / fast / fast-double clone strategy
// based on the boilerplate's elements; deopts if the site is uninitialized.
template <>
HValue* CodeStubGraphBuilder<FastCloneShallowArrayStub>::BuildCodeStub() {
  Factory* factory = isolate()->factory();
  HValue* undefined = graph()->GetConstantUndefined();
  AllocationSiteMode alloc_site_mode = casted_stub()->allocation_site_mode();
  HValue* closure = GetParameter(0);
  HValue* literal_index = GetParameter(1);

  // This stub is very performance sensitive, the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();

  HValue* literals_array = Add<HLoadNamedField>(
      closure, nullptr, HObjectAccess::ForLiteralsPointer());

  HInstruction* allocation_site = Add<HLoadKeyed>(
      literals_array, literal_index, nullptr, nullptr, FAST_ELEMENTS,
      NEVER_RETURN_HOLE, LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag);
  IfBuilder checker(this);
  checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
                                                    undefined);
  checker.Then();

  // The boilerplate array lives in the site's transition-info slot.
  HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
      AllocationSite::kTransitionInfoOffset);
  HInstruction* boilerplate =
      Add<HLoadNamedField>(allocation_site, nullptr, access);
  HValue* elements = AddLoadElements(boilerplate);
  HValue* capacity = AddLoadFixedArrayLength(elements);
  IfBuilder zero_capacity(this);
  zero_capacity.If<HCompareNumericAndBranch>(capacity, graph()->GetConstant0(),
                                             Token::EQ);
  zero_capacity.Then();
  Push(BuildCloneShallowArrayEmpty(boilerplate,
                                   allocation_site,
                                   alloc_site_mode));
  zero_capacity.Else();
  IfBuilder if_fixed_cow(this);
  if_fixed_cow.If<HCompareMap>(elements, factory->fixed_cow_array_map());
  if_fixed_cow.Then();
  Push(BuildCloneShallowArrayCow(boilerplate,
                                 allocation_site,
                                 alloc_site_mode,
                                 FAST_ELEMENTS));
  if_fixed_cow.Else();
  IfBuilder if_fixed(this);
  if_fixed.If<HCompareMap>(elements, factory->fixed_array_map());
  if_fixed.Then();
  Push(BuildCloneShallowArrayNonEmpty(boilerplate,
                                      allocation_site,
                                      alloc_site_mode,
                                      FAST_ELEMENTS));

  // Not a FixedArray map: must be a FixedDoubleArray backing store.
  if_fixed.Else();
  Push(BuildCloneShallowArrayNonEmpty(boilerplate,
                                      allocation_site,
                                      alloc_site_mode,
                                      FAST_DOUBLE_ELEMENTS));
  if_fixed.End();
  if_fixed_cow.End();
  zero_capacity.End();

  checker.ElseDeopt(Deoptimizer::kUninitializedBoilerplateLiterals);
  checker.End();

  return environment()->Pop();
}
545
546
547Handle<Code> FastCloneShallowArrayStub::GenerateCode() {
548 return DoGenerateCode(this);
549}
550
551
// Allocates and initializes a new AllocationSite, links it into the
// isolate's allocation-site list, stores it into the feedback vector slot,
// and returns the feedback vector.
template <>
HValue* CodeStubGraphBuilder<CreateAllocationSiteStub>::BuildCodeStub() {
  // This stub is performance sensitive, the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();

  // TENURED: sites are expected to survive several GCs (see comment below).
  HValue* size = Add<HConstant>(AllocationSite::kSize);
  HInstruction* object =
      Add<HAllocate>(size, HType::JSObject(), TENURED, JS_OBJECT_TYPE,
                     graph()->GetConstant0());

  // Store the map
  Handle<Map> allocation_site_map = isolate()->factory()->allocation_site_map();
  AddStoreMapConstant(object, allocation_site_map);

  // Store the payload (smi elements kind)
  HValue* initial_elements_kind = Add<HConstant>(GetInitialFastElementsKind());
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kTransitionInfoOffset),
                        initial_elements_kind);

  // Unlike literals, constructed arrays don't have nested sites
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kNestedSiteOffset),
                        graph()->GetConstant0());

  // Pretenuring calculation field.
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kPretenureDataOffset),
                        graph()->GetConstant0());

  // Pretenuring memento creation count field.
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kPretenureCreateCountOffset),
                        graph()->GetConstant0());

  // Store an empty fixed array for the code dependency.
  HConstant* empty_fixed_array =
      Add<HConstant>(isolate()->factory()->empty_fixed_array());
  Add<HStoreNamedField>(
      object,
      HObjectAccess::ForAllocationSiteOffset(
          AllocationSite::kDependentCodeOffset),
      empty_fixed_array);

  // Link the object to the allocation site list
  HValue* site_list = Add<HConstant>(
      ExternalReference::allocation_sites_list_address(isolate()));
  HValue* site = Add<HLoadNamedField>(site_list, nullptr,
                                      HObjectAccess::ForAllocationSiteList());
  // TODO(mvstanton): This is a store to a weak pointer, which we may want to
  // mark as such in order to skip the write barrier, once we have a unified
  // system for weakness. For now we decided to keep it like this because having
  // an initial write barrier backed store makes this pointer strong until the
  // next GC, and allocation sites are designed to survive several GCs anyway.
  Add<HStoreNamedField>(
      object,
      HObjectAccess::ForAllocationSiteOffset(AllocationSite::kWeakNextOffset),
      site);
  Add<HStoreNamedField>(site_list, HObjectAccess::ForAllocationSiteList(),
                        object);

  // Record the new site in the feedback vector slot and return the vector.
  HInstruction* feedback_vector = GetParameter(0);
  HInstruction* slot = GetParameter(1);
  Add<HStoreKeyed>(feedback_vector, slot, object, nullptr, FAST_ELEMENTS,
                   INITIALIZING_STORE);
  return feedback_vector;
}
624
625
626Handle<Code> CreateAllocationSiteStub::GenerateCode() {
627 return DoGenerateCode(this);
628}
629
630
// Allocates a WeakCell holding the value parameter, stores it into the
// feedback vector slot, and returns Smi zero.
template <>
HValue* CodeStubGraphBuilder<CreateWeakCellStub>::BuildCodeStub() {
  // This stub is performance sensitive, the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();

  HValue* size = Add<HConstant>(WeakCell::kSize);
  HInstruction* object =
      Add<HAllocate>(size, HType::JSObject(), TENURED, JS_OBJECT_TYPE,
                     graph()->GetConstant0());

  Handle<Map> weak_cell_map = isolate()->factory()->weak_cell_map();
  AddStoreMapConstant(object, weak_cell_map);

  // value -> cell value, hole -> cell next (end-of-list marker).
  HInstruction* value = GetParameter(CreateWeakCellDescriptor::kValueIndex);
  Add<HStoreNamedField>(object, HObjectAccess::ForWeakCellValue(), value);
  Add<HStoreNamedField>(object, HObjectAccess::ForWeakCellNext(),
                        graph()->GetConstantHole());

  HInstruction* feedback_vector =
      GetParameter(CreateWeakCellDescriptor::kVectorIndex);
  HInstruction* slot = GetParameter(CreateWeakCellDescriptor::kSlotIndex);
  Add<HStoreKeyed>(feedback_vector, slot, object, nullptr, FAST_ELEMENTS,
                   INITIALIZING_STORE);
  return graph()->GetConstant0();
}
657
658
659Handle<Code> CreateWeakCellStub::GenerateCode() { return DoGenerateCode(this); }
660
661
662template <>
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400663HValue* CodeStubGraphBuilder<LoadScriptContextFieldStub>::BuildCodeStub() {
664 int context_index = casted_stub()->context_index();
665 int slot_index = casted_stub()->slot_index();
666
667 HValue* script_context = BuildGetScriptContext(context_index);
668 return Add<HLoadNamedField>(script_context, nullptr,
669 HObjectAccess::ForContextSlot(slot_index));
670}
671
672
673Handle<Code> LoadScriptContextFieldStub::GenerateCode() {
674 return DoGenerateCode(this);
675}
676
677
678template <>
679HValue* CodeStubGraphBuilder<StoreScriptContextFieldStub>::BuildCodeStub() {
680 int context_index = casted_stub()->context_index();
681 int slot_index = casted_stub()->slot_index();
682
683 HValue* script_context = BuildGetScriptContext(context_index);
684 Add<HStoreNamedField>(script_context,
685 HObjectAccess::ForContextSlot(slot_index),
686 GetParameter(2), STORE_TO_INITIALIZED_ENTRY);
687 return GetParameter(2);
688}
689
690
691Handle<Code> StoreScriptContextFieldStub::GenerateCode() {
692 return DoGenerateCode(this);
693}
694
// Appends the |argc| stack arguments to |object|'s elements backing store
// (of elements kind |kind|) and returns the array's new length.  Deopts
// (kFastArrayPushFailed) when an argument would require an elements-kind
// transition (non-smi into smi elements, non-number into double elements).
HValue* CodeStubGraphBuilderBase::BuildPushElement(HValue* object, HValue* argc,
                                                   HValue* argument_elements,
                                                   ElementsKind kind) {
  // Precheck whether all elements fit into the array.
  if (!IsFastObjectElementsKind(kind)) {
    LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);
    HValue* start = graph()->GetConstant0();
    HValue* key = builder.BeginBody(start, argc, Token::LT);
    {
      HInstruction* argument =
          Add<HAccessArgumentsAt>(argument_elements, argc, key);
      IfBuilder can_store(this);
      can_store.IfNot<HIsSmiAndBranch>(argument);
      if (IsFastDoubleElementsKind(kind)) {
        // Double elements also accept heap numbers.
        can_store.And();
        can_store.IfNot<HCompareMap>(argument,
                                     isolate()->factory()->heap_number_map());
      }
      can_store.ThenDeopt(Deoptimizer::kFastArrayPushFailed);
      can_store.End();
    }
    builder.EndBody();
  }

  // Compute the new length and grow the backing store if necessary.
  HValue* length = Add<HLoadNamedField>(object, nullptr,
                                        HObjectAccess::ForArrayLength(kind));
  HValue* new_length = AddUncasted<HAdd>(length, argc);
  HValue* max_key = AddUncasted<HSub>(new_length, graph()->GetConstant1());

  HValue* elements = Add<HLoadNamedField>(object, nullptr,
                                          HObjectAccess::ForElementsPointer());
  elements = BuildCheckForCapacityGrow(object, elements, kind, length, max_key,
                                       true, STORE);

  // Copy the arguments into the (possibly grown) backing store.
  LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);
  HValue* start = graph()->GetConstant0();
  HValue* key = builder.BeginBody(start, argc, Token::LT);
  {
    HValue* argument = Add<HAccessArgumentsAt>(argument_elements, argc, key);
    HValue* index = AddUncasted<HAdd>(key, length);
    AddElementAccess(elements, index, argument, object, nullptr, kind, STORE);
  }
  builder.EndBody();
  return new_length;
}
740
template <>
HValue* CodeStubGraphBuilder<FastArrayPushStub>::BuildCodeStub() {
  // Builds the fast path for Array.prototype.push: validates the receiver and
  // its prototype chain, then dispatches on the receiver's elements kind to
  // BuildPushElement. Any condition the fast path cannot handle deopts with
  // kFastArrayPushFailed.
  // TODO(verwaest): Fix deoptimizer messages.
  HValue* argc = GetArgumentsLength();
  HInstruction* argument_elements = Add<HArgumentsElements>(false, false);
  // Index -1 relative to argc fetches the receiver from the argument area.
  HInstruction* object = Add<HAccessArgumentsAt>(argument_elements, argc,
                                                 graph()->GetConstantMinus1());
  BuildCheckHeapObject(object);
  HValue* map = Add<HLoadNamedField>(object, nullptr, HObjectAccess::ForMap());
  Add<HCheckInstanceType>(object, HCheckInstanceType::IS_JS_ARRAY);

  // Disallow pushing onto prototypes. It might be the JSArray prototype.
  // Disallow pushing onto non-extensible objects.
  {
    HValue* bit_field2 =
        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField2());
    HValue* mask =
        Add<HConstant>(static_cast<int>(Map::IsPrototypeMapBits::kMask) |
                       (1 << Map::kIsExtensible));
    HValue* bits = AddUncasted<HBitwise>(Token::BIT_AND, bit_field2, mask);
    IfBuilder check(this);
    // Passes only if the prototype bit is clear AND the extensible bit is set,
    // i.e. the masked value equals exactly the extensible bit.
    check.If<HCompareNumericAndBranch>(
        bits, Add<HConstant>(1 << Map::kIsExtensible), Token::NE);
    check.ThenDeopt(Deoptimizer::kFastArrayPushFailed);
    check.End();
  }

  // Disallow pushing onto arrays in dictionary named property mode. We need to
  // figure out whether the length property is still writable.
  {
    HValue* bit_field3 =
        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField3());
    HValue* mask = Add<HConstant>(static_cast<int>(Map::DictionaryMap::kMask));
    HValue* bit = AddUncasted<HBitwise>(Token::BIT_AND, bit_field3, mask);
    IfBuilder check(this);
    check.If<HCompareNumericAndBranch>(bit, mask, Token::EQ);
    check.ThenDeopt(Deoptimizer::kFastArrayPushFailed);
    check.End();
  }

  // Check whether the length property is writable. The length property is the
  // only default named property on arrays. It's nonconfigurable, hence is
  // guaranteed to stay the first property.
  {
    HValue* descriptors =
        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapDescriptors());
    HValue* details = Add<HLoadKeyed>(
        descriptors, Add<HConstant>(DescriptorArray::ToDetailsIndex(0)),
        nullptr, nullptr, FAST_SMI_ELEMENTS);
    HValue* mask =
        Add<HConstant>(READ_ONLY << PropertyDetails::AttributesField::kShift);
    HValue* bit = AddUncasted<HBitwise>(Token::BIT_AND, details, mask);
    IfBuilder readonly(this);
    readonly.If<HCompareNumericAndBranch>(bit, mask, Token::EQ);
    readonly.ThenDeopt(Deoptimizer::kFastArrayPushFailed);
    readonly.End();
  }

  // Walk the prototype chain (via the maps pushed on the environment) until
  // null. Deopt if any prototype is a "custom elements receiver" or has a
  // non-empty elements store, since a push could then be observable there.
  HValue* null = Add<HLoadRoot>(Heap::kNullValueRootIndex);
  HValue* empty = Add<HLoadRoot>(Heap::kEmptyFixedArrayRootIndex);
  environment()->Push(map);
  LoopBuilder check_prototypes(this);
  check_prototypes.BeginBody(1);
  {
    HValue* parent_map = environment()->Pop();
    HValue* prototype = Add<HLoadNamedField>(parent_map, nullptr,
                                             HObjectAccess::ForPrototype());

    // End of the chain: nothing on any prototype can intercept the push.
    IfBuilder is_null(this);
    is_null.If<HCompareObjectEqAndBranch>(prototype, null);
    is_null.Then();
    check_prototypes.Break();
    is_null.End();

    HValue* prototype_map =
        Add<HLoadNamedField>(prototype, nullptr, HObjectAccess::ForMap());
    HValue* instance_type = Add<HLoadNamedField>(
        prototype_map, nullptr, HObjectAccess::ForMapInstanceType());
    IfBuilder check_instance_type(this);
    check_instance_type.If<HCompareNumericAndBranch>(
        instance_type, Add<HConstant>(LAST_CUSTOM_ELEMENTS_RECEIVER),
        Token::LTE);
    check_instance_type.ThenDeopt(Deoptimizer::kFastArrayPushFailed);
    check_instance_type.End();

    HValue* elements = Add<HLoadNamedField>(
        prototype, nullptr, HObjectAccess::ForElementsPointer());
    IfBuilder no_elements(this);
    no_elements.IfNot<HCompareObjectEqAndBranch>(elements, empty);
    no_elements.ThenDeopt(Deoptimizer::kFastArrayPushFailed);
    no_elements.End();

    environment()->Push(prototype_map);
  }
  check_prototypes.EndBody();

  HValue* bit_field2 =
      Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField2());
  HValue* kind = BuildDecodeField<Map::ElementsKindBits>(bit_field2);

  // Below we only check the upper bound of the relevant ranges to include both
  // holey and non-holey versions. We check them in order smi, object, double
  // since smi < object < double.
  STATIC_ASSERT(FAST_SMI_ELEMENTS < FAST_HOLEY_SMI_ELEMENTS);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS < FAST_HOLEY_ELEMENTS);
  STATIC_ASSERT(FAST_ELEMENTS < FAST_HOLEY_ELEMENTS);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS < FAST_HOLEY_DOUBLE_ELEMENTS);
  STATIC_ASSERT(FAST_DOUBLE_ELEMENTS < FAST_HOLEY_DOUBLE_ELEMENTS);
  IfBuilder has_smi_elements(this);
  has_smi_elements.If<HCompareNumericAndBranch>(
      kind, Add<HConstant>(FAST_HOLEY_SMI_ELEMENTS), Token::LTE);
  has_smi_elements.Then();
  {
    HValue* new_length = BuildPushElement(object, argc, argument_elements,
                                          FAST_HOLEY_SMI_ELEMENTS);
    environment()->Push(new_length);
  }
  has_smi_elements.Else();
  {
    IfBuilder has_object_elements(this);
    has_object_elements.If<HCompareNumericAndBranch>(
        kind, Add<HConstant>(FAST_HOLEY_ELEMENTS), Token::LTE);
    has_object_elements.Then();
    {
      HValue* new_length = BuildPushElement(object, argc, argument_elements,
                                            FAST_HOLEY_ELEMENTS);
      environment()->Push(new_length);
    }
    has_object_elements.Else();
    {
      IfBuilder has_double_elements(this);
      has_double_elements.If<HCompareNumericAndBranch>(
          kind, Add<HConstant>(FAST_HOLEY_DOUBLE_ELEMENTS), Token::LTE);
      has_double_elements.Then();
      {
        HValue* new_length = BuildPushElement(object, argc, argument_elements,
                                              FAST_HOLEY_DOUBLE_ELEMENTS);
        environment()->Push(new_length);
      }
      // Any remaining elements kind is not a fast kind; bail out.
      has_double_elements.ElseDeopt(Deoptimizer::kFastArrayPushFailed);
      has_double_elements.End();
    }
    has_object_elements.End();
  }
  has_smi_elements.End();

  // All branches pushed the new length; it is the stub's return value.
  return environment()->Pop();
}
889
// Generate this stub's code through the shared Hydrogen pipeline.
Handle<Code> FastArrayPushStub::GenerateCode() { return DoGenerateCode(this); }
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400891
892template <>
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000893HValue* CodeStubGraphBuilder<GrowArrayElementsStub>::BuildCodeStub() {
894 ElementsKind kind = casted_stub()->elements_kind();
895 if (IsFastDoubleElementsKind(kind)) {
896 info()->MarkAsSavesCallerDoubles();
897 }
898
899 HValue* object = GetParameter(GrowArrayElementsDescriptor::kObjectIndex);
900 HValue* key = GetParameter(GrowArrayElementsDescriptor::kKeyIndex);
901
902 HValue* elements = AddLoadElements(object);
903 HValue* current_capacity = Add<HLoadNamedField>(
904 elements, nullptr, HObjectAccess::ForFixedArrayLength());
905
906 HValue* length =
907 casted_stub()->is_js_array()
908 ? Add<HLoadNamedField>(object, static_cast<HValue*>(NULL),
909 HObjectAccess::ForArrayLength(kind))
910 : current_capacity;
911
912 return BuildCheckAndGrowElementsCapacity(object, elements, kind, length,
913 current_capacity, key);
914}
915
916
// Generate this stub's code through the shared Hydrogen pipeline.
Handle<Code> GrowArrayElementsStub::GenerateCode() {
  return DoGenerateCode(this);
}
920
921
922template <>
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000923HValue* CodeStubGraphBuilder<LoadFastElementStub>::BuildCodeStub() {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000924 LoadKeyedHoleMode hole_mode = casted_stub()->convert_hole_to_undefined()
925 ? CONVERT_HOLE_TO_UNDEFINED
926 : NEVER_RETURN_HOLE;
927
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000928 HInstruction* load = BuildUncheckedMonomorphicElementAccess(
929 GetParameter(LoadDescriptor::kReceiverIndex),
930 GetParameter(LoadDescriptor::kNameIndex), NULL,
931 casted_stub()->is_js_array(), casted_stub()->elements_kind(), LOAD,
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000932 hole_mode, STANDARD_STORE);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000933 return load;
934}
935
936
// Generate this stub's code through the shared Hydrogen pipeline.
Handle<Code> LoadFastElementStub::GenerateCode() {
  return DoGenerateCode(this);
}
940
941
942HLoadNamedField* CodeStubGraphBuilderBase::BuildLoadNamedField(
943 HValue* object, FieldIndex index) {
944 Representation representation = index.is_double()
945 ? Representation::Double()
946 : Representation::Tagged();
947 int offset = index.offset();
948 HObjectAccess access = index.is_inobject()
949 ? HObjectAccess::ForObservableJSObjectOffset(offset, representation)
950 : HObjectAccess::ForBackingStoreOffset(offset, representation);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400951 if (index.is_double() &&
952 (!FLAG_unbox_double_fields || !index.is_inobject())) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000953 // Load the heap number.
954 object = Add<HLoadNamedField>(
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400955 object, nullptr, access.WithRepresentation(Representation::Tagged()));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000956 // Load the double value from it.
957 access = HObjectAccess::ForHeapNumberValue();
958 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400959 return Add<HLoadNamedField>(object, nullptr, access);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000960}
961
962
963template<>
964HValue* CodeStubGraphBuilder<LoadFieldStub>::BuildCodeStub() {
965 return BuildLoadNamedField(GetParameter(0), casted_stub()->index());
966}
967
968
// Generate this stub's code through the shared Hydrogen pipeline.
Handle<Code> LoadFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}
972
973
974template <>
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000975HValue* CodeStubGraphBuilder<ArrayBufferViewLoadFieldStub>::BuildCodeStub() {
976 return BuildArrayBufferViewFieldAccessor(GetParameter(0), nullptr,
977 casted_stub()->index());
978}
979
980
// Generate this stub's code through the shared Hydrogen pipeline.
Handle<Code> ArrayBufferViewLoadFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}
984
985
986template <>
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000987HValue* CodeStubGraphBuilder<LoadConstantStub>::BuildCodeStub() {
988 HValue* map = AddLoadMap(GetParameter(0), NULL);
989 HObjectAccess descriptors_access = HObjectAccess::ForObservableJSObjectOffset(
990 Map::kDescriptorsOffset, Representation::Tagged());
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400991 HValue* descriptors = Add<HLoadNamedField>(map, nullptr, descriptors_access);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000992 HObjectAccess value_access = HObjectAccess::ForObservableJSObjectOffset(
993 DescriptorArray::GetValueOffset(casted_stub()->constant_index()));
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400994 return Add<HLoadNamedField>(descriptors, nullptr, value_access);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000995}
996
997
// Generate this stub's code through the shared Hydrogen pipeline.
Handle<Code> LoadConstantStub::GenerateCode() { return DoGenerateCode(this); }
999
1000
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001001HValue* CodeStubGraphBuilderBase::UnmappedCase(HValue* elements, HValue* key,
1002 HValue* value) {
1003 HValue* result = NULL;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001004 HInstruction* backing_store =
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001005 Add<HLoadKeyed>(elements, graph()->GetConstant1(), nullptr, nullptr,
1006 FAST_ELEMENTS, ALLOW_RETURN_HOLE);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001007 Add<HCheckMaps>(backing_store, isolate()->factory()->fixed_array_map());
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001008 HValue* backing_store_length = Add<HLoadNamedField>(
1009 backing_store, nullptr, HObjectAccess::ForFixedArrayLength());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001010 IfBuilder in_unmapped_range(this);
1011 in_unmapped_range.If<HCompareNumericAndBranch>(key, backing_store_length,
1012 Token::LT);
1013 in_unmapped_range.Then();
1014 {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001015 if (value == NULL) {
1016 result = Add<HLoadKeyed>(backing_store, key, nullptr, nullptr,
1017 FAST_HOLEY_ELEMENTS, NEVER_RETURN_HOLE);
1018 } else {
1019 Add<HStoreKeyed>(backing_store, key, value, nullptr, FAST_HOLEY_ELEMENTS);
1020 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001021 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001022 in_unmapped_range.ElseDeopt(Deoptimizer::kOutsideOfRange);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001023 in_unmapped_range.End();
1024 return result;
1025}
1026
1027
HValue* CodeStubGraphBuilderBase::EmitKeyedSloppyArguments(HValue* receiver,
                                                           HValue* key,
                                                           HValue* value) {
  // Emits a keyed access on a sloppy-mode arguments object. If |value| is
  // NULL this is a load and the loaded value is returned; otherwise |value|
  // is stored and returned.
  //
  // Mapped arguments are actual arguments. Unmapped arguments are values added
  // to the arguments object after it was created for the call. Mapped arguments
  // are stored in the context at indexes given by elements[key + 2]. Unmapped
  // arguments are stored as regular indexed properties in the arguments array,
  // held at elements[1]. See NewSloppyArguments() in runtime.cc for a detailed
  // look at argument object construction.
  //
  // The sloppy arguments elements array has a special format:
  //
  // 0: context
  // 1: unmapped arguments array
  // 2: mapped_index0,
  // 3: mapped_index1,
  // ...
  //
  // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments).
  // If key + 2 >= elements.length then attempt to look in the unmapped
  // arguments array (given by elements[1]) and return the value at key, missing
  // to the runtime if the unmapped arguments array is not a fixed array or if
  // key >= unmapped_arguments_array.length.
  //
  // Otherwise, t = elements[key + 2]. If t is the hole, then look up the value
  // in the unmapped arguments array, as described above. Otherwise, t is a Smi
  // index into the context array given at elements[0]. Return the value at
  // context[t].

  bool is_load = value == NULL;

  // Negative keys are invalid on the fast path; deopt.
  key = AddUncasted<HForceRepresentation>(key, Representation::Smi());
  IfBuilder positive_smi(this);
  positive_smi.If<HCompareNumericAndBranch>(key, graph()->GetConstant0(),
                                            Token::LT);
  positive_smi.ThenDeopt(Deoptimizer::kKeyIsNegative);
  positive_smi.End();

  HValue* constant_two = Add<HConstant>(2);
  HValue* elements = AddLoadElements(receiver, nullptr);
  HValue* elements_length = Add<HLoadNamedField>(
      elements, nullptr, HObjectAccess::ForFixedArrayLength());
  HValue* adjusted_length = AddUncasted<HSub>(elements_length, constant_two);
  IfBuilder in_range(this);
  in_range.If<HCompareNumericAndBranch>(key, adjusted_length, Token::LT);
  in_range.Then();
  {
    HValue* index = AddUncasted<HAdd>(key, constant_two);
    HInstruction* mapped_index =
        Add<HLoadKeyed>(elements, index, nullptr, nullptr, FAST_HOLEY_ELEMENTS,
                        ALLOW_RETURN_HOLE);

    IfBuilder is_valid(this);
    is_valid.IfNot<HCompareObjectEqAndBranch>(mapped_index,
                                              graph()->GetConstantHole());
    is_valid.Then();
    {
      // TODO(mvstanton): I'd like to assert from this point, that if the
      // mapped_index is not the hole that it is indeed, a smi. An unnecessary
      // smi check is being emitted.
      HValue* the_context = Add<HLoadKeyed>(elements, graph()->GetConstant0(),
                                            nullptr, nullptr, FAST_ELEMENTS);
      STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize);
      if (is_load) {
        HValue* result =
            Add<HLoadKeyed>(the_context, mapped_index, nullptr, nullptr,
                            FAST_ELEMENTS, ALLOW_RETURN_HOLE);
        environment()->Push(result);
      } else {
        DCHECK(value != NULL);
        Add<HStoreKeyed>(the_context, mapped_index, value, nullptr,
                         FAST_ELEMENTS);
        environment()->Push(value);
      }
    }
    is_valid.Else();
    {
      // The mapped slot holds the hole: the access falls through to the
      // unmapped arguments array.
      HValue* result = UnmappedCase(elements, key, value);
      environment()->Push(is_load ? result : value);
    }
    is_valid.End();
  }
  in_range.Else();
  {
    // Key is beyond the mapped range; access the unmapped arguments array.
    HValue* result = UnmappedCase(elements, key, value);
    environment()->Push(is_load ? result : value);
  }
  in_range.End();

  return environment()->Pop();
}
1119
1120
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001121template <>
1122HValue* CodeStubGraphBuilder<KeyedLoadSloppyArgumentsStub>::BuildCodeStub() {
1123 HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
1124 HValue* key = GetParameter(LoadDescriptor::kNameIndex);
1125
1126 return EmitKeyedSloppyArguments(receiver, key, NULL);
1127}
1128
1129
// Generate this stub's code through the shared Hydrogen pipeline.
Handle<Code> KeyedLoadSloppyArgumentsStub::GenerateCode() {
  return DoGenerateCode(this);
}
1133
1134
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001135template <>
1136HValue* CodeStubGraphBuilder<KeyedStoreSloppyArgumentsStub>::BuildCodeStub() {
1137 HValue* receiver = GetParameter(StoreDescriptor::kReceiverIndex);
1138 HValue* key = GetParameter(StoreDescriptor::kNameIndex);
1139 HValue* value = GetParameter(StoreDescriptor::kValueIndex);
1140
1141 return EmitKeyedSloppyArguments(receiver, key, value);
1142}
1143
1144
// Generate this stub's code through the shared Hydrogen pipeline.
Handle<Code> KeyedStoreSloppyArgumentsStub::GenerateCode() {
  return DoGenerateCode(this);
}
1148
1149
void CodeStubGraphBuilderBase::BuildStoreNamedField(
    HValue* object, HValue* value, FieldIndex index,
    Representation representation, bool transition_to_field) {
  // Stores |value| into the field of |object| described by |index|. For
  // double fields that are stored boxed, |transition_to_field| selects
  // between allocating a fresh mutable HeapNumber (new field) and writing
  // into the existing one.
  DCHECK(!index.is_double() || representation.IsDouble());
  int offset = index.offset();
  HObjectAccess access =
      index.is_inobject()
          ? HObjectAccess::ForObservableJSObjectOffset(offset, representation)
          : HObjectAccess::ForBackingStoreOffset(offset, representation);

  if (representation.IsDouble()) {
    if (!FLAG_unbox_double_fields || !index.is_inobject()) {
      // The double is kept boxed in a mutable HeapNumber.
      HObjectAccess heap_number_access =
          access.WithRepresentation(Representation::Tagged());
      if (transition_to_field) {
        // The store requires a mutable HeapNumber to be allocated.
        NoObservableSideEffectsScope no_side_effects(this);
        HInstruction* heap_number_size = Add<HConstant>(HeapNumber::kSize);

        // TODO(hpayer): Allocation site pretenuring support.
        HInstruction* heap_number =
            Add<HAllocate>(heap_number_size, HType::HeapObject(), NOT_TENURED,
                           MUTABLE_HEAP_NUMBER_TYPE, graph()->GetConstant0());
        AddStoreMapConstant(heap_number,
                            isolate()->factory()->mutable_heap_number_map());
        Add<HStoreNamedField>(heap_number, HObjectAccess::ForHeapNumberValue(),
                              value);
        // Store the new mutable heap number into the object.
        access = heap_number_access;
        value = heap_number;
      } else {
        // Load the heap number.
        object = Add<HLoadNamedField>(object, nullptr, heap_number_access);
        // Store the double value into it.
        access = HObjectAccess::ForHeapNumberValue();
      }
    }
  } else if (representation.IsHeapObject()) {
    BuildCheckHeapObject(value);
  }

  Add<HStoreNamedField>(object, access, value, INITIALIZING_STORE);
}
1193
1194
1195template <>
1196HValue* CodeStubGraphBuilder<StoreFieldStub>::BuildCodeStub() {
1197 BuildStoreNamedField(GetParameter(0), GetParameter(2), casted_stub()->index(),
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001198 casted_stub()->representation(), false);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001199 return GetParameter(2);
1200}
1201
1202
// Generate this stub's code through the shared Hydrogen pipeline.
Handle<Code> StoreFieldStub::GenerateCode() { return DoGenerateCode(this); }
1204
1205
template <>
HValue* CodeStubGraphBuilder<StoreTransitionStub>::BuildCodeStub() {
  // Stores a value and/or installs a transitioned map on the receiver,
  // optionally extending the out-of-object properties backing store first.
  // The switch cases deliberately fall through: each store mode performs its
  // own step plus every step of the less-involved modes below it.
  HValue* object = GetParameter(StoreTransitionHelper::ReceiverIndex());

  switch (casted_stub()->store_mode()) {
    case StoreTransitionStub::ExtendStorageAndStoreMapAndValue: {
      HValue* properties = Add<HLoadNamedField>(
          object, nullptr, HObjectAccess::ForPropertiesPointer());
      HValue* length = AddLoadFixedArrayLength(properties);
      HValue* delta =
          Add<HConstant>(static_cast<int32_t>(JSObject::kFieldsAdded));
      HValue* new_capacity = AddUncasted<HAdd>(length, delta);

      // Grow properties array. The bounds check keeps the new backing store
      // within the regular-heap-object size limit.
      ElementsKind kind = FAST_ELEMENTS;
      Add<HBoundsCheck>(new_capacity,
                        Add<HConstant>((Page::kMaxRegularHeapObjectSize -
                                        FixedArray::kHeaderSize) >>
                                       ElementsKindToShiftSize(kind)));

      // Reuse this code for properties backing store allocation.
      HValue* new_properties =
          BuildAllocateAndInitializeArray(kind, new_capacity);

      BuildCopyProperties(properties, new_properties, length, new_capacity);

      Add<HStoreNamedField>(object, HObjectAccess::ForPropertiesPointer(),
                            new_properties);
    }
    // Fall through.
    case StoreTransitionStub::StoreMapAndValue:
      // Store the new value into the "extended" object.
      BuildStoreNamedField(
          object, GetParameter(StoreTransitionHelper::ValueIndex()),
          casted_stub()->index(), casted_stub()->representation(), true);
      // Fall through.

    case StoreTransitionStub::StoreMapOnly:
      // And finally update the map.
      Add<HStoreNamedField>(object, HObjectAccess::ForMap(),
                            GetParameter(StoreTransitionHelper::MapIndex()));
      break;
  }
  // Store stubs return the value parameter.
  return GetParameter(StoreTransitionHelper::ValueIndex());
}
1251
1252
// Generate this stub's code through the shared Hydrogen pipeline.
Handle<Code> StoreTransitionStub::GenerateCode() {
  return DoGenerateCode(this);
}
1256
1257
1258template <>
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001259HValue* CodeStubGraphBuilder<StoreFastElementStub>::BuildCodeStub() {
1260 BuildUncheckedMonomorphicElementAccess(
1261 GetParameter(StoreDescriptor::kReceiverIndex),
1262 GetParameter(StoreDescriptor::kNameIndex),
1263 GetParameter(StoreDescriptor::kValueIndex), casted_stub()->is_js_array(),
1264 casted_stub()->elements_kind(), STORE, NEVER_RETURN_HOLE,
1265 casted_stub()->store_mode());
1266
1267 return GetParameter(2);
1268}
1269
1270
// Generate this stub's code through the shared Hydrogen pipeline.
Handle<Code> StoreFastElementStub::GenerateCode() {
  return DoGenerateCode(this);
}
1274
1275
1276template <>
1277HValue* CodeStubGraphBuilder<TransitionElementsKindStub>::BuildCodeStub() {
1278 info()->MarkAsSavesCallerDoubles();
1279
1280 BuildTransitionElementsKind(GetParameter(0),
1281 GetParameter(1),
1282 casted_stub()->from_kind(),
1283 casted_stub()->to_kind(),
1284 casted_stub()->is_js_array());
1285
1286 return GetParameter(0);
1287}
1288
1289
// Generate this stub's code through the shared Hydrogen pipeline.
Handle<Code> TransitionElementsKindStub::GenerateCode() {
  return DoGenerateCode(this);
}
1293
HValue* CodeStubGraphBuilderBase::BuildArrayConstructor(
    ElementsKind kind,
    AllocationSiteOverrideMode override_mode,
    ArgumentClass argument_class) {
  // Common tail of the Array constructor stubs: allocates a JSArray of the
  // given elements kind, dispatching on the stub's argument arity.
  HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor);
  HValue* alloc_site = GetParameter(ArrayConstructorStubBase::kAllocationSite);
  JSArrayBuilder array_builder(this, kind, alloc_site, constructor,
                               override_mode);
  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      // This stub is very performance sensitive, the generated code must be
      // tuned so that it doesn't build an eager frame.
      info()->MarkMustNotHaveEagerFrame();
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }

  return result;
}
1320
1321
HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor(
    ElementsKind kind, ArgumentClass argument_class) {
  // Common tail of the InternalArray constructor stubs. Like
  // BuildArrayConstructor, but without an allocation site.
  HValue* constructor = GetParameter(
      InternalArrayConstructorStubBase::kConstructor);
  JSArrayBuilder array_builder(this, kind, constructor);

  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      // This stub is very performance sensitive, the generated code must be
      // tuned so that it doesn't build an eager frame.
      info()->MarkMustNotHaveEagerFrame();
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }
  return result;
}
1345
1346
1347HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor(
1348 JSArrayBuilder* array_builder) {
1349 // Smi check and range check on the input arg.
1350 HValue* constant_one = graph()->GetConstant1();
1351 HValue* constant_zero = graph()->GetConstant0();
1352
1353 HInstruction* elements = Add<HArgumentsElements>(false);
1354 HInstruction* argument = Add<HAccessArgumentsAt>(
1355 elements, constant_one, constant_zero);
1356
1357 return BuildAllocateArrayFromLength(array_builder, argument);
1358}
1359
1360
HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor(
    JSArrayBuilder* array_builder, ElementsKind kind) {
  // Allocates a JSArray sized for the actual argument count and copies the
  // arguments into its elements store.
  //
  // Insert a bounds check because the number of arguments might exceed
  // the kInitialMaxFastElementArray limit. This cannot happen for code
  // that was parsed, but calling via Array.apply(thisArg, [...]) might
  // trigger it.
  HValue* length = GetArgumentsLength();
  HConstant* max_alloc_length =
      Add<HConstant>(JSArray::kInitialMaxFastElementArray);
  HValue* checked_length = Add<HBoundsCheck>(length, max_alloc_length);

  // We need to fill with the hole if it's a smi array in the multi-argument
  // case because we might have to bail out while copying arguments into
  // the array because they aren't compatible with a smi array.
  // If it's a double array, no problem, and if it's fast then no
  // problem either because doubles are boxed.
  //
  // TODO(mvstanton): consider an instruction to memset fill the array
  // with zero in this case instead.
  JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind)
      ? JSArrayBuilder::FILL_WITH_HOLE
      : JSArrayBuilder::DONT_FILL_WITH_HOLE;
  HValue* new_object = array_builder->AllocateArray(checked_length,
                                                    checked_length,
                                                    fill_mode);
  HValue* elements = array_builder->GetElementsLocation();
  DCHECK(elements != NULL);

  // Now populate the elements correctly.
  LoopBuilder builder(this,
                      context(),
                      LoopBuilder::kPostIncrement);
  HValue* start = graph()->GetConstant0();
  HValue* key = builder.BeginBody(start, checked_length, Token::LT);
  HInstruction* argument_elements = Add<HArgumentsElements>(false);
  HInstruction* argument = Add<HAccessArgumentsAt>(
      argument_elements, checked_length, key);

  Add<HStoreKeyed>(elements, key, argument, nullptr, kind);
  builder.EndBody();
  return new_object;
}
1403
1404
1405template <>
1406HValue* CodeStubGraphBuilder<ArrayNoArgumentConstructorStub>::BuildCodeStub() {
1407 ElementsKind kind = casted_stub()->elements_kind();
1408 AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
1409 return BuildArrayConstructor(kind, override_mode, NONE);
1410}
1411
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001412template <>
1413HValue* CodeStubGraphBuilder<ArraySingleArgumentConstructorStub>::
1414 BuildCodeStub() {
1415 ElementsKind kind = casted_stub()->elements_kind();
1416 AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
1417 return BuildArrayConstructor(kind, override_mode, SINGLE);
1418}
1419
1420
// Generate this stub's code through the shared Hydrogen pipeline.
Handle<Code> ArraySingleArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}
1424
1425
1426template <>
1427HValue* CodeStubGraphBuilder<ArrayNArgumentsConstructorStub>::BuildCodeStub() {
1428 ElementsKind kind = casted_stub()->elements_kind();
1429 AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
1430 return BuildArrayConstructor(kind, override_mode, MULTIPLE);
1431}
1432
1433
// Generate this stub's code through the shared Hydrogen pipeline.
Handle<Code> ArrayNArgumentsConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}
1437
1438
1439template <>
1440HValue* CodeStubGraphBuilder<InternalArrayNoArgumentConstructorStub>::
1441 BuildCodeStub() {
1442 ElementsKind kind = casted_stub()->elements_kind();
1443 return BuildInternalArrayConstructor(kind, NONE);
1444}
1445
1446
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001447template <>
1448HValue* CodeStubGraphBuilder<InternalArraySingleArgumentConstructorStub>::
1449 BuildCodeStub() {
1450 ElementsKind kind = casted_stub()->elements_kind();
1451 return BuildInternalArrayConstructor(kind, SINGLE);
1452}
1453
1454
// Generate this stub's code through the shared Hydrogen pipeline.
Handle<Code> InternalArraySingleArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}
1458
1459
1460template <>
1461HValue* CodeStubGraphBuilder<InternalArrayNArgumentsConstructorStub>::
1462 BuildCodeStub() {
1463 ElementsKind kind = casted_stub()->elements_kind();
1464 return BuildInternalArrayConstructor(kind, MULTIPLE);
1465}
1466
1467
// Generate this stub's code through the shared Hydrogen pipeline.
Handle<Code> InternalArrayNArgumentsConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}
1471
1472
template <>
HValue* CodeStubGraphBuilder<BinaryOpICStub>::BuildCodeInitializedStub() {
  // Builds a binary operation specialized with the type feedback recorded in
  // the stub's BinaryOpICState. For a generic ADD where one side might be a
  // string, a runtime string check selects a string-typed fast path.
  BinaryOpICState state = casted_stub()->state();

  HValue* left = GetParameter(BinaryOpICStub::kLeft);
  HValue* right = GetParameter(BinaryOpICStub::kRight);

  Type* left_type = state.GetLeftType();
  Type* right_type = state.GetRightType();
  Type* result_type = state.GetResultType();

  DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) &&
         (state.HasSideEffects() || !result_type->Is(Type::None())));

  HValue* result = NULL;
  HAllocationMode allocation_mode(NOT_TENURED);
  if (state.op() == Token::ADD &&
      (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) &&
      !left_type->Is(Type::String()) && !right_type->Is(Type::String())) {
    // For the generic add stub a fast case for string addition is performance
    // critical.
    if (left_type->Maybe(Type::String())) {
      IfBuilder if_leftisstring(this);
      if_leftisstring.If<HIsStringAndBranch>(left);
      if_leftisstring.Then();
      {
        // Left is a string at runtime: narrow its type to String.
        Push(BuildBinaryOperation(state.op(), left, right, Type::String(),
                                  right_type, result_type,
                                  state.fixed_right_arg(), allocation_mode));
      }
      if_leftisstring.Else();
      {
        Push(BuildBinaryOperation(state.op(), left, right, left_type,
                                  right_type, result_type,
                                  state.fixed_right_arg(), allocation_mode));
      }
      if_leftisstring.End();
      result = Pop();
    } else {
      IfBuilder if_rightisstring(this);
      if_rightisstring.If<HIsStringAndBranch>(right);
      if_rightisstring.Then();
      {
        // Right is a string at runtime: narrow its type to String.
        Push(BuildBinaryOperation(state.op(), left, right, left_type,
                                  Type::String(), result_type,
                                  state.fixed_right_arg(), allocation_mode));
      }
      if_rightisstring.Else();
      {
        Push(BuildBinaryOperation(state.op(), left, right, left_type,
                                  right_type, result_type,
                                  state.fixed_right_arg(), allocation_mode));
      }
      if_rightisstring.End();
      result = Pop();
    }
  } else {
    result = BuildBinaryOperation(state.op(), left, right, left_type,
                                  right_type, result_type,
                                  state.fixed_right_arg(), allocation_mode);
  }

  // If we encounter a generic argument, the number conversion is
  // observable, thus we cannot afford to bail out after the fact.
  if (!state.HasSideEffects()) {
    result = EnforceNumberType(result, result_type);
  }

  return result;
}
1543
1544
// Generate this stub's code through the shared Hydrogen pipeline.
Handle<Code> BinaryOpICStub::GenerateCode() {
  return DoGenerateCode(this);
}
1548
1549
1550template <>
1551HValue* CodeStubGraphBuilder<BinaryOpWithAllocationSiteStub>::BuildCodeStub() {
1552 BinaryOpICState state = casted_stub()->state();
1553
1554 HValue* allocation_site = GetParameter(
1555 BinaryOpWithAllocationSiteStub::kAllocationSite);
1556 HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft);
1557 HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight);
1558
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001559 Type* left_type = state.GetLeftType();
1560 Type* right_type = state.GetRightType();
1561 Type* result_type = state.GetResultType();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001562 HAllocationMode allocation_mode(allocation_site);
1563
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001564 return BuildBinaryOperation(state.op(), left, right, left_type, right_type,
1565 result_type, state.fixed_right_arg(),
Ben Murdoch097c5b22016-05-18 11:27:45 +01001566 allocation_mode);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001567}
1568
1569
1570Handle<Code> BinaryOpWithAllocationSiteStub::GenerateCode() {
1571 return DoGenerateCode(this);
1572}
1573
1574
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001575HValue* CodeStubGraphBuilderBase::BuildToString(HValue* input, bool convert) {
1576 if (!convert) return BuildCheckString(input);
1577 IfBuilder if_inputissmi(this);
1578 HValue* inputissmi = if_inputissmi.If<HIsSmiAndBranch>(input);
1579 if_inputissmi.Then();
1580 {
1581 // Convert the input smi to a string.
1582 Push(BuildNumberToString(input, Type::SignedSmall()));
1583 }
1584 if_inputissmi.Else();
1585 {
1586 HValue* input_map =
1587 Add<HLoadNamedField>(input, inputissmi, HObjectAccess::ForMap());
1588 HValue* input_instance_type = Add<HLoadNamedField>(
1589 input_map, inputissmi, HObjectAccess::ForMapInstanceType());
1590 IfBuilder if_inputisstring(this);
1591 if_inputisstring.If<HCompareNumericAndBranch>(
1592 input_instance_type, Add<HConstant>(FIRST_NONSTRING_TYPE), Token::LT);
1593 if_inputisstring.Then();
1594 {
1595 // The input is already a string.
1596 Push(input);
1597 }
1598 if_inputisstring.Else();
1599 {
1600 // Convert to primitive first (if necessary), see
1601 // ES6 section 12.7.3 The Addition operator.
1602 IfBuilder if_inputisprimitive(this);
1603 STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE);
1604 if_inputisprimitive.If<HCompareNumericAndBranch>(
1605 input_instance_type, Add<HConstant>(LAST_PRIMITIVE_TYPE), Token::LTE);
1606 if_inputisprimitive.Then();
1607 {
1608 // The input is already a primitive.
1609 Push(input);
1610 }
1611 if_inputisprimitive.Else();
1612 {
1613 // Convert the input to a primitive.
1614 Push(BuildToPrimitive(input, input_map));
1615 }
1616 if_inputisprimitive.End();
1617 // Convert the primitive to a string value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001618 ToStringStub stub(isolate());
1619 HValue* values[] = {context(), Pop()};
Ben Murdochc5610432016-08-08 18:44:38 +01001620 Push(AddUncasted<HCallWithDescriptor>(Add<HConstant>(stub.GetCode()), 0,
1621 stub.GetCallInterfaceDescriptor(),
1622 ArrayVector(values)));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001623 }
1624 if_inputisstring.End();
1625 }
1626 if_inputissmi.End();
1627 return Pop();
1628}
1629
1630
// Builds graph code for ToPrimitive on a receiver |input| whose map is
// |input_map|. Fast path: unmodified String wrappers yield their wrapped
// string; everything else falls back to the %ToPrimitive runtime function.
HValue* CodeStubGraphBuilderBase::BuildToPrimitive(HValue* input,
                                                   HValue* input_map) {
  // Get the native context of the caller.
  HValue* native_context = BuildGetNativeContext();

  // Determine the initial map of the %ObjectPrototype%.
  HValue* object_function_prototype_map =
      Add<HLoadNamedField>(native_context, nullptr,
                           HObjectAccess::ForContextSlot(
                               Context::OBJECT_FUNCTION_PROTOTYPE_MAP_INDEX));

  // Determine the initial map of the %StringPrototype%.
  HValue* string_function_prototype_map =
      Add<HLoadNamedField>(native_context, nullptr,
                           HObjectAccess::ForContextSlot(
                               Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));

  // Determine the initial map of the String function.
  HValue* string_function = Add<HLoadNamedField>(
      native_context, nullptr,
      HObjectAccess::ForContextSlot(Context::STRING_FUNCTION_INDEX));
  HValue* string_function_initial_map = Add<HLoadNamedField>(
      string_function, nullptr, HObjectAccess::ForPrototypeOrInitialMap());

  // Determine the map of the [[Prototype]] of {input}.
  HValue* input_prototype =
      Add<HLoadNamedField>(input_map, nullptr, HObjectAccess::ForPrototype());
  HValue* input_prototype_map =
      Add<HLoadNamedField>(input_prototype, nullptr, HObjectAccess::ForMap());

  // For string wrappers (JSValue instances with [[StringData]] internal
  // fields), we can short-circuit the ToPrimitive if
  //
  // (a) the {input} map matches the initial map of the String function,
  // (b) the {input} [[Prototype]] is the unmodified %StringPrototype% (i.e.
  //     no one monkey-patched toString, @@toPrimitive or valueOf), and
  // (c) the %ObjectPrototype% (i.e. the [[Prototype]] of the
  //     %StringPrototype%) is also unmodified, that is no one sneaked a
  //     @@toPrimitive into the %ObjectPrototype%.
  //
  // If all these assumptions hold, we can just take the [[StringData]] value
  // and return it.
  // TODO(bmeurer): This just repairs a regression introduced by removing the
  // weird (and broken) intrinsic %_IsStringWrapperSafeForDefaultValue, which
  // was intended to do something similar to this, although less efficient and
  // wrong in the presence of @@toPrimitive. Long-term we might want to move
  // into the direction of having a ToPrimitiveStub that can do common cases
  // while staying in JavaScript land (i.e. not going to C++).
  IfBuilder if_inputisstringwrapper(this);
  if_inputisstringwrapper.If<HCompareObjectEqAndBranch>(
      input_map, string_function_initial_map);
  if_inputisstringwrapper.And();
  if_inputisstringwrapper.If<HCompareObjectEqAndBranch>(
      input_prototype_map, string_function_prototype_map);
  if_inputisstringwrapper.And();
  if_inputisstringwrapper.If<HCompareObjectEqAndBranch>(
      Add<HLoadNamedField>(Add<HLoadNamedField>(input_prototype_map, nullptr,
                                                HObjectAccess::ForPrototype()),
                           nullptr, HObjectAccess::ForMap()),
      object_function_prototype_map);
  if_inputisstringwrapper.Then();
  {
    // Fast case: return the wrapped string value directly.
    Push(BuildLoadNamedField(
        input, FieldIndex::ForInObjectOffset(JSValue::kValueOffset)));
  }
  if_inputisstringwrapper.Else();
  {
    // TODO(bmeurer): Add support for fast ToPrimitive conversion using
    // a dedicated ToPrimitiveStub.
    Add<HPushArguments>(input);
    Push(Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kToPrimitive), 1));
  }
  if_inputisstringwrapper.End();
  return Pop();
}
1706
1707
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001708template <>
1709HValue* CodeStubGraphBuilder<StringAddStub>::BuildCodeInitializedStub() {
1710 StringAddStub* stub = casted_stub();
1711 StringAddFlags flags = stub->flags();
1712 PretenureFlag pretenure_flag = stub->pretenure_flag();
1713
1714 HValue* left = GetParameter(StringAddStub::kLeft);
1715 HValue* right = GetParameter(StringAddStub::kRight);
1716
1717 // Make sure that both arguments are strings if not known in advance.
1718 if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001719 left =
1720 BuildToString(left, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001721 }
1722 if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001723 right = BuildToString(right,
1724 (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001725 }
1726
1727 return BuildStringAdd(left, right, HAllocationMode(pretenure_flag));
1728}
1729
1730
1731Handle<Code> StringAddStub::GenerateCode() {
1732 return DoGenerateCode(this);
1733}
1734
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001735template <>
Ben Murdochda12d292016-06-02 14:46:10 +01001736HValue* CodeStubGraphBuilder<ToBooleanICStub>::BuildCodeInitializedStub() {
1737 ToBooleanICStub* stub = casted_stub();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001738 IfBuilder if_true(this);
1739 if_true.If<HBranch>(GetParameter(0), stub->types());
1740 if_true.Then();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001741 if_true.Return(graph()->GetConstantTrue());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001742 if_true.Else();
1743 if_true.End();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001744 return graph()->GetConstantFalse();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001745}
1746
Ben Murdochda12d292016-06-02 14:46:10 +01001747Handle<Code> ToBooleanICStub::GenerateCode() { return DoGenerateCode(this); }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001748
// Builds the store-to-global-property stub. Optionally verifies the global
// object's map, then writes |value| into the global's PropertyCell, deopting
// whenever an assumption baked into the stub no longer holds.
template <>
HValue* CodeStubGraphBuilder<StoreGlobalStub>::BuildCodeInitializedStub() {
  StoreGlobalStub* stub = casted_stub();
  HParameter* value = GetParameter(StoreDescriptor::kValueIndex);
  if (stub->check_global()) {
    // Check that the map of the global has not changed: use a placeholder map
    // that will be replaced later with the global object's map.
    HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex);
    HValue* proxy_map =
        Add<HLoadNamedField>(proxy, nullptr, HObjectAccess::ForMap());
    HValue* global =
        Add<HLoadNamedField>(proxy_map, nullptr, HObjectAccess::ForPrototype());
    // The expected map is stored behind a weak cell so the stub does not keep
    // the map alive.
    HValue* map_cell = Add<HConstant>(isolate()->factory()->NewWeakCell(
        StoreGlobalStub::global_map_placeholder(isolate())));
    HValue* expected_map = Add<HLoadNamedField>(
        map_cell, nullptr, HObjectAccess::ForWeakCellValue());
    HValue* map =
        Add<HLoadNamedField>(global, nullptr, HObjectAccess::ForMap());
    IfBuilder map_check(this);
    map_check.IfNot<HCompareObjectEqAndBranch>(expected_map, map);
    map_check.ThenDeopt(Deoptimizer::kUnknownMap);
    map_check.End();
  }

  // The target PropertyCell is likewise referenced through a weak cell; the
  // placeholder is patched with the real cell when the stub is specialized.
  HValue* weak_cell = Add<HConstant>(isolate()->factory()->NewWeakCell(
      StoreGlobalStub::property_cell_placeholder(isolate())));
  HValue* cell = Add<HLoadNamedField>(weak_cell, nullptr,
                                      HObjectAccess::ForWeakCellValue());
  Add<HCheckHeapObject>(cell);
  HObjectAccess access = HObjectAccess::ForPropertyCellValue();
  // Load the payload of the global parameter cell. A hole indicates that the
  // cell has been invalidated and that the store must be handled by the
  // runtime.
  HValue* cell_contents = Add<HLoadNamedField>(cell, nullptr, access);

  auto cell_type = stub->cell_type();
  if (cell_type == PropertyCellType::kConstant ||
      cell_type == PropertyCellType::kUndefined) {
    // This is always valid for all states a cell can be in.
    // Constant cells only accept a store of the exact same value; anything
    // else deopts to the runtime.
    IfBuilder builder(this);
    builder.If<HCompareObjectEqAndBranch>(cell_contents, value);
    builder.Then();
    builder.ElseDeopt(
        Deoptimizer::kUnexpectedCellContentsInConstantGlobalStore);
    builder.End();
  } else {
    IfBuilder builder(this);
    HValue* hole_value = graph()->GetConstantHole();
    builder.If<HCompareObjectEqAndBranch>(cell_contents, hole_value);
    builder.Then();
    builder.Deopt(Deoptimizer::kUnexpectedCellContentsInGlobalStore);
    builder.Else();
    // When dealing with constant types, the type may be allowed to change, as
    // long as optimized code remains valid.
    if (cell_type == PropertyCellType::kConstantType) {
      switch (stub->constant_type()) {
        case PropertyCellConstantType::kSmi:
          // Smi-typed cells: store with Smi representation.
          access = access.WithRepresentation(Representation::Smi());
          break;
        case PropertyCellConstantType::kStableMap: {
          // It is sufficient here to check that the value and cell contents
          // have identical maps, no matter if they are stable or not or if they
          // are the maps that were originally in the cell or not. If optimized
          // code will deopt when a cell has a unstable map and if it has a
          // dependency on a stable map, it will deopt if the map destabilizes.
          Add<HCheckHeapObject>(value);
          Add<HCheckHeapObject>(cell_contents);
          HValue* expected_map = Add<HLoadNamedField>(cell_contents, nullptr,
                                                      HObjectAccess::ForMap());
          HValue* map =
              Add<HLoadNamedField>(value, nullptr, HObjectAccess::ForMap());
          IfBuilder map_check(this);
          map_check.IfNot<HCompareObjectEqAndBranch>(expected_map, map);
          map_check.ThenDeopt(Deoptimizer::kUnknownMap);
          map_check.End();
          access = access.WithRepresentation(Representation::HeapObject());
          break;
        }
      }
    }
    Add<HStoreNamedField>(cell, access, value);
    builder.End();
  }

  return value;
}
1835
1836
1837Handle<Code> StoreGlobalStub::GenerateCode() {
1838 return DoGenerateCode(this);
1839}
1840
1841
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001842template <>
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001843HValue* CodeStubGraphBuilder<ElementsTransitionAndStoreStub>::BuildCodeStub() {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001844 HValue* object = GetParameter(StoreTransitionHelper::ReceiverIndex());
1845 HValue* key = GetParameter(StoreTransitionHelper::NameIndex());
1846 HValue* value = GetParameter(StoreTransitionHelper::ValueIndex());
1847 HValue* map = GetParameter(StoreTransitionHelper::MapIndex());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001848
1849 if (FLAG_trace_elements_transitions) {
1850 // Tracing elements transitions is the job of the runtime.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001851 Add<HDeoptimize>(Deoptimizer::kTracingElementsTransitions,
1852 Deoptimizer::EAGER);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001853 } else {
1854 info()->MarkAsSavesCallerDoubles();
1855
1856 BuildTransitionElementsKind(object, map,
1857 casted_stub()->from_kind(),
1858 casted_stub()->to_kind(),
1859 casted_stub()->is_jsarray());
1860
1861 BuildUncheckedMonomorphicElementAccess(object, key, value,
1862 casted_stub()->is_jsarray(),
1863 casted_stub()->to_kind(),
1864 STORE, ALLOW_RETURN_HOLE,
1865 casted_stub()->store_mode());
1866 }
1867
1868 return value;
1869}
1870
1871
1872Handle<Code> ElementsTransitionAndStoreStub::GenerateCode() {
1873 return DoGenerateCode(this);
1874}
1875
1876
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001877template <>
1878HValue* CodeStubGraphBuilder<ToObjectStub>::BuildCodeStub() {
Ben Murdochda12d292016-06-02 14:46:10 +01001879 HValue* receiver = GetParameter(TypeConversionDescriptor::kArgumentIndex);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001880 return BuildToObject(receiver);
1881}
1882
1883
1884Handle<Code> ToObjectStub::GenerateCode() { return DoGenerateCode(this); }
1885
1886
// Builds the fast path for closure creation: allocates a JSFunction in new
// space and initializes all of its fields from the given SharedFunctionInfo,
// wiring the code entry to the lazy-compile builtin.
template<>
HValue* CodeStubGraphBuilder<FastNewClosureStub>::BuildCodeStub() {
  Counters* counters = isolate()->counters();
  Factory* factory = isolate()->factory();
  HInstruction* empty_fixed_array =
      Add<HConstant>(factory->empty_fixed_array());
  // Parameter 0 is the SharedFunctionInfo of the closure to create.
  HValue* shared_info = GetParameter(0);

  AddIncrementCounter(counters->fast_new_closure_total());

  // Create a new closure from the given function info in new space
  HValue* size = Add<HConstant>(JSFunction::kSize);
  HInstruction* js_function =
      Add<HAllocate>(size, HType::JSObject(), NOT_TENURED, JS_FUNCTION_TYPE,
                     graph()->GetConstant0());

  // The function map depends on the language mode and function kind baked
  // into the stub.
  int map_index = Context::FunctionMapIndex(casted_stub()->language_mode(),
                                            casted_stub()->kind());

  // Compute the function map in the current native context and set that
  // as the map of the allocated object.
  HInstruction* native_context = BuildGetNativeContext();
  HInstruction* map_slot_value = Add<HLoadNamedField>(
      native_context, nullptr, HObjectAccess::ForContextSlot(map_index));
  Add<HStoreNamedField>(js_function, HObjectAccess::ForMap(), map_slot_value);

  // Initialize the rest of the function.
  Add<HStoreNamedField>(js_function, HObjectAccess::ForPropertiesPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForElementsPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForPrototypeOrInitialMap(),
                        graph()->GetConstantHole());
  Add<HStoreNamedField>(
      js_function, HObjectAccess::ForSharedFunctionInfoPointer(), shared_info);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForFunctionContextPointer(),
                        context());

  // Point the code entry at the lazy-compile builtin; real code is installed
  // on first call.
  Handle<Code> lazy_builtin(
      isolate()->builtins()->builtin(Builtins::kCompileLazy));
  HConstant* lazy = Add<HConstant>(lazy_builtin);
  Add<HStoreCodeEntry>(js_function, lazy);
  Add<HStoreNamedField>(js_function,
                        HObjectAccess::ForNextFunctionLinkPointer(),
                        graph()->GetConstantUndefined());

  return js_function;
}
1937
1938
1939Handle<Code> FastNewClosureStub::GenerateCode() {
1940 return DoGenerateCode(this);
1941}
1942
1943
// Builds the fast path for function-context allocation: allocates a
// fixed-array-shaped context in new space, fills in the standard slots
// (closure, previous context, extension, native context) and initializes the
// remaining local slots to undefined.
template<>
HValue* CodeStubGraphBuilder<FastNewContextStub>::BuildCodeStub() {
  // Total slot count = stub-specified locals plus the fixed header slots.
  int length = casted_stub()->slots() + Context::MIN_CONTEXT_SLOTS;

  // Get the function.
  HParameter* function = GetParameter(FastNewContextStub::kFunction);

  // Allocate the context in new space.
  HAllocate* function_context = Add<HAllocate>(
      Add<HConstant>(length * kPointerSize + FixedArray::kHeaderSize),
      HType::HeapObject(), NOT_TENURED, FIXED_ARRAY_TYPE,
      graph()->GetConstant0());

  // Set up the object header.
  AddStoreMapConstant(function_context,
                      isolate()->factory()->function_context_map());
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForFixedArrayLength(),
                        Add<HConstant>(length));

  // Set up the fixed slots.
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::CLOSURE_INDEX),
                        function);
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX),
                        context());
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::EXTENSION_INDEX),
                        graph()->GetConstantHole());

  // Copy the native context from the previous context.
  HValue* native_context = Add<HLoadNamedField>(
      context(), nullptr,
      HObjectAccess::ForContextSlot(Context::NATIVE_CONTEXT_INDEX));
  Add<HStoreNamedField>(function_context, HObjectAccess::ForContextSlot(
                                              Context::NATIVE_CONTEXT_INDEX),
                        native_context);

  // Initialize the rest of the slots to undefined.
  for (int i = Context::MIN_CONTEXT_SLOTS; i < length; ++i) {
    Add<HStoreNamedField>(function_context,
                          HObjectAccess::ForContextSlot(i),
                          graph()->GetConstantUndefined());
  }

  return function_context;
}
1992
1993
1994Handle<Code> FastNewContextStub::GenerateCode() {
1995 return DoGenerateCode(this);
1996}
1997
1998
1999template <>
2000HValue* CodeStubGraphBuilder<LoadDictionaryElementStub>::BuildCodeStub() {
2001 HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
2002 HValue* key = GetParameter(LoadDescriptor::kNameIndex);
2003
2004 Add<HCheckSmi>(key);
2005
2006 HValue* elements = AddLoadElements(receiver);
2007
2008 HValue* hash = BuildElementIndexHash(key);
2009
Ben Murdoch097c5b22016-05-18 11:27:45 +01002010 return BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002011}
2012
2013
2014Handle<Code> LoadDictionaryElementStub::GenerateCode() {
2015 return DoGenerateCode(this);
2016}
2017
2018
2019template<>
2020HValue* CodeStubGraphBuilder<RegExpConstructResultStub>::BuildCodeStub() {
2021 // Determine the parameters.
2022 HValue* length = GetParameter(RegExpConstructResultStub::kLength);
2023 HValue* index = GetParameter(RegExpConstructResultStub::kIndex);
2024 HValue* input = GetParameter(RegExpConstructResultStub::kInput);
2025
2026 info()->MarkMustNotHaveEagerFrame();
2027
2028 return BuildRegExpConstructResult(length, index, input);
2029}
2030
2031
2032Handle<Code> RegExpConstructResultStub::GenerateCode() {
2033 return DoGenerateCode(this);
2034}
2035
2036
// Full specialization of the graph builder for KeyedLoadGenericStub: unlike
// the generic template it declares extra helpers shared by the element-kind
// dispatch in BuildCodeStub.
template <>
class CodeStubGraphBuilder<KeyedLoadGenericStub>
    : public CodeStubGraphBuilderBase {
 public:
  explicit CodeStubGraphBuilder(CompilationInfo* info, CodeStub* stub)
      : CodeStubGraphBuilderBase(info, stub) {}

 protected:
  virtual HValue* BuildCodeStub();

  // Emits a compare of |bit_field2| against the encoded elements kind
  // following |kind| and opens |if_builder|'s Then-branch for the
  // less-than case (the caller is responsible for closing the branch).
  void BuildElementsKindLimitCheck(HGraphBuilder::IfBuilder* if_builder,
                                   HValue* bit_field2,
                                   ElementsKind kind);

  // Emits a fast element load for elements kind |kind|, dispatching on
  // |instance_type| to distinguish JSArray receivers from other objects.
  void BuildFastElementLoad(HGraphBuilder::IfBuilder* if_builder,
                            HValue* receiver,
                            HValue* key,
                            HValue* instance_type,
                            HValue* bit_field2,
                            ElementsKind kind);

  // Downcasts the generic CodeStub* held by the base class.
  KeyedLoadGenericStub* casted_stub() {
    return static_cast<KeyedLoadGenericStub*>(stub());
  }
};
2062
2063
2064void CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildElementsKindLimitCheck(
2065 HGraphBuilder::IfBuilder* if_builder, HValue* bit_field2,
2066 ElementsKind kind) {
2067 ElementsKind next_kind = static_cast<ElementsKind>(kind + 1);
2068 HValue* kind_limit = Add<HConstant>(
2069 static_cast<int>(Map::ElementsKindBits::encode(next_kind)));
2070
2071 if_builder->If<HCompareNumericAndBranch>(bit_field2, kind_limit, Token::LT);
2072 if_builder->Then();
2073}
2074
2075
2076void CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildFastElementLoad(
2077 HGraphBuilder::IfBuilder* if_builder, HValue* receiver, HValue* key,
2078 HValue* instance_type, HValue* bit_field2, ElementsKind kind) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002079 BuildElementsKindLimitCheck(if_builder, bit_field2, kind);
2080
2081 IfBuilder js_array_check(this);
2082 js_array_check.If<HCompareNumericAndBranch>(
2083 instance_type, Add<HConstant>(JS_ARRAY_TYPE), Token::EQ);
2084 js_array_check.Then();
2085 Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
2086 true, kind,
2087 LOAD, NEVER_RETURN_HOLE,
2088 STANDARD_STORE));
2089 js_array_check.Else();
2090 Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
2091 false, kind,
2092 LOAD, NEVER_RETURN_HOLE,
2093 STANDARD_STORE));
2094 js_array_check.End();
2095}
2096
2097
// Builds the megamorphic keyed-load stub: first splits on whether the key is
// an integer index or a unique name, then dispatches over the receiver's
// elements kind (index case) or property storage mode (name case), pushing
// the loaded value and returning it via Pop().
HValue* CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
  HValue* key = GetParameter(LoadDescriptor::kNameIndex);
  // Split into a smi/integer case and unique string case.
  HIfContinuation index_name_split_continuation(graph()->CreateBasicBlock(),
                                                graph()->CreateBasicBlock());

  BuildKeyedIndexCheck(key, &index_name_split_continuation);

  IfBuilder index_name_split(this, &index_name_split_continuation);
  index_name_split.Then();
  {
    // Key is an index (number)
    key = Pop();

    // Receivers needing access checks or with indexed interceptors are not
    // handled here.
    int bit_field_mask = (1 << Map::kIsAccessCheckNeeded) |
      (1 << Map::kHasIndexedInterceptor);
    BuildJSObjectCheck(receiver, bit_field_mask);

    HValue* map =
        Add<HLoadNamedField>(receiver, nullptr, HObjectAccess::ForMap());

    HValue* instance_type =
        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapInstanceType());

    HValue* bit_field2 =
        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField2());

    // Dispatch over the elements kind encoded in bit_field2, from fast
    // kinds to dictionary to sloppy-arguments kinds.
    IfBuilder kind_if(this);
    BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                         FAST_HOLEY_ELEMENTS);

    kind_if.Else();
    {
      BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                           FAST_HOLEY_DOUBLE_ELEMENTS);
    }
    kind_if.Else();

    // The DICTIONARY_ELEMENTS check generates a "kind_if.Then"
    BuildElementsKindLimitCheck(&kind_if, bit_field2, DICTIONARY_ELEMENTS);
    {
      HValue* elements = AddLoadElements(receiver);

      HValue* hash = BuildElementIndexHash(key);

      Push(BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash));
    }
    kind_if.Else();

    // The SLOW_SLOPPY_ARGUMENTS_ELEMENTS check generates a "kind_if.Then"
    STATIC_ASSERT(FAST_SLOPPY_ARGUMENTS_ELEMENTS <
                  SLOW_SLOPPY_ARGUMENTS_ELEMENTS);
    BuildElementsKindLimitCheck(&kind_if, bit_field2,
                                SLOW_SLOPPY_ARGUMENTS_ELEMENTS);
    // Non-strict elements are not handled.
    Add<HDeoptimize>(Deoptimizer::kNonStrictElementsInKeyedLoadGenericStub,
                     Deoptimizer::EAGER);
    // Keep the environment stack balanced even though this path deopts.
    Push(graph()->GetConstant0());

    kind_if.ElseDeopt(
        Deoptimizer::kElementsKindUnhandledInKeyedLoadGenericStub);

    kind_if.End();
  }
  index_name_split.Else();
  {
    // Key is a unique string.
    key = Pop();

    // Receivers needing access checks or with named interceptors are not
    // handled here.
    int bit_field_mask = (1 << Map::kIsAccessCheckNeeded) |
        (1 << Map::kHasNamedInterceptor);
    BuildJSObjectCheck(receiver, bit_field_mask);

    HIfContinuation continuation;
    BuildTestForDictionaryProperties(receiver, &continuation);
    IfBuilder if_dict_properties(this, &continuation);
    if_dict_properties.Then();
    {
      //  Key is string, properties are dictionary mode
      BuildNonGlobalObjectCheck(receiver);

      HValue* properties = Add<HLoadNamedField>(
          receiver, nullptr, HObjectAccess::ForPropertiesPointer());

      // Probe the property dictionary with the name's hash.
      HValue* hash =
          Add<HLoadNamedField>(key, nullptr, HObjectAccess::ForNameHashField());

      hash = AddUncasted<HShr>(hash, Add<HConstant>(Name::kHashShift));

      HValue* value =
          BuildUncheckedDictionaryElementLoad(receiver, properties, key, hash);
      Push(value);
    }
    if_dict_properties.Else();
    {
      // TODO(dcarney): don't use keyed lookup cache, but convert to use
      // megamorphic stub cache.
      // NOTE: this fast-properties path is dead; everything below the
      // UNREACHABLE() is never emitted.
      UNREACHABLE();
      //  Key is string, properties are fast mode
      HValue* hash = BuildKeyedLookupCacheHash(receiver, key);

      ExternalReference cache_keys_ref =
          ExternalReference::keyed_lookup_cache_keys(isolate());
      HValue* cache_keys = Add<HConstant>(cache_keys_ref);

      HValue* map =
          Add<HLoadNamedField>(receiver, nullptr, HObjectAccess::ForMap());
      // Each cache bucket entry is a (map, key) pair, hence the * 2.
      HValue* base_index = AddUncasted<HMul>(hash, Add<HConstant>(2));
      base_index->ClearFlag(HValue::kCanOverflow);

      HIfContinuation inline_or_runtime_continuation(
          graph()->CreateBasicBlock(), graph()->CreateBasicBlock());
      {
        // Probe every entry of the bucket; any hit pushes the cached field
        // index and joins the "inline" continuation.
        IfBuilder lookup_ifs[KeyedLookupCache::kEntriesPerBucket];
        for (int probe = 0; probe < KeyedLookupCache::kEntriesPerBucket;
             ++probe) {
          IfBuilder* lookup_if = &lookup_ifs[probe];
          lookup_if->Initialize(this);
          int probe_base = probe * KeyedLookupCache::kEntryLength;
          HValue* map_index = AddUncasted<HAdd>(
              base_index,
              Add<HConstant>(probe_base + KeyedLookupCache::kMapIndex));
          map_index->ClearFlag(HValue::kCanOverflow);
          HValue* key_index = AddUncasted<HAdd>(
              base_index,
              Add<HConstant>(probe_base + KeyedLookupCache::kKeyIndex));
          key_index->ClearFlag(HValue::kCanOverflow);
          HValue* map_to_check =
              Add<HLoadKeyed>(cache_keys, map_index, nullptr, nullptr,
                              FAST_ELEMENTS, NEVER_RETURN_HOLE, 0);
          lookup_if->If<HCompareObjectEqAndBranch>(map_to_check, map);
          lookup_if->And();
          HValue* key_to_check =
              Add<HLoadKeyed>(cache_keys, key_index, nullptr, nullptr,
                              FAST_ELEMENTS, NEVER_RETURN_HOLE, 0);
          lookup_if->If<HCompareObjectEqAndBranch>(key_to_check, key);
          lookup_if->Then();
          {
            ExternalReference cache_field_offsets_ref =
                ExternalReference::keyed_lookup_cache_field_offsets(isolate());
            HValue* cache_field_offsets =
                Add<HConstant>(cache_field_offsets_ref);
            HValue* index = AddUncasted<HAdd>(hash, Add<HConstant>(probe));
            index->ClearFlag(HValue::kCanOverflow);
            HValue* property_index =
                Add<HLoadKeyed>(cache_field_offsets, index, nullptr, cache_keys,
                                INT32_ELEMENTS, NEVER_RETURN_HOLE, 0);
            Push(property_index);
          }
          lookup_if->Else();
        }
        for (int i = 0; i < KeyedLookupCache::kEntriesPerBucket; ++i) {
          lookup_ifs[i].JoinContinuation(&inline_or_runtime_continuation);
        }
      }

      IfBuilder inline_or_runtime(this, &inline_or_runtime_continuation);
      inline_or_runtime.Then();
      {
        // Found a cached index, load property inline.
        Push(Add<HLoadFieldByIndex>(receiver, Pop()));
      }
      inline_or_runtime.Else();
      {
        // KeyedLookupCache miss; call runtime.
        Add<HPushArguments>(receiver, key);
        Push(Add<HCallRuntime>(
            Runtime::FunctionForId(Runtime::kKeyedGetProperty), 2));
      }
      inline_or_runtime.End();
    }
    if_dict_properties.End();
  }
  index_name_split.End();

  return Pop();
}
2276
2277
2278Handle<Code> KeyedLoadGenericStub::GenerateCode() {
2279 return DoGenerateCode(this);
2280}
2281
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002282} // namespace internal
2283} // namespace v8