blob: 687fb1e73dc31b3e855f3c643984b55b6a950807 [file] [log] [blame]
// Copyright 2006-2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
29
30#include "ic-inl.h"
31#include "codegen-inl.h"
32#include "stub-cache.h"
33
34namespace v8 {
35namespace internal {
36
37#define __ ACCESS_MASM(masm)
38
39
// Probe one table (primary or secondary) of the stub cache.  On entry,
// 'offset' holds the precomputed, scaled entry offset and 'name' holds the
// property name.  If the entry's key matches 'name' and the cached code
// object's flags match 'flags', control jumps into the cached stub and never
// returns here.  On a miss, 'offset' is restored and execution falls through.
// Clobbers ip; 'offset' is preserved across a miss via the stack.
static void ProbeTable(MacroAssembler* masm,
                       Code::Flags flags,
                       StubCache::Table table,
                       Register name,
                       Register offset) {
  ExternalReference key_offset(SCTableReference::keyReference(table));
  ExternalReference value_offset(SCTableReference::valueReference(table));

  Label miss;

  // Save the offset on the stack so it survives the flag check below,
  // which reuses the 'offset' register as a scratch.
  __ push(offset);

  // Check that the key in the entry matches the name.
  // NOTE(review): the LSL #1 scaling here assumes the offset register is
  // already pre-scaled for the 8-byte table entries — confirm against the
  // hash computation in StubCache::GenerateProbe.
  __ mov(ip, Operand(key_offset));
  __ ldr(ip, MemOperand(ip, offset, LSL, 1));
  __ cmp(name, Operand(ip));
  __ b(ne, &miss);

  // Get the code entry from the cache.
  __ mov(ip, Operand(value_offset));
  __ ldr(offset, MemOperand(ip, offset, LSL, 1));

  // Check that the flags match what we're looking for.
  __ ldr(offset, FieldMemOperand(offset, Code::kFlagsOffset));
  __ and_(offset, offset, Operand(~Code::kFlagsNotUsedInLookup));
  __ cmp(offset, Operand(flags));
  __ b(ne, &miss);

  // Restore offset and re-load code entry from cache.
  __ pop(offset);
  __ mov(ip, Operand(value_offset));
  __ ldr(offset, MemOperand(ip, offset, LSL, 1));

  // Jump to the first instruction in the code stub (skip the Code header
  // and clear the heap-object tag).
  __ add(offset, offset, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(offset);

  // Miss: Restore offset and fall through to the caller.
  __ bind(&miss);
  __ pop(offset);
}
82
83
// Emit the stub-cache lookup: hash the (name, receiver map, flags) triple,
// probe the primary table, rehash, probe the secondary table, and fall
// through on a complete miss so the caller can enter the runtime.
// 'scratch' is clobbered; 'extra' is unused on ARM (kept for interface
// parity with other platforms).  Clobbers ip.
void StubCache::GenerateProbe(MacroAssembler* masm,
                              Code::Flags flags,
                              Register receiver,
                              Register name,
                              Register scratch,
                              Register extra) {
  Label miss;

  // Make sure that code is valid. The shifting code relies on the
  // entry size being 8.
  ASSERT(sizeof(Entry) == 8);

  // Make sure the flags does not name a specific type.
  ASSERT(Code::ExtractTypeFromFlags(flags) == 0);

  // Make sure that there are no register conflicts.
  ASSERT(!scratch.is(receiver));
  ASSERT(!scratch.is(name));

  // Check that the receiver isn't a smi (smis have no map to hash).
  __ tst(receiver, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Get the map of the receiver and compute the hash:
  //   hash = (name->hash_field + receiver->map) ^ flags, masked to the
  // primary table size (kept shifted by the heap-object tag size).
  __ ldr(scratch, FieldMemOperand(name, String::kHashFieldOffset));
  __ ldr(ip, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ add(scratch, scratch, Operand(ip));
  __ eor(scratch, scratch, Operand(flags));
  __ and_(scratch,
          scratch,
          Operand((kPrimaryTableSize - 1) << kHeapObjectTagSize));

  // Probe the primary table.
  ProbeTable(masm, flags, kPrimary, name, scratch);

  // Primary miss: Compute hash for secondary probe.
  __ sub(scratch, scratch, Operand(name));
  __ add(scratch, scratch, Operand(flags));
  __ and_(scratch,
          scratch,
          Operand((kSecondaryTableSize - 1) << kHeapObjectTagSize));

  // Probe the secondary table.
  ProbeTable(masm, flags, kSecondary, name, scratch);

  // Cache miss: Fall-through and let caller handle the miss by
  // entering the runtime system.
  __ bind(&miss);
}
133
134
// Load into 'prototype' the initial-map prototype of the global function
// stored at slot 'index' of the global context (e.g. the String or Number
// function).  The chain of loads overwrites 'prototype' at each step;
// no other registers are touched.
void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       int index,
                                                       Register prototype) {
  // Load the global or builtins object from the current context.
  __ ldr(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
  // Load the global context from the global or builtins object.
  __ ldr(prototype,
         FieldMemOperand(prototype, GlobalObject::kGlobalContextOffset));
  // Load the function from the global context.
  __ ldr(prototype, MemOperand(prototype, Context::SlotOffset(index)));
  // Load the initial map. The global functions all have initial maps.
  __ ldr(prototype,
         FieldMemOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
  __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}
151
152
153// Load a fast property out of a holder object (src). In-object properties
154// are loaded directly otherwise the property is loaded from the properties
155// fixed array.
156void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
157 Register dst, Register src,
158 JSObject* holder, int index) {
159 // Adjust for the number of properties stored in the holder.
160 index -= holder->map()->inobject_properties();
161 if (index < 0) {
162 // Get the property straight out of the holder.
163 int offset = holder->map()->instance_size() + (index * kPointerSize);
164 __ ldr(dst, FieldMemOperand(src, offset));
165 } else {
166 // Calculate the offset into the properties array.
167 int offset = index * kPointerSize + FixedArray::kHeaderSize;
168 __ ldr(dst, FieldMemOperand(src, JSObject::kPropertiesOffset));
169 __ ldr(dst, FieldMemOperand(dst, offset));
170 }
171}
172
173
// Emit code that returns (in r0) the length of a JS array receiver, or
// branches to 'miss_label' when the receiver is a smi or not a JS array.
// 'scratch' is clobbered by the type check.
void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
                                           Register receiver,
                                           Register scratch,
                                           Label* miss_label) {
  // Check that the receiver isn't a smi.
  __ tst(receiver, Operand(kSmiTagMask));
  __ b(eq, miss_label);

  // Check that the object is a JS array.
  __ CompareObjectType(receiver, scratch, scratch, JS_ARRAY_TYPE);
  __ b(ne, miss_label);

  // Load length directly from the JS array and return it in r0.
  __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
  __ Ret();
}
190
191
// Generate code to check if an object is a string. If the object is
// a string, the map's instance type is left in the scratch1 register
// and execution falls through; smis branch to 'smi' and non-string heap
// objects branch to 'non_string_object'.  scratch2 is clobbered.
static void GenerateStringCheck(MacroAssembler* masm,
                                Register receiver,
                                Register scratch1,
                                Register scratch2,
                                Label* smi,
                                Label* non_string_object) {
  // Check that the receiver isn't a smi.
  __ tst(receiver, Operand(kSmiTagMask));
  __ b(eq, smi);

  // Check that the object is a string by testing the is-not-string bit
  // of its map's instance type.
  __ ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ and_(scratch2, scratch1, Operand(kIsNotStringMask));
  // The cast is to resolve the overload for the argument of 0x0.
  __ cmp(scratch2, Operand(static_cast<int32_t>(kStringTag)));
  __ b(ne, non_string_object);
}
212
213
// Generate code to load the length from a string object and return the
// length as a smi in r0.  If the receiver object is not a string or a
// wrapped string object the execution continues at the miss label.  The
// register containing the receiver is potentially clobbered (a JSValue
// wrapper is unwrapped in place).  scratch1/scratch2 are clobbered.
void StubCompiler::GenerateLoadStringLength2(MacroAssembler* masm,
                                             Register receiver,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* miss) {
  Label check_string, check_wrapper;

  // Entry point for re-checking after unwrapping a JSValue below.
  __ bind(&check_string);
  // Check if the object is a string, leaving the instance type in the
  // scratch1 register.
  GenerateStringCheck(masm, receiver, scratch1, scratch2,
                      miss, &check_wrapper);

  // Load length directly from the string and tag it as a smi.
  __ ldr(r0, FieldMemOperand(receiver, String::kLengthOffset));
  __ mov(r0, Operand(r0, LSL, kSmiTagSize));
  __ Ret();

  // Check if the object is a JSValue wrapper.
  __ bind(&check_wrapper);
  __ cmp(scratch1, Operand(JS_VALUE_TYPE));
  __ b(ne, miss);

  // Unwrap the value in place and check if the wrapped value is a string.
  __ ldr(receiver, FieldMemOperand(receiver, JSValue::kValueOffset));
  __ b(&check_string);
}
245
246
// Emit code that returns (in r0) the prototype of a function receiver,
// branching to 'miss_label' when TryGetFunctionPrototype bails out.
// scratch1 receives the prototype; scratch2 is a scratch register.
void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
                                                 Register receiver,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  __ mov(r0, scratch1);
  __ Ret();
}
256
257
// Generate StoreField code; the value to store is passed in the r0 register.
// After executing generated code, the receiver_reg and name_reg registers
// may be clobbered.  Handles an optional map transition; when the transition
// would require growing the properties array, control tail-calls the
// StoreIC_ExtendStorage builtin instead.  On success, returns the stored
// value in r0.  'storage_extend' is currently unused on ARM (see the note
// about keyed stores below).
void StubCompiler::GenerateStoreField(MacroAssembler* masm,
                                      Builtins::Name storage_extend,
                                      JSObject* object,
                                      int index,
                                      Map* transition,
                                      Register receiver_reg,
                                      Register name_reg,
                                      Register scratch,
                                      Label* miss_label) {
  // r0 : value
  Label exit;

  // Check that the receiver isn't a smi.
  __ tst(receiver_reg, Operand(kSmiTagMask));
  __ b(eq, miss_label);

  // Check that the map of the receiver hasn't changed.
  __ ldr(scratch, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
  __ cmp(scratch, Operand(Handle<Map>(object->map())));
  __ b(ne, miss_label);

  // Perform global security token check if needed.
  if (object->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label);
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Perform map transition for the receiver if necessary.
  if ((transition != NULL) && (object->map()->unused_property_fields() == 0)) {
    // The properties must be extended before we can store the value.
    // We jump to a runtime call that extends the properties array.
    __ mov(r2, Operand(Handle<Map>(transition)));
    // Please note, if we implement keyed store for arm we need
    // to call the Builtins::KeyedStoreIC_ExtendStorage.
    Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_ExtendStorage));
    __ Jump(ic, RelocInfo::CODE_TARGET);
    return;
  }

  if (transition != NULL) {
    // Update the map of the object; no write barrier updating is
    // needed because the map is never in new space.
    __ mov(ip, Operand(Handle<Map>(transition)));
    __ str(ip, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
  }

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    __ str(r0, FieldMemOperand(receiver_reg, offset));

    // Skip updating write barrier if storing a smi.
    __ tst(r0, Operand(kSmiTagMask));
    __ b(eq, &exit);

    // Update the write barrier for the array address.
    // Pass the value being stored in the now unused name_reg.
    __ mov(name_reg, Operand(offset));
    __ RecordWrite(receiver_reg, name_reg, scratch);
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array.
    __ ldr(scratch, FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
    __ str(r0, FieldMemOperand(scratch, offset));

    // Skip updating write barrier if storing a smi.
    __ tst(r0, Operand(kSmiTagMask));
    __ b(eq, &exit);

    // Update the write barrier for the array address.
    // Ok to clobber receiver_reg and name_reg, since we return.
    __ mov(name_reg, Operand(offset));
    __ RecordWrite(scratch, name_reg, receiver_reg);
  }

  // Return the value (register r0).
  __ bind(&exit);
  __ Ret();
}
349
350
351void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
352 ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
353 Code* code = NULL;
354 if (kind == Code::LOAD_IC) {
355 code = Builtins::builtin(Builtins::LoadIC_Miss);
356 } else {
357 code = Builtins::builtin(Builtins::KeyedLoadIC_Miss);
358 }
359
360 Handle<Code> ic(code);
361 __ Jump(ic, RelocInfo::CODE_TARGET);
362}
363
364
365#undef __
366#define __ ACCESS_MASM(masm())
367
368
// Verify the prototype chain between 'object' and 'holder': check that no
// map on the chain has changed (via MacroAssembler::CheckMaps) and, for any
// global object on the chain, check that the property is still absent by
// testing its property cell against the hole.  Branches to 'miss' on any
// mismatch.  Returns the register holding the holder object.  May record a
// Failure via set_failure() when EnsurePropertyCell allocation fails; the
// caller must check for that.  Clobbers 'scratch' and ip.
Register StubCompiler::CheckPrototypes(JSObject* object,
                                       Register object_reg,
                                       JSObject* holder,
                                       Register holder_reg,
                                       Register scratch,
                                       String* name,
                                       Label* miss) {
  // Check that the maps haven't changed.
  Register result =
      masm()->CheckMaps(object, object_reg, holder, holder_reg, scratch, miss);

  // If we've skipped any global objects, it's not enough to verify
  // that their maps haven't changed.  We also need to check that the
  // property cell for the name is still the hole.
  while (object != holder) {
    if (object->IsGlobalObject()) {
      GlobalObject* global = GlobalObject::cast(object);
      Object* probe = global->EnsurePropertyCell(name);
      if (probe->IsFailure()) {
        // Allocation failure: record it and bail out; the caller checks
        // the failure state.
        set_failure(Failure::cast(probe));
        return result;
      }
      JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(probe);
      ASSERT(cell->value()->IsTheHole());
      __ mov(scratch, Operand(Handle<Object>(cell)));
      __ ldr(scratch,
             FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
      __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
      __ cmp(scratch, ip);
      __ b(ne, miss);
    }
    object = JSObject::cast(object->GetPrototype());
  }

  // Return the register containing the holder.
  return result;
}
405
406
// Emit a fast-property load: verify the receiver is a heap object whose
// prototype chain up to 'holder' is unchanged, then load field 'index'
// from the holder into r0 and return.  Branches to 'miss' on any failure.
void StubCompiler::GenerateLoadField(JSObject* object,
                                     JSObject* holder,
                                     Register receiver,
                                     Register scratch1,
                                     Register scratch2,
                                     int index,
                                     String* name,
                                     Label* miss) {
  // Check that the receiver isn't a smi.
  __ tst(receiver, Operand(kSmiTagMask));
  __ b(eq, miss);

  // Check that the maps haven't changed.
  Register reg =
      CheckPrototypes(object, receiver, holder, scratch1, scratch2, name, miss);
  GenerateFastPropertyLoad(masm(), r0, reg, holder, index);
  __ Ret();
}
425
426
// Emit a constant-property load: verify the receiver and its prototype
// chain up to 'holder', then return the known constant 'value' in r0.
// Branches to 'miss' on any failure.
void StubCompiler::GenerateLoadConstant(JSObject* object,
                                        JSObject* holder,
                                        Register receiver,
                                        Register scratch1,
                                        Register scratch2,
                                        Object* value,
                                        String* name,
                                        Label* miss) {
  // Check that the receiver isn't a smi.
  __ tst(receiver, Operand(kSmiTagMask));
  __ b(eq, miss);

  // Check that the maps haven't changed.
  Register reg =
      CheckPrototypes(object, receiver, holder, scratch1, scratch2, name, miss);

  // Return the constant value (embedded via a handle so the GC can
  // relocate it).
  __ mov(r0, Operand(Handle<Object>(value)));
  __ Ret();
}
447
448
Leon Clarkee46be812010-01-19 14:06:41 +0000449bool StubCompiler::GenerateLoadCallback(JSObject* object,
Steve Blocka7e24c12009-10-30 11:49:00 +0000450 JSObject* holder,
451 Register receiver,
452 Register name_reg,
453 Register scratch1,
454 Register scratch2,
455 AccessorInfo* callback,
456 String* name,
Leon Clarkee46be812010-01-19 14:06:41 +0000457 Label* miss,
458 Failure** failure) {
Steve Blocka7e24c12009-10-30 11:49:00 +0000459 // Check that the receiver isn't a smi.
460 __ tst(receiver, Operand(kSmiTagMask));
461 __ b(eq, miss);
462
463 // Check that the maps haven't changed.
464 Register reg =
465 CheckPrototypes(object, receiver, holder, scratch1, scratch2, name, miss);
466
467 // Push the arguments on the JS stack of the caller.
468 __ push(receiver); // receiver
469 __ push(reg); // holder
470 __ mov(ip, Operand(Handle<AccessorInfo>(callback))); // callback data
471 __ push(ip);
472 __ ldr(reg, FieldMemOperand(ip, AccessorInfo::kDataOffset));
473 __ push(reg);
474 __ push(name_reg); // name
475
476 // Do tail-call to the runtime system.
477 ExternalReference load_callback_property =
478 ExternalReference(IC_Utility(IC::kLoadCallbackProperty));
479 __ TailCallRuntime(load_callback_property, 5, 1);
Leon Clarkee46be812010-01-19 14:06:41 +0000480
481 return true;
Steve Blocka7e24c12009-10-30 11:49:00 +0000482}
483
484
// Emit an interceptor load: verify the receiver and its prototype chain,
// push (receiver, holder, name, interceptor info, interceptor data) and
// tail-call the LoadPropertyWithInterceptorForLoad runtime function.
// Branches to 'miss' on any check failure.  'lookup' is unused on ARM.
void StubCompiler::GenerateLoadInterceptor(JSObject* object,
                                           JSObject* holder,
                                           LookupResult* lookup,
                                           Register receiver,
                                           Register name_reg,
                                           Register scratch1,
                                           Register scratch2,
                                           String* name,
                                           Label* miss) {
  // Check that the receiver isn't a smi.
  __ tst(receiver, Operand(kSmiTagMask));
  __ b(eq, miss);

  // Check that the maps haven't changed.
  Register reg =
      CheckPrototypes(object, receiver, holder, scratch1, scratch2, name, miss);

  // Push the arguments on the JS stack of the caller.
  __ push(receiver);  // receiver
  __ push(reg);  // holder
  __ push(name_reg);  // name

  // The interceptor must be in old space so it can be embedded directly.
  InterceptorInfo* interceptor = holder->GetNamedInterceptor();
  ASSERT(!Heap::InNewSpace(interceptor));
  __ mov(scratch1, Operand(Handle<Object>(interceptor)));
  __ push(scratch1);
  __ ldr(scratch2, FieldMemOperand(scratch1, InterceptorInfo::kDataOffset));
  __ push(scratch2);

  // Do tail-call to the runtime system with the five pushed arguments.
  ExternalReference load_ic_property =
      ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad));
  __ TailCallRuntime(load_ic_property, 5, 1);
}
519
520
// Compile the lazy-compile trampoline: call Runtime::kLazyCompile for the
// function in r1, then tail-call the freshly compiled code.  Returns the
// generated Code object (or a failure from GetCodeWithFlags).
Object* StubCompiler::CompileLazyCompile(Code::Flags flags) {
  // ----------- S t a t e -------------
  //  -- r1: function
  //  -- lr: return address
  // -----------------------------------

  // Enter an internal frame.
  __ EnterInternalFrame();

  // Preserve the function across the runtime call.
  __ push(r1);

  // Push the function on the stack as the argument to the runtime function.
  __ push(r1);
  __ CallRuntime(Runtime::kLazyCompile, 1);

  // Calculate the entry point of the compiled code (returned in r0):
  // skip the Code header and clear the heap-object tag.
  __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));

  // Restore saved function.
  __ pop(r1);

  // Tear down temporary frame.
  __ LeaveInternalFrame();

  // Do a tail-call of the compiled function.
  __ Jump(r2);

  return GetCodeWithFlags(flags, "LazyCompileStub");
}
551
552
// Compile a call IC stub for a function stored in a field: load the field,
// verify it is a JSFunction, patch in the global proxy receiver if needed,
// and invoke it.  Falls back to the call-miss builtin on any check failure.
// Returns the generated Code object (FIELD kind).
Object* CallStubCompiler::CompileCallField(Object* object,
                                           JSObject* holder,
                                           int index,
                                           String* name) {
  // ----------- S t a t e -------------
  //  -- lr: return address
  // -----------------------------------
  Label miss;

  const int argc = arguments().immediate();

  // Get the receiver of the function from the stack into r0.
  __ ldr(r0, MemOperand(sp, argc * kPointerSize));
  // Check that the receiver isn't a smi.
  __ tst(r0, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Do the right check and compute the holder register, then load the
  // callee from the holder's field into r1.
  Register reg =
      CheckPrototypes(JSObject::cast(object), r0, holder, r3, r2, name, &miss);
  GenerateFastPropertyLoad(masm(), r1, reg, holder, index);

  // Check that the function really is a function.
  __ tst(r1, Operand(kSmiTagMask));
  __ b(eq, &miss);
  // Get the map.
  __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
  __ b(ne, &miss);

  // Patch the receiver on the stack with the global proxy if
  // necessary.
  if (object->IsGlobalObject()) {
    __ ldr(r3, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));
    __ str(r3, MemOperand(sp, argc * kPointerSize));
  }

  // Invoke the function.
  __ InvokeFunction(r1, arguments(), JUMP_FUNCTION);

  // Handle call cache miss.
  __ bind(&miss);
  Handle<Code> ic = ComputeCallMiss(arguments().immediate());
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(FIELD, name);
}
600
601
// Compile a call IC stub for a known constant function.  The receiver check
// emitted depends on 'check': a map check for ordinary receivers, or a
// value-type check (string/number/boolean, builtins only) that also
// validates the prototype chain of the corresponding wrapper function, or a
// fast-elements check for JS arrays.  Falls back to the call-miss builtin
// on any failure.  Returns the generated Code object (CONSTANT_FUNCTION).
Object* CallStubCompiler::CompileCallConstant(Object* object,
                                              JSObject* holder,
                                              JSFunction* function,
                                              String* name,
                                              CheckType check) {
  // ----------- S t a t e -------------
  //  -- lr: return address
  // -----------------------------------
  Label miss;

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));

  // Check that the receiver isn't a smi (a smi receiver is valid only for
  // the number check, which handles smis explicitly below).
  if (check != NUMBER_CHECK) {
    __ tst(r1, Operand(kSmiTagMask));
    __ b(eq, &miss);
  }

  // Make sure that it's okay not to patch the on stack receiver
  // unless we're doing a receiver map check.
  ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);

  switch (check) {
    case RECEIVER_MAP_CHECK:
      // Check that the maps haven't changed.
      CheckPrototypes(JSObject::cast(object), r1, holder, r3, r2, name, &miss);

      // Patch the receiver on the stack with the global proxy if
      // necessary.
      if (object->IsGlobalObject()) {
        __ ldr(r3, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
        __ str(r3, MemOperand(sp, argc * kPointerSize));
      }
      break;

    case STRING_CHECK:
      if (!function->IsBuiltin()) {
        // Calling non-builtins with a value as receiver requires boxing.
        __ jmp(&miss);
      } else {
        // Check that the object is a two-byte string or a symbol.
        __ CompareObjectType(r1, r2, r2, FIRST_NONSTRING_TYPE);
        __ b(hs, &miss);
        // Check that the maps starting from the prototype haven't changed.
        GenerateLoadGlobalFunctionPrototype(masm(),
                                            Context::STRING_FUNCTION_INDEX,
                                            r2);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), r2, holder, r3,
                        r1, name, &miss);
      }
      break;

    case NUMBER_CHECK: {
      if (!function->IsBuiltin()) {
        // Calling non-builtins with a value as receiver requires boxing.
        __ jmp(&miss);
      } else {
        Label fast;
        // Check that the object is a smi or a heap number.
        __ tst(r1, Operand(kSmiTagMask));
        __ b(eq, &fast);
        __ CompareObjectType(r1, r2, r2, HEAP_NUMBER_TYPE);
        __ b(ne, &miss);
        __ bind(&fast);
        // Check that the maps starting from the prototype haven't changed.
        GenerateLoadGlobalFunctionPrototype(masm(),
                                            Context::NUMBER_FUNCTION_INDEX,
                                            r2);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), r2, holder, r3,
                        r1, name, &miss);
      }
      break;
    }

    case BOOLEAN_CHECK: {
      if (!function->IsBuiltin()) {
        // Calling non-builtins with a value as receiver requires boxing.
        __ jmp(&miss);
      } else {
        Label fast;
        // Check that the object is a boolean (true or false value).
        __ LoadRoot(ip, Heap::kTrueValueRootIndex);
        __ cmp(r1, ip);
        __ b(eq, &fast);
        __ LoadRoot(ip, Heap::kFalseValueRootIndex);
        __ cmp(r1, ip);
        __ b(ne, &miss);
        __ bind(&fast);
        // Check that the maps starting from the prototype haven't changed.
        GenerateLoadGlobalFunctionPrototype(masm(),
                                            Context::BOOLEAN_FUNCTION_INDEX,
                                            r2);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), r2, holder, r3,
                        r1, name, &miss);
      }
      break;
    }

    case JSARRAY_HAS_FAST_ELEMENTS_CHECK:
      CheckPrototypes(JSObject::cast(object), r1, holder, r3, r2, name, &miss);
      // Make sure object->HasFastElements().
      // Get the elements array of the object.
      __ ldr(r3, FieldMemOperand(r1, JSObject::kElementsOffset));
      // Check that the object is in fast mode (not dictionary).
      __ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
      __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
      __ cmp(r2, ip);
      __ b(ne, &miss);
      break;

    default:
      UNREACHABLE();
  }

  // Get the function and setup the context.
  __ mov(r1, Operand(Handle<JSFunction>(function)));
  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  // Jump to the cached code (tail call).
  ASSERT(function->is_compiled());
  Handle<Code> code(function->code());
  ParameterCount expected(function->shared()->formal_parameter_count());
  __ InvokeCode(code, expected, arguments(),
                RelocInfo::CODE_TARGET, JUMP_FUNCTION);

  // Handle call cache miss.
  __ bind(&miss);
  Handle<Code> ic = ComputeCallMiss(arguments().immediate());
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code, named after the function when it has a
  // string name.
  String* function_name = NULL;
  if (function->shared()->name()->IsString()) {
    function_name = String::cast(function->shared()->name());
  }
  return GetCode(CONSTANT_FUNCTION, function_name);
}
741
742
// Compile a call IC stub for an interceptor property.  Not yet implemented
// on ARM (see TODO below): the generated stub unconditionally jumps to the
// call-miss builtin.  Returns the generated Code object (INTERCEPTOR kind).
Object* CallStubCompiler::CompileCallInterceptor(Object* object,
                                                 JSObject* holder,
                                                 String* name) {
  // ----------- S t a t e -------------
  //  -- lr: return address
  // -----------------------------------
  Label miss;

  // TODO(1224669): Implement.

  // Handle call cache miss (currently the only path).
  __ bind(&miss);
  Handle<Code> ic = ComputeCallMiss(arguments().immediate());
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(INTERCEPTOR, name);
}
761
762
// Compile a call IC stub for a function stored in a global property cell:
// verify the prototype chain, load the function from the cell, verify it is
// still the expected function (via shared-function-info when the function
// lives in new space, by direct identity otherwise), and tail-call its
// code.  Falls back to the call-miss builtin on any failure.  Returns the
// generated Code object (NORMAL kind).
Object* CallStubCompiler::CompileCallGlobal(JSObject* object,
                                            GlobalObject* holder,
                                            JSGlobalPropertyCell* cell,
                                            JSFunction* function,
                                            String* name) {
  // ----------- S t a t e -------------
  //  -- lr: return address
  // -----------------------------------
  Label miss;

  // Get the number of arguments.
  const int argc = arguments().immediate();

  // Get the receiver from the stack.
  __ ldr(r0, MemOperand(sp, argc * kPointerSize));

  // If the object is the holder then we know that it's a global
  // object which can only happen for contextual calls. In this case,
  // the receiver cannot be a smi.
  if (object != holder) {
    __ tst(r0, Operand(kSmiTagMask));
    __ b(eq, &miss);
  }

  // Check that the maps haven't changed.
  CheckPrototypes(object, r0, holder, r3, r2, name, &miss);

  // Get the value from the cell.
  __ mov(r3, Operand(Handle<JSGlobalPropertyCell>(cell)));
  __ ldr(r1, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset));

  // Check that the cell contains the same function.
  if (Heap::InNewSpace(function)) {
    // We can't embed a pointer to a function in new space so we have
    // to verify that the shared function info is unchanged. This has
    // the nice side effect that multiple closures based on the same
    // function can all use this call IC. Before we load through the
    // function, we have to verify that it still is a function.
    __ tst(r1, Operand(kSmiTagMask));
    __ b(eq, &miss);
    __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE);
    __ b(ne, &miss);

    // Check the shared function info. Make sure it hasn't changed.
    __ mov(r3, Operand(Handle<SharedFunctionInfo>(function->shared())));
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
    __ cmp(r2, r3);
    __ b(ne, &miss);
  } else {
    __ cmp(r1, Operand(Handle<JSFunction>(function)));
    __ b(ne, &miss);
  }

  // Patch the receiver on the stack with the global proxy if
  // necessary.
  if (object->IsGlobalObject()) {
    __ ldr(r3, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));
    __ str(r3, MemOperand(sp, argc * kPointerSize));
  }

  // Setup the context (function already in r1).
  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  // Jump to the cached code (tail call).
  __ IncrementCounter(&Counters::call_global_inline, 1, r2, r3);
  ASSERT(function->is_compiled());
  Handle<Code> code(function->code());
  ParameterCount expected(function->shared()->formal_parameter_count());
  __ InvokeCode(code, expected, arguments(),
                RelocInfo::CODE_TARGET, JUMP_FUNCTION);

  // Handle call cache miss.
  __ bind(&miss);
  __ IncrementCounter(&Counters::call_global_inline_miss, 1, r1, r3);
  Handle<Code> ic = ComputeCallMiss(arguments().immediate());
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(NORMAL, name);
}
843
844
// Compile a store IC stub for a named field, optionally performing a map
// transition.  Delegates the actual store to GenerateStoreField; on a miss
// restores the (clobbered) name register and jumps to the StoreIC_Miss
// builtin.  Returns the generated Code object (FIELD or MAP_TRANSITION).
Object* StoreStubCompiler::CompileStoreField(JSObject* object,
                                             int index,
                                             Map* transition,
                                             String* name) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------
  Label miss;

  // Get the receiver from the stack.
  __ ldr(r3, MemOperand(sp, 0 * kPointerSize));

  // name register might be clobbered.
  GenerateStoreField(masm(),
                     Builtins::StoreIC_ExtendStorage,
                     object,
                     index,
                     transition,
                     r3, r2, r1,
                     &miss);
  __ bind(&miss);
  __ mov(r2, Operand(Handle<String>(name)));  // restore name
  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
}
876
877
// Compile a store IC stub for a property with an accessor callback: verify
// the receiver's map (and global-proxy security token when needed), push
// (receiver, callback info, name, value) and tail-call the
// StoreCallbackProperty runtime function.  Returns the generated Code
// object (CALLBACKS kind).
Object* StoreStubCompiler::CompileStoreCallback(JSObject* object,
                                                AccessorInfo* callback,
                                                String* name) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------
  Label miss;

  // Get the object from the stack.
  __ ldr(r3, MemOperand(sp, 0 * kPointerSize));

  // Check that the object isn't a smi.
  __ tst(r3, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Check that the map of the object hasn't changed.
  __ ldr(r1, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ cmp(r1, Operand(Handle<Map>(object->map())));
  __ b(ne, &miss);

  // Perform global security token check if needed.
  if (object->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(r3, r1, &miss);
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  __ ldr(ip, MemOperand(sp));  // receiver
  __ push(ip);
  __ mov(ip, Operand(Handle<AccessorInfo>(callback)));  // callback info
  __ push(ip);
  __ push(r2);  // name
  __ push(r0);  // value

  // Do tail-call to the runtime system with the four pushed arguments.
  ExternalReference store_callback_property =
      ExternalReference(IC_Utility(IC::kStoreCallbackProperty));
  __ TailCallRuntime(store_callback_property, 4, 1);

  // Handle store cache miss.
  __ bind(&miss);
  __ mov(r2, Operand(Handle<String>(name)));  // restore name
  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(CALLBACKS, name);
}
931
932
// Generates a named-store IC stub for a receiver whose stores are handled
// by an interceptor.  The emitted code performs the same smi/map/security
// checks as the callback case, then tail-calls the
// IC::kStoreInterceptorProperty runtime entry with (receiver, name, value)
// on the stack; on any failed check it restores r2 and jumps to the
// generic StoreIC miss builtin.
// Register protocol on entry: r0 = value, r2 = name, [sp] = receiver.
// Returns the generated Code object tagged as INTERCEPTOR (or a failure).
933Object* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
934 String* name) {
935 // ----------- S t a t e -------------
936 // -- r0 : value
937 // -- r2 : name
938 // -- lr : return address
939 // -- [sp] : receiver
940 // -----------------------------------
941 Label miss;
942
943 // Get the object from the stack.
944 __ ldr(r3, MemOperand(sp, 0 * kPointerSize));
945
946 // Check that the object isn't a smi.
947 __ tst(r3, Operand(kSmiTagMask));
948 __ b(eq, &miss);
949
950 // Check that the map of the object hasn't changed.
951 __ ldr(r1, FieldMemOperand(r3, HeapObject::kMapOffset));
952 __ cmp(r1, Operand(Handle<Map>(receiver->map())));
953 __ b(ne, &miss);
954
955 // Perform global security token check if needed.
956 if (receiver->IsJSGlobalProxy()) {
957 __ CheckAccessGlobalProxy(r3, r1, &miss);
958 }
959
960 // Stub never generated for non-global objects that require access
961 // checks.
962 ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded());
963
// Push the three runtime arguments; ip is scratch for the receiver reload.
964 __ ldr(ip, MemOperand(sp)); // receiver
965 __ push(ip);
966 __ push(r2); // name
967 __ push(r0); // value
968
969 // Do tail-call to the runtime system.
970 ExternalReference store_ic_property =
971 ExternalReference(IC_Utility(IC::kStoreInterceptorProperty));
// Three stack arguments were pushed above; one result slot is expected.
972 __ TailCallRuntime(store_ic_property, 3, 1);
973
974 // Handle store cache miss.
975 __ bind(&miss);
976 __ mov(r2, Operand(Handle<String>(name))); // restore name
977 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
978 __ Jump(ic, RelocInfo::CODE_TARGET);
979
980 // Return the generated code.
981 return GetCode(INTERCEPTOR, name);
982}
983
984
// Generates a named-store IC stub that writes directly into a global
// property's JSGlobalPropertyCell.  The emitted code only checks that the
// global object's map is unchanged, stores r0 into the cell's value slot,
// bumps the named_store_global_inline counter and returns; a map mismatch
// falls through to the generic StoreIC miss builtin.
// Register protocol on entry: r0 = value, r2 = name, [sp] = receiver.
// Returns the generated Code object tagged as NORMAL (or a failure).
985Object* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
986 JSGlobalPropertyCell* cell,
987 String* name) {
988 // ----------- S t a t e -------------
989 // -- r0 : value
990 // -- r2 : name
991 // -- lr : return address
992 // -- [sp] : receiver
993 // -----------------------------------
994 Label miss;
995
996 // Check that the map of the global has not changed.
997 __ ldr(r1, MemOperand(sp, 0 * kPointerSize))
998 __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
999 __ cmp(r3, Operand(Handle<Map>(object->map())));
1000 __ b(ne, &miss);
1001
1002 // Store the value in the cell.
1003 __ mov(r2, Operand(Handle<JSGlobalPropertyCell>(cell)));
// NOTE(review): no write barrier is emitted for this store — presumably
// property cells are treated specially by the GC; confirm before changing.
1004 __ str(r0, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));
1005
1006 __ IncrementCounter(&Counters::named_store_global_inline, 1, r1, r3);
1007 __ Ret();
1008
1009 // Handle store cache miss.
1010 __ bind(&miss);
1011 __ IncrementCounter(&Counters::named_store_global_inline_miss, 1, r1, r3);
1012 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
1013 __ Jump(ic, RelocInfo::CODE_TARGET);
1014
1015 // Return the generated code.
1016 return GetCode(NORMAL, name);
1017}
1018
1019
// Generates a named-load IC stub for an in-object/backing-store field at
// |index| on |holder|, reached from |object|.  The receiver is loaded from
// the stack and the real work (prototype-chain checks plus the field load)
// is delegated to GenerateLoadField; any failed check falls through to the
// generic LoadIC miss handler.
// Register protocol on entry: r2 = name, [sp] = receiver; r0/r3/r1 are
// handed to GenerateLoadField as receiver and scratch registers.
// Returns the generated Code object tagged as FIELD (or a failure).
1020Object* LoadStubCompiler::CompileLoadField(JSObject* object,
1021 JSObject* holder,
1022 int index,
1023 String* name) {
1024 // ----------- S t a t e -------------
1025 // -- r2 : name
1026 // -- lr : return address
1027 // -- [sp] : receiver
1028 // -----------------------------------
1029 Label miss;
1030
1031 __ ldr(r0, MemOperand(sp, 0));
1032
1033 GenerateLoadField(object, holder, r0, r3, r1, index, name, &miss);
1034 __ bind(&miss);
1035 GenerateLoadMiss(masm(), Code::LOAD_IC);
1036
1037 // Return the generated code.
1038 return GetCode(FIELD, name);
1039}
1040
1041
// Generates a named-load IC stub that reads the property through a native
// getter described by |callback|.  GenerateLoadCallback performs the
// checks and the call; it can itself fail during code generation, in which
// case it reports the reason through |failure| and returns false, and that
// failure object is propagated to the caller instead of a Code object.
// Register protocol on entry: r2 = name, [sp] = receiver.
// Returns the generated Code object tagged as CALLBACKS, or a failure.
Leon Clarkee46be812010-01-19 14:06:41 +00001042Object* LoadStubCompiler::CompileLoadCallback(String* name,
1043 JSObject* object,
Steve Blocka7e24c12009-10-30 11:49:00 +00001044 JSObject* holder,
Leon Clarkee46be812010-01-19 14:06:41 +00001045 AccessorInfo* callback) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001046 // ----------- S t a t e -------------
1047 // -- r2 : name
1048 // -- lr : return address
1049 // -- [sp] : receiver
1050 // -----------------------------------
1051 Label miss;
1052
1053 __ ldr(r0, MemOperand(sp, 0));
Leon Clarkee46be812010-01-19 14:06:41 +00001054 Failure* failure = Failure::InternalError();
1055 bool success = GenerateLoadCallback(object, holder, r0, r2, r3, r1,
1056 callback, name, &miss, &failure);
// Code generation itself failed (not a runtime miss): surface the failure.
1057 if (!success) return failure;
1058
Steve Blocka7e24c12009-10-30 11:49:00 +00001059 __ bind(&miss);
1060 GenerateLoadMiss(masm(), Code::LOAD_IC);
1061
1062 // Return the generated code.
1063 return GetCode(CALLBACKS, name);
1064}
1065
1066
// Generates a named-load IC stub that returns the constant |value| (e.g. a
// constant function) after GenerateLoadConstant has validated the
// receiver/prototype chain from |object| to |holder|.  Failed checks fall
// through to the generic LoadIC miss handler.
// Register protocol on entry: r2 = name, [sp] = receiver; r0/r3/r1 are
// receiver and scratch registers for GenerateLoadConstant.
// Returns the generated Code object tagged as CONSTANT_FUNCTION.
1067Object* LoadStubCompiler::CompileLoadConstant(JSObject* object,
1068 JSObject* holder,
1069 Object* value,
1070 String* name) {
1071 // ----------- S t a t e -------------
1072 // -- r2 : name
1073 // -- lr : return address
1074 // -- [sp] : receiver
1075 // -----------------------------------
1076 Label miss;
1077
1078 __ ldr(r0, MemOperand(sp, 0));
1079
1080 GenerateLoadConstant(object, holder, r0, r3, r1, value, name, &miss);
1081 __ bind(&miss);
1082 GenerateLoadMiss(masm(), Code::LOAD_IC);
1083
1084 // Return the generated code.
1085 return GetCode(CONSTANT_FUNCTION, name);
1086}
1087
1088
// Generates a named-load IC stub for a property served by an interceptor
// on |holder|.  A LookupResult for the real (post-interceptor) property is
// computed at compile time and passed to GenerateLoadInterceptor so the
// generated code can take advantage of it; failed checks fall through to
// the generic LoadIC miss handler.
// Register protocol on entry: r2 = name, [sp] = receiver; r0/r2/r3/r1 are
// receiver, name and scratch registers for GenerateLoadInterceptor.
// Returns the generated Code object tagged as INTERCEPTOR.
1089Object* LoadStubCompiler::CompileLoadInterceptor(JSObject* object,
1090 JSObject* holder,
1091 String* name) {
1092 // ----------- S t a t e -------------
1093 // -- r2 : name
1094 // -- lr : return address
1095 // -- [sp] : receiver
1096 // -----------------------------------
1097 Label miss;
1098
1099 __ ldr(r0, MemOperand(sp, 0));
1100
// Compile-time lookup of the property behind the interceptor.
1101 LookupResult lookup;
1102 holder->LocalLookupRealNamedProperty(name, &lookup);
1103 GenerateLoadInterceptor(object,
1104 holder,
1105 &lookup,
1106 r0,
1107 r2,
1108 r3,
1109 r1,
1110 name,
1111 &miss);
1112 __ bind(&miss);
1113 GenerateLoadMiss(masm(), Code::LOAD_IC);
1114
1115 // Return the generated code.
1116 return GetCode(INTERCEPTOR, name);
1117}
1118
1119
// Generates a named-load IC stub that reads a global property directly
// from its JSGlobalPropertyCell.  After validating the prototype chain
// from |object| to the global |holder|, the cell's value is loaded; when
// the property is deletable (!is_dont_delete) the value is additionally
// compared against the hole to detect a deleted property, which forces a
// miss.  Counters track inline hits and misses.
// Register protocol on entry: r2 = name, [sp] = receiver.
// Returns the generated Code object tagged as NORMAL (or a failure).
1120Object* LoadStubCompiler::CompileLoadGlobal(JSObject* object,
1121 GlobalObject* holder,
1122 JSGlobalPropertyCell* cell,
1123 String* name,
1124 bool is_dont_delete) {
1125 // ----------- S t a t e -------------
1126 // -- r2 : name
1127 // -- lr : return address
1128 // -- [sp] : receiver
1129 // -----------------------------------
1130 Label miss;
1131
1132 // Get the receiver from the stack.
1133 __ ldr(r1, MemOperand(sp, 0 * kPointerSize));
1134
1135 // If the object is the holder then we know that it's a global
1136 // object which can only happen for contextual calls. In this case,
1137 // the receiver cannot be a smi.
1138 if (object != holder) {
1139 __ tst(r1, Operand(kSmiTagMask));
1140 __ b(eq, &miss);
1141 }
1142
1143 // Check that the map of the global has not changed.
1144 CheckPrototypes(object, r1, holder, r3, r0, name, &miss);
1145
1146 // Get the value from the cell.
1147 __ mov(r3, Operand(Handle<JSGlobalPropertyCell>(cell)));
1148 __ ldr(r0, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset));
1149
1150 // Check for deleted property if property can actually be deleted.
1151 if (!is_dont_delete) {
// A deleted global property leaves the hole in its cell.
1152 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
1153 __ cmp(r0, ip);
1154 __ b(eq, &miss);
1155 }
1156
1157 __ IncrementCounter(&Counters::named_load_global_inline, 1, r1, r3);
1158 __ Ret();
1159
1160 __ bind(&miss);
1161 __ IncrementCounter(&Counters::named_load_global_inline_miss, 1, r1, r3);
1162 GenerateLoadMiss(masm(), Code::LOAD_IC);
1163
1164 // Return the generated code.
1165 return GetCode(NORMAL, name);
1166}
1167
1168
// Generates a keyed-load IC stub specialized for the string key |name|:
// the key is compared against the cached name and, on a match, the field
// load is delegated to GenerateLoadField exactly as in the named case.
// Any mismatch or failed check falls through to the keyed-load miss
// handler.
// Stack protocol on entry: sp[0] = key, sp[4] = receiver.
// Returns the generated Code object tagged as FIELD.
1169Object* KeyedLoadStubCompiler::CompileLoadField(String* name,
1170 JSObject* receiver,
1171 JSObject* holder,
1172 int index) {
1173 // ----------- S t a t e -------------
1174 // -- lr : return address
1175 // -- sp[0] : key
1176 // -- sp[4] : receiver
1177 // -----------------------------------
1178 Label miss;
1179
// Load key into r2 and receiver into r0.
1180 __ ldr(r2, MemOperand(sp, 0));
1181 __ ldr(r0, MemOperand(sp, kPointerSize));
1182
// This stub is only valid for the one name it was compiled for.
1183 __ cmp(r2, Operand(Handle<String>(name)));
1184 __ b(ne, &miss);
1185
1186 GenerateLoadField(receiver, holder, r0, r3, r1, index, name, &miss);
1187 __ bind(&miss);
1188 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
1189
1190 return GetCode(FIELD, name);
1191}
1192
1193
// Generates a keyed-load IC stub for a property served by a native getter
// (AccessorInfo).  The key is compared against the cached |name|; on a
// match GenerateLoadCallback emits the checks and call.  If code
// generation itself fails, the failure object is returned instead of a
// Code object.
// Stack protocol on entry: sp[0] = key, sp[4] = receiver.
// Returns the generated Code object tagged as CALLBACKS, or a failure.
1194Object* KeyedLoadStubCompiler::CompileLoadCallback(String* name,
1195 JSObject* receiver,
1196 JSObject* holder,
1197 AccessorInfo* callback) {
1198 // ----------- S t a t e -------------
1199 // -- lr : return address
1200 // -- sp[0] : key
1201 // -- sp[4] : receiver
1202 // -----------------------------------
1203 Label miss;
1204
// Load key into r2 and receiver into r0.
1205 __ ldr(r2, MemOperand(sp, 0));
1206 __ ldr(r0, MemOperand(sp, kPointerSize));
1207
1208 __ cmp(r2, Operand(Handle<String>(name)));
1209 __ b(ne, &miss);
1210
Leon Clarkee46be812010-01-19 14:06:41 +00001211 Failure* failure = Failure::InternalError();
1212 bool success = GenerateLoadCallback(receiver, holder, r0, r2, r3, r1,
1213 callback, name, &miss, &failure);
// Code generation itself failed (not a runtime miss): surface the failure.
1214 if (!success) return failure;
1215
Steve Blocka7e24c12009-10-30 11:49:00 +00001216 __ bind(&miss);
1217 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
1218
1219 return GetCode(CALLBACKS, name);
1220}
1221
1222
// Generates a keyed-load IC stub that returns the constant |value| when
// the key equals the cached |name| and GenerateLoadConstant's receiver and
// prototype-chain checks pass; otherwise it falls through to the
// keyed-load miss handler.
// Stack protocol on entry: sp[0] = key, sp[4] = receiver.
// Returns the generated Code object tagged as CONSTANT_FUNCTION.
1223Object* KeyedLoadStubCompiler::CompileLoadConstant(String* name,
1224 JSObject* receiver,
1225 JSObject* holder,
1226 Object* value) {
1227 // ----------- S t a t e -------------
1228 // -- lr : return address
1229 // -- sp[0] : key
1230 // -- sp[4] : receiver
1231 // -----------------------------------
1232 Label miss;
1233
1234 // Check the key is the cached one
1235 __ ldr(r2, MemOperand(sp, 0));
1236 __ ldr(r0, MemOperand(sp, kPointerSize));
1237
1238 __ cmp(r2, Operand(Handle<String>(name)));
1239 __ b(ne, &miss);
1240
1241 GenerateLoadConstant(receiver, holder, r0, r3, r1, value, name, &miss);
1242 __ bind(&miss);
1243 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
1244
1245 // Return the generated code.
1246 return GetCode(CONSTANT_FUNCTION, name);
1247}
1248
1249
// Generates a keyed-load IC stub for a property served by an interceptor.
// The key is compared against the cached |name|; on a match, a
// compile-time LookupResult for the real property is passed to
// GenerateLoadInterceptor, mirroring the named-load interceptor stub.
// Stack protocol on entry: sp[0] = key, sp[4] = receiver.
// Returns the generated Code object tagged as INTERCEPTOR.
1250Object* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
1251 JSObject* holder,
1252 String* name) {
1253 // ----------- S t a t e -------------
1254 // -- lr : return address
1255 // -- sp[0] : key
1256 // -- sp[4] : receiver
1257 // -----------------------------------
1258 Label miss;
1259
1260 // Check the key is the cached one
1261 __ ldr(r2, MemOperand(sp, 0));
1262 __ ldr(r0, MemOperand(sp, kPointerSize));
1263
1264 __ cmp(r2, Operand(Handle<String>(name)));
1265 __ b(ne, &miss);
1266
// Compile-time lookup of the property behind the interceptor.
1267 LookupResult lookup;
1268 holder->LocalLookupRealNamedProperty(name, &lookup);
1269 GenerateLoadInterceptor(receiver,
1270 holder,
1271 &lookup,
1272 r0,
1273 r2,
1274 r3,
1275 r1,
1276 name,
1277 &miss);
1278 __ bind(&miss);
1279 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
1280
1281 return GetCode(INTERCEPTOR, name);
1282}
1283
1284
// Generates a keyed-load IC stub for the "length" property of a JSArray.
// The key is compared against the cached |name|; on a match the length
// load (including the array type check) is emitted by
// GenerateLoadArrayLength.  The stub is classified as CALLBACKS because
// array length is an accessor-like property.
// Stack protocol on entry: sp[0] = key, sp[4] = receiver.
1285Object* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) {
1286 // ----------- S t a t e -------------
1287 // -- lr : return address
1288 // -- sp[0] : key
1289 // -- sp[4] : receiver
1290 // -----------------------------------
1291 Label miss;
1292
1293 // Check the key is the cached one
1294 __ ldr(r2, MemOperand(sp, 0));
1295 __ ldr(r0, MemOperand(sp, kPointerSize));
1296
1297 __ cmp(r2, Operand(Handle<String>(name)));
1298 __ b(ne, &miss);
1299
1300 GenerateLoadArrayLength(masm(), r0, r3, &miss);
1301 __ bind(&miss);
1302 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
1303
1304 return GetCode(CALLBACKS, name);
1305}
1306
1307
// Generates a keyed-load IC stub for the "length" property of a string.
// The keyed_load_string_length counter is incremented optimistically at
// stub entry and decremented again on the miss path so it nets out to the
// number of successful inline loads.  The key must equal the cached
// |name|; the actual length load is emitted by GenerateLoadStringLength2.
// Stack protocol on entry: sp[0] = key, sp[4] = receiver.
1308Object* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) {
1309 // ----------- S t a t e -------------
1310 // -- lr : return address
1311 // -- sp[0] : key
1312 // -- sp[4] : receiver
1313 // -----------------------------------
1314 Label miss;
1315 __ IncrementCounter(&Counters::keyed_load_string_length, 1, r1, r3);
1316
1317 __ ldr(r2, MemOperand(sp));
1318 __ ldr(r0, MemOperand(sp, kPointerSize)); // receiver
1319
1320 __ cmp(r2, Operand(Handle<String>(name)));
1321 __ b(ne, &miss);
1322
1323 GenerateLoadStringLength2(masm(), r0, r1, r3, &miss);
1324 __ bind(&miss);
// Undo the optimistic increment performed at entry.
1325 __ DecrementCounter(&Counters::keyed_load_string_length, 1, r1, r3);
1326
1327 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
1328
1329 return GetCode(CALLBACKS, name);
1330}
1331
1332
// Placeholder keyed-load stub for a function's "prototype" property: no
// fast path is implemented yet (see TODO), so the generated code simply
// jumps straight to the keyed-load miss handler.
1333// TODO(1224671): implement the fast case.
1334Object* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
1335 // ----------- S t a t e -------------
1336 // -- lr : return address
1337 // -- sp[0] : key
1338 // -- sp[4] : receiver
1339 // -----------------------------------
1340 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
1341
1342 return GetCode(CALLBACKS, name);
1343}
1344
1345
// Generates a keyed-store IC stub for storing into a field at |index| on
// |object|, optionally performing the map |transition|.  The key (in r2)
// must equal the cached |name|; the store itself — including possible
// backing-store extension via the StoreIC_ExtendStorage builtin — is
// emitted by GenerateStoreField.  The keyed_store_field counter is
// incremented optimistically and decremented on the miss path.
// Register protocol on entry: r0 = value, r2 = name/key, [sp] = receiver.
// Returns the generated Code object tagged as FIELD, or MAP_TRANSITION
// when a transition map was supplied.
1346Object* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
1347 int index,
1348 Map* transition,
1349 String* name) {
1350 // ----------- S t a t e -------------
1351 // -- r0 : value
1352 // -- r2 : name
1353 // -- lr : return address
1354 // -- [sp] : receiver
1355 // -----------------------------------
1356 Label miss;
1357
1358 __ IncrementCounter(&Counters::keyed_store_field, 1, r1, r3);
1359
1360 // Check that the name has not changed.
1361 __ cmp(r2, Operand(Handle<String>(name)));
1362 __ b(ne, &miss);
1363
1364 // Load receiver from the stack.
1365 __ ldr(r3, MemOperand(sp));
1366 // r1 is used as scratch register, r3 and r2 might be clobbered.
1367 GenerateStoreField(masm(),
1368 Builtins::StoreIC_ExtendStorage,
1369 object,
1370 index,
1371 transition,
1372 r3, r2, r1,
1373 &miss);
1374 __ bind(&miss);
1375
// Undo the optimistic counter increment and re-establish the register
// state the generic miss handler expects (r2 may have been clobbered).
1376 __ DecrementCounter(&Counters::keyed_store_field, 1, r1, r3);
1377 __ mov(r2, Operand(Handle<String>(name))); // restore name register.
1378 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss));
1379 __ Jump(ic, RelocInfo::CODE_TARGET);
1380
1381 // Return the generated code.
1382 return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
1383}
1384
1385
// Generates a specialized construct stub for |shared|: it allocates the
// result object directly in new space and initializes its in-object
// properties from the simple "this.x = ..." assignments recorded on the
// SharedFunctionInfo, skipping the generic construction path.  The stub
// bails out to Builtins::JSConstructStubGeneric when the function has
// debug info, when the constructor's initial map is missing or invalid,
// or when new-space allocation fails.
// Register protocol on entry: r0 = argc, r1 = constructor function,
// [sp] = last argument.  Returns the generated Code object.
1386Object* ConstructStubCompiler::CompileConstructStub(
1387 SharedFunctionInfo* shared) {
1388 // ----------- S t a t e -------------
1389 // -- r0 : argc
1390 // -- r1 : constructor
1391 // -- lr : return address
1392 // -- [sp] : last argument
1393 // -----------------------------------
1394 Label generic_stub_call;
1395
1396 // Use r7 for holding undefined which is used in several places below.
1397 __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
1398
1399#ifdef ENABLE_DEBUGGER_SUPPORT
1400 // Check to see whether there are any break points in the function code. If
1401 // there are jump to the generic constructor stub which calls the actual
1402 // code for the function thereby hitting the break points.
1403 __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
1404 __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kDebugInfoOffset));
// Debug info slot holds undefined (r7) when no debug info is attached.
1405 __ cmp(r2, r7);
1406 __ b(ne, &generic_stub_call);
1407#endif
1408
1409 // Load the initial map and verify that it is in fact a map.
1410 // r1: constructor function
1411 // r7: undefined
1412 __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
// The slot may hold a smi or a non-map object before the initial map is
// installed; either way, take the generic path.
1413 __ tst(r2, Operand(kSmiTagMask));
1414 __ b(eq, &generic_stub_call);
1415 __ CompareObjectType(r2, r3, r4, MAP_TYPE);
1416 __ b(ne, &generic_stub_call);
1417
1418#ifdef DEBUG
1419 // Cannot construct functions this way.
1420 // r0: argc
1421 // r1: constructor function
1422 // r2: initial map
1423 // r7: undefined
1424 __ CompareInstanceType(r2, r3, JS_FUNCTION_TYPE)
1425 __ Check(ne, "Function constructed by construct stub.");
1426#endif
1427
1428 // Now allocate the JSObject in new space.
1429 // r0: argc
1430 // r1: constructor function
1431 // r2: initial map
1432 // r7: undefined
1433 __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset));
1434 __ AllocateInNewSpace(r3,
1435 r4,
1436 r5,
1437 r6,
1438 &generic_stub_call,
1439 NO_ALLOCATION_FLAGS);
1440
1441 // Allocated the JSObject, now initialize the fields. Map is set to initial
1442 // map and properties and elements are set to empty fixed array.
1443 // r0: argc
1444 // r1: constructor function
1445 // r2: initial map
1446 // r3: object size (in words)
1447 // r4: JSObject (not tagged)
1448 // r7: undefined
1449 __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
1450 __ mov(r5, r4);
// The three header fields are written with post-indexed stores so r5 ends
// up pointing at the first in-object property; the ASSERT_EQs pin the
// assumed field layout.
1451 ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
1452 __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
1453 ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
1454 __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
1455 ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset);
1456 __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
1457
1458 // Calculate the location of the first argument. The stack contains only the
1459 // argc arguments.
1460 __ add(r1, sp, Operand(r0, LSL, kPointerSizeLog2));
1461
1462 // Fill all the in-object properties with undefined.
1463 // r0: argc
1464 // r1: first argument
1465 // r3: object size (in words)
1466 // r4: JSObject (not tagged)
1467 // r5: First in-object property of JSObject (not tagged)
1468 // r7: undefined
1469 // Fill the initialized properties with a constant value or a passed argument
1470 // depending on the this.x = ...; assignment in the function.
1471 for (int i = 0; i < shared->this_property_assignments_count(); i++) {
1472 if (shared->IsThisPropertyAssignmentArgument(i)) {
1473 Label not_passed, next;
1474 // Check if the argument assigned to the property is actually passed.
1475 int arg_number = shared->GetThisPropertyAssignmentArgument(i);
// arg_number is zero-based, so the argument exists only when
// argc > arg_number.
1476 __ cmp(r0, Operand(arg_number));
1477 __ b(le, &not_passed);
1478 // Argument passed - find it on the stack.
1479 __ ldr(r2, MemOperand(r1, (arg_number + 1) * -kPointerSize));
1480 __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
1481 __ b(&next);
1482 __ bind(&not_passed);
1483 // Set the property to undefined.
1484 __ str(r7, MemOperand(r5, kPointerSize, PostIndex));
1485 __ bind(&next);
1486 } else {
1487 // Set the property to the constant value.
1488 Handle<Object> constant(shared->GetThisPropertyAssignmentConstant(i));
1489 __ mov(r2, Operand(constant));
1490 __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
1491 }
1492 }
1493
1494 // Fill the unused in-object property fields with undefined.
1495 for (int i = shared->this_property_assignments_count();
1496 i < shared->CalculateInObjectProperties();
1497 i++) {
1498 __ str(r7, MemOperand(r5, kPointerSize, PostIndex));
1499 }
1500
1501 // r0: argc
1502 // r4: JSObject (not tagged)
1503 // Move argc to r1 and the JSObject to return to r0 and tag it.
1504 __ mov(r1, r0);
1505 __ mov(r0, r4);
1506 __ orr(r0, r0, Operand(kHeapObjectTag));
1507
1508 // r0: JSObject
1509 // r1: argc
1510 // Remove caller arguments and receiver from the stack and return.
1511 __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2));
1512 __ add(sp, sp, Operand(kPointerSize));
1513 __ IncrementCounter(&Counters::constructed_objects, 1, r1, r2);
1514 __ IncrementCounter(&Counters::constructed_objects_stub, 1, r1, r2);
1515 __ Jump(lr);
1516
1517 // Jump to the generic stub in case the specialized code cannot handle the
1518 // construction.
1519 __ bind(&generic_stub_call);
1520 Code* code = Builtins::builtin(Builtins::JSConstructStubGeneric);
1521 Handle<Code> generic_construct_stub(code);
1522 __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
1523
1524 // Return the generated code.
1525 return GetCode();
1526}
1527
1528
1529#undef __
1530
1531} } // namespace v8::internal