blob: d19a683dc881d0fceb561aae782343bd40972b16 [file] [log] [blame]
Steve Blocka7e24c12009-10-30 11:49:00 +00001// Copyright 2006-2009 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
29
30#include "ic-inl.h"
31#include "codegen-inl.h"
32#include "stub-cache.h"
33
34namespace v8 {
35namespace internal {
36
37#define __ ACCESS_MASM(masm)
38
39
// Probe one table (primary or secondary) of the stub cache for a code
// object matching (name, flags). On a hit this tail-jumps into the cached
// code and never returns; on a miss it restores |offset| and falls
// through. |offset| holds the pre-scaled table offset computed by the
// caller and is used as a scratch register here; |ip| is clobbered.
static void ProbeTable(MacroAssembler* masm,
                       Code::Flags flags,
                       StubCache::Table table,
                       Register name,
                       Register offset) {
  ExternalReference key_offset(SCTableReference::keyReference(table));
  ExternalReference value_offset(SCTableReference::valueReference(table));

  Label miss;

  // Save the offset on the stack.
  __ push(offset);

  // Check that the key in the entry matches the name.
  // The LSL 1 completes the entry-size scaling; GenerateProbe asserts
  // sizeof(Entry) == 8 and pre-shifts the offset by kHeapObjectTagSize.
  __ mov(ip, Operand(key_offset));
  __ ldr(ip, MemOperand(ip, offset, LSL, 1));
  __ cmp(name, Operand(ip));
  __ b(ne, &miss);

  // Get the code entry from the cache.
  __ mov(ip, Operand(value_offset));
  __ ldr(offset, MemOperand(ip, offset, LSL, 1));

  // Check that the flags match what we're looking for.
  __ ldr(offset, FieldMemOperand(offset, Code::kFlagsOffset));
  __ and_(offset, offset, Operand(~Code::kFlagsNotUsedInLookup));
  __ cmp(offset, Operand(flags));
  __ b(ne, &miss);

  // Restore offset and re-load code entry from cache. The code object
  // pointer was clobbered above while checking the flags.
  __ pop(offset);
  __ mov(ip, Operand(value_offset));
  __ ldr(offset, MemOperand(ip, offset, LSL, 1));

  // Jump to the first instruction in the code stub.
  __ add(offset, offset, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(offset);

  // Miss: Restore offset and fall through.
  __ bind(&miss);
  __ pop(offset);
}
82
83
// Probe the stub cache for a code object matching (receiver map, name,
// flags). Tries the primary table first and the secondary table on a
// primary miss; on a full miss, falls through so the caller can enter
// the runtime system. |scratch| and |ip| are clobbered. The |extra|
// register is not used by this ARM implementation.
void StubCache::GenerateProbe(MacroAssembler* masm,
                              Code::Flags flags,
                              Register receiver,
                              Register name,
                              Register scratch,
                              Register extra) {
  Label miss;

  // Make sure that code is valid. The shifting code relies on the
  // entry size being 8.
  ASSERT(sizeof(Entry) == 8);

  // Make sure the flags does not name a specific type.
  ASSERT(Code::ExtractTypeFromFlags(flags) == 0);

  // Make sure that there are no register conflicts.
  ASSERT(!scratch.is(receiver));
  ASSERT(!scratch.is(name));

  // Check that the receiver isn't a smi.
  __ tst(receiver, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Get the map of the receiver and compute the hash.
  // hash = (name.hash_field + receiver.map) ^ flags, masked to table size.
  __ ldr(scratch, FieldMemOperand(name, String::kHashFieldOffset));
  __ ldr(ip, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ add(scratch, scratch, Operand(ip));
  __ eor(scratch, scratch, Operand(flags));
  __ and_(scratch,
          scratch,
          Operand((kPrimaryTableSize - 1) << kHeapObjectTagSize));

  // Probe the primary table.
  ProbeTable(masm, flags, kPrimary, name, scratch);

  // Primary miss: Compute hash for secondary probe.
  __ sub(scratch, scratch, Operand(name));
  __ add(scratch, scratch, Operand(flags));
  __ and_(scratch,
          scratch,
          Operand((kSecondaryTableSize - 1) << kHeapObjectTagSize));

  // Probe the secondary table.
  ProbeTable(masm, flags, kSecondary, name, scratch);

  // Cache miss: Fall-through and let caller handle the miss by
  // entering the runtime system.
  __ bind(&miss);
}
133
134
// Load into |prototype| the initial-map prototype of the global function
// stored at context slot |index| (e.g. Context::STRING_FUNCTION_INDEX).
// Each intermediate load overwrites |prototype|; no other register is
// clobbered.
void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       int index,
                                                       Register prototype) {
  // Load the global or builtins object from the current context.
  __ ldr(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
  // Load the global context from the global or builtins object.
  __ ldr(prototype,
         FieldMemOperand(prototype, GlobalObject::kGlobalContextOffset));
  // Load the function from the global context.
  __ ldr(prototype, MemOperand(prototype, Context::SlotOffset(index)));
  // Load the initial map. The global functions all have initial maps.
  __ ldr(prototype,
         FieldMemOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
  __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}
151
152
// Load a fast property out of a holder object (src). In-object properties
// are loaded directly otherwise the property is loaded from the properties
// fixed array. |index| counts all properties of the holder's map; negative
// adjusted values select in-object slots.
void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
                                            Register dst, Register src,
                                            JSObject* holder, int index) {
  // Adjust for the number of properties stored in the holder.
  index -= holder->map()->inobject_properties();
  if (index < 0) {
    // Get the property straight out of the holder. A negative index here
    // addresses backwards from the end of the instance.
    int offset = holder->map()->instance_size() + (index * kPointerSize);
    __ ldr(dst, FieldMemOperand(src, offset));
  } else {
    // Calculate the offset into the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    __ ldr(dst, FieldMemOperand(src, JSObject::kPropertiesOffset));
    __ ldr(dst, FieldMemOperand(dst, offset));
  }
}
172
173
// Load the length of a JSArray receiver into r0 and return. Jumps to
// |miss_label| if the receiver is a smi or not a JS array. |scratch| is
// clobbered.
void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
                                           Register receiver,
                                           Register scratch,
                                           Label* miss_label) {
  // Check that the receiver isn't a smi.
  __ tst(receiver, Operand(kSmiTagMask));
  __ b(eq, miss_label);

  // Check that the object is a JS array.
  __ CompareObjectType(receiver, scratch, scratch, JS_ARRAY_TYPE);
  __ b(ne, miss_label);

  // Load length directly from the JS array.
  __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
  __ Ret();
}
190
191
// Generate code to check if an object is a string. If the object is
// a string, the map's instance type is left in the scratch1 register.
// Jumps to |smi| for smi receivers and to |non_string_object| for
// non-string heap objects; falls through on success. |scratch2| is
// clobbered.
static void GenerateStringCheck(MacroAssembler* masm,
                                Register receiver,
                                Register scratch1,
                                Register scratch2,
                                Label* smi,
                                Label* non_string_object) {
  // Check that the receiver isn't a smi.
  __ tst(receiver, Operand(kSmiTagMask));
  __ b(eq, smi);

  // Check that the object is a string.
  __ ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ and_(scratch2, scratch1, Operand(kIsNotStringMask));
  // The cast is to resolve the overload for the argument of 0x0.
  __ cmp(scratch2, Operand(static_cast<int32_t>(kStringTag)));
  __ b(ne, non_string_object);
}
212
213
// Generate code to load the length from a string object and return the length.
// If the receiver object is not a string or a wrapped string object the
// execution continues at the miss label. The register containing the
// receiver is potentially clobbered. The length is returned in r0 as a smi.
void StubCompiler::GenerateLoadStringLength2(MacroAssembler* masm,
                                             Register receiver,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* miss) {
  Label check_string, check_wrapper;

  __ bind(&check_string);
  // Check if the object is a string leaving the instance type in the
  // scratch1 register.
  GenerateStringCheck(masm, receiver, scratch1, scratch2,
                      miss, &check_wrapper);

  // Load length directly from the string and smi-tag it before returning.
  __ ldr(r0, FieldMemOperand(receiver, String::kLengthOffset));
  __ mov(r0, Operand(r0, LSL, kSmiTagSize));
  __ Ret();

  // Check if the object is a JSValue wrapper.
  __ bind(&check_wrapper);
  __ cmp(scratch1, Operand(JS_VALUE_TYPE));
  __ b(ne, miss);

  // Unwrap the value in place and check if the wrapped value is a string.
  // Loops back to the string check above with the unwrapped receiver.
  __ ldr(receiver, FieldMemOperand(receiver, JSValue::kValueOffset));
  __ b(&check_string);
}
245
246
// Load the prototype of a JSFunction receiver into r0 and return.
// Delegates the type check and prototype extraction to
// MacroAssembler::TryGetFunctionPrototype, which jumps to |miss_label|
// on failure. |scratch1| and |scratch2| are clobbered.
void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
                                                 Register receiver,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  __ mov(r0, scratch1);
  __ Ret();
}
256
257
// Generate StoreField code, value is passed in r0 register.
// After executing generated code, the receiver_reg and name_reg
// may be clobbered.
// NOTE(review): the |storage_extend| parameter is not referenced in this
// body — the extend-storage path below hard-codes
// Builtins::StoreIC_ExtendStorage. Presumably the parameter anticipates
// keyed stores (see the comment in that path); confirm before relying
// on it.
void StubCompiler::GenerateStoreField(MacroAssembler* masm,
                                      Builtins::Name storage_extend,
                                      JSObject* object,
                                      int index,
                                      Map* transition,
                                      Register receiver_reg,
                                      Register name_reg,
                                      Register scratch,
                                      Label* miss_label) {
  // r0 : value
  Label exit;

  // Check that the receiver isn't a smi.
  __ tst(receiver_reg, Operand(kSmiTagMask));
  __ b(eq, miss_label);

  // Check that the map of the receiver hasn't changed.
  __ ldr(scratch, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
  __ cmp(scratch, Operand(Handle<Map>(object->map())));
  __ b(ne, miss_label);

  // Perform global security token check if needed.
  if (object->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label);
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Perform map transition for the receiver if necessary.
  if ((transition != NULL) && (object->map()->unused_property_fields() == 0)) {
    // The properties must be extended before we can store the value.
    // We jump to a runtime call that extends the properties array.
    __ mov(r2, Operand(Handle<Map>(transition)));
    // Please note, if we implement keyed store for arm we need
    // to call the Builtins::KeyedStoreIC_ExtendStorage.
    Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_ExtendStorage));
    __ Jump(ic, RelocInfo::CODE_TARGET);
    return;
  }

  if (transition != NULL) {
    // Update the map of the object; no write barrier updating is
    // needed because the map is never in new space.
    __ mov(ip, Operand(Handle<Map>(transition)));
    __ str(ip, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
  }

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    __ str(r0, FieldMemOperand(receiver_reg, offset));

    // Skip updating write barrier if storing a smi.
    __ tst(r0, Operand(kSmiTagMask));
    __ b(eq, &exit);

    // Update the write barrier for the array address.
    // Pass the value being stored in the now unused name_reg.
    __ mov(name_reg, Operand(offset));
    __ RecordWrite(receiver_reg, name_reg, scratch);
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array
    __ ldr(scratch, FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
    __ str(r0, FieldMemOperand(scratch, offset));

    // Skip updating write barrier if storing a smi.
    __ tst(r0, Operand(kSmiTagMask));
    __ b(eq, &exit);

    // Update the write barrier for the array address.
    // Ok to clobber receiver_reg and name_reg, since we return.
    __ mov(name_reg, Operand(offset));
    __ RecordWrite(scratch, name_reg, receiver_reg);
  }

  // Return the value (register r0).
  __ bind(&exit);
  __ Ret();
}
349
350
351void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
352 ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
353 Code* code = NULL;
354 if (kind == Code::LOAD_IC) {
355 code = Builtins::builtin(Builtins::LoadIC_Miss);
356 } else {
357 code = Builtins::builtin(Builtins::KeyedLoadIC_Miss);
358 }
359
360 Handle<Code> ic(code);
361 __ Jump(ic, RelocInfo::CODE_TARGET);
362}
363
364
// Invoke the JS function in r1 as a tail call. Jumps to |miss| if r1 is
// not actually a function. When the stub's receiver object is a global
// object, the receiver on the stack is first replaced with the global
// proxy. r2 and r3 are clobbered.
static void GenerateCallFunction(MacroAssembler* masm,
                                 Object* object,
                                 const ParameterCount& arguments,
                                 Label* miss) {
  // ----------- S t a t e -------------
  //  -- r0: receiver
  //  -- r1: function to call
  // -----------------------------------

  // Check that the function really is a function.
  __ BranchOnSmi(r1, miss);
  __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
  __ b(ne, miss);

  // Patch the receiver on the stack with the global proxy if
  // necessary.
  if (object->IsGlobalObject()) {
    __ ldr(r3, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));
    __ str(r3, MemOperand(sp, arguments.immediate() * kPointerSize));
  }

  // Invoke the function.
  __ InvokeFunction(r1, arguments, JUMP_FUNCTION);
}
389
390
// Tail-call a known, already-compiled JSFunction: materialize the
// function in r1, install its context in cp, and jump to its cached
// code with the proper argument-count adaptation.
static void GenerateCallConstFunction(MacroAssembler* masm,
                                      JSFunction* function,
                                      const ParameterCount& arguments) {
  ASSERT(function->is_compiled());

  // Get the function and setup the context.
  __ mov(r1, Operand(Handle<JSFunction>(function)));
  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  // Jump to the cached code (tail call).
  Handle<Code> code(function->code());
  ParameterCount expected(function->shared()->formal_parameter_count());
  __ InvokeCode(code, expected, arguments,
                RelocInfo::CODE_TARGET, JUMP_FUNCTION);
}
406
407
// Shared driver for compiling an interceptor load. Performs the common
// smi and prototype-chain checks, then dispatches to the Compiler policy
// object: CompileCacheable when the post-interceptor lookup result can be
// inlined, CompileRegular otherwise. Compiler is LoadInterceptorCompiler
// or CallInterceptorCompiler (defined below).
template <class Compiler>
static void CompileLoadInterceptor(Compiler* compiler,
                                   StubCompiler* stub_compiler,
                                   MacroAssembler* masm,
                                   JSObject* object,
                                   JSObject* holder,
                                   String* name,
                                   LookupResult* lookup,
                                   Register receiver,
                                   Register scratch1,
                                   Register scratch2,
                                   Label* miss) {
  ASSERT(holder->HasNamedInterceptor());
  ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());

  // Check that the receiver isn't a smi.
  __ BranchOnSmi(receiver, miss);

  // Check that the maps haven't changed.
  Register reg =
      stub_compiler->CheckPrototypes(object, receiver, holder,
                                     scratch1, scratch2, name, miss);

  if (lookup->IsValid() && lookup->IsCacheable()) {
    compiler->CompileCacheable(masm,
                               stub_compiler,
                               receiver,
                               reg,
                               scratch1,
                               scratch2,
                               holder,
                               lookup,
                               name,
                               miss);
  } else {
    compiler->CompileRegular(masm,
                             receiver,
                             reg,
                             scratch2,
                             holder,
                             miss);
  }
}
451
452
// Push the five arguments expected by the interceptor runtime entries:
// receiver, holder, name, interceptor info, and the interceptor's data.
// |receiver| is reused as a scratch register after being pushed, so its
// value is clobbered on return.
static void PushInterceptorArguments(MacroAssembler* masm,
                                     Register receiver,
                                     Register holder,
                                     Register name,
                                     JSObject* holder_obj) {
  __ push(receiver);
  __ push(holder);
  __ push(name);
  InterceptorInfo* interceptor = holder_obj->GetNamedInterceptor();
  // The interceptor object is embedded as an immediate, so it must not
  // move during GC.
  ASSERT(!Heap::InNewSpace(interceptor));

  Register scratch = receiver;
  __ mov(scratch, Operand(Handle<Object>(interceptor)));
  __ push(scratch);
  __ ldr(scratch, FieldMemOperand(scratch, InterceptorInfo::kDataOffset));
  __ push(scratch);
}
470
471
// Call the LoadPropertyWithInterceptorOnly runtime entry with the five
// interceptor arguments. The caller is responsible for being inside an
// internal frame; the result comes back in r0. Clobbers |receiver|
// (scratch in PushInterceptorArguments), r0 and r1.
static void CompileCallLoadPropertyWithInterceptor(MacroAssembler* masm,
                                                   Register receiver,
                                                   Register holder,
                                                   Register name,
                                                   JSObject* holder_obj) {
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);

  ExternalReference ref =
      ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly));
  __ mov(r0, Operand(5));  // Number of arguments pushed above.
  __ mov(r1, Operand(ref));

  CEntryStub stub(1);
  __ CallStub(&stub);
}
487
488
// Policy object used with CompileLoadInterceptor for property loads.
// CompileCacheable inlines the post-interceptor FIELD or CALLBACKS
// lookup when the interceptor yields no result; CompileRegular always
// goes through the interceptor runtime entry.
class LoadInterceptorCompiler BASE_EMBEDDED {
 public:
  explicit LoadInterceptorCompiler(Register name) : name_(name) {}

  void CompileCacheable(MacroAssembler* masm,
                        StubCompiler* stub_compiler,
                        Register receiver,
                        Register holder,
                        Register scratch1,
                        Register scratch2,
                        JSObject* holder_obj,
                        LookupResult* lookup,
                        String* name,
                        Label* miss_label) {
    AccessorInfo* callback = 0;
    bool optimize = false;
    // So far the most popular follow ups for interceptor loads are FIELD
    // and CALLBACKS, so inline only them, other cases may be added
    // later.
    if (lookup->type() == FIELD) {
      optimize = true;
    } else if (lookup->type() == CALLBACKS) {
      Object* callback_object = lookup->GetCallbackObject();
      if (callback_object->IsAccessorInfo()) {
        callback = AccessorInfo::cast(callback_object);
        optimize = callback->getter() != NULL;
      }
    }

    if (!optimize) {
      CompileRegular(masm, receiver, holder, scratch2, holder_obj, miss_label);
      return;
    }

    // Note: starting a frame here makes GC aware of pointers pushed below.
    __ EnterInternalFrame();

    // The receiver is only needed afterwards for the CALLBACKS path.
    if (lookup->type() == CALLBACKS) {
      __ push(receiver);
    }
    __ push(holder);
    __ push(name_);

    // Invoke the interceptor; the result arrives in r0.
    CompileCallLoadPropertyWithInterceptor(masm,
                                           receiver,
                                           holder,
                                           name_,
                                           holder_obj);

    Label interceptor_failed;
    // Compare with no_interceptor_result_sentinel: anything else is the
    // actual property value and can be returned directly.
    __ LoadRoot(scratch1, Heap::kNoInterceptorResultSentinelRootIndex);
    __ cmp(r0, scratch1);
    __ b(eq, &interceptor_failed);
    __ LeaveInternalFrame();
    __ Ret();

    // Interceptor produced nothing: restore the saved registers and fall
    // back to the inlined lookup result.
    __ bind(&interceptor_failed);
    __ pop(name_);
    __ pop(holder);

    if (lookup->type() == CALLBACKS) {
      __ pop(receiver);
    }

    __ LeaveInternalFrame();

    if (lookup->type() == FIELD) {
      // Inline the fast field load from the holder of the lookup result.
      holder = stub_compiler->CheckPrototypes(holder_obj,
                                              holder,
                                              lookup->holder(),
                                              scratch1,
                                              scratch2,
                                              name,
                                              miss_label);
      stub_compiler->GenerateFastPropertyLoad(masm,
                                              r0,
                                              holder,
                                              lookup->holder(),
                                              lookup->GetFieldIndex());
      __ Ret();
    } else {
      ASSERT(lookup->type() == CALLBACKS);
      ASSERT(lookup->GetCallbackObject()->IsAccessorInfo());
      ASSERT(callback != NULL);
      ASSERT(callback->getter() != NULL);

      // Tail-call the callback-property runtime entry. The return address
      // is popped and re-pushed so the receiver sits below it on the stack.
      Label cleanup;
      __ pop(scratch2);
      __ push(receiver);
      __ push(scratch2);

      holder = stub_compiler->CheckPrototypes(holder_obj, holder,
                                              lookup->holder(), scratch1,
                                              scratch2,
                                              name,
                                              &cleanup);

      __ push(holder);
      __ Move(holder, Handle<AccessorInfo>(callback));
      __ push(holder);
      __ ldr(scratch1, FieldMemOperand(holder, AccessorInfo::kDataOffset));
      __ push(scratch1);
      __ push(name_);

      ExternalReference ref =
          ExternalReference(IC_Utility(IC::kLoadCallbackProperty));
      __ TailCallRuntime(ref, 5, 1);

      // Prototype check failed after the receiver was pushed: undo the
      // stack shuffle before jumping to the miss label.
      __ bind(&cleanup);
      __ pop(scratch1);
      __ pop(scratch2);
      __ push(scratch1);
    }
  }


  // Always defer to the interceptor runtime entry (tail call).
  void CompileRegular(MacroAssembler* masm,
                      Register receiver,
                      Register holder,
                      Register scratch,
                      JSObject* holder_obj,
                      Label* miss_label) {
    PushInterceptorArguments(masm, receiver, holder, name_, holder_obj);

    ExternalReference ref = ExternalReference(
        IC_Utility(IC::kLoadPropertyWithInterceptorForLoad));
    __ TailCallRuntime(ref, 5, 1);
  }

 private:
  Register name_;  // Register holding the property name.
};
622
623
// Policy object used with CompileLoadInterceptor for call ICs.
// CompileCacheable inlines a direct call to a known CONSTANT_FUNCTION
// result when the interceptor yields nothing; CompileRegular calls the
// interceptor runtime entry and continues with the generic call path.
class CallInterceptorCompiler BASE_EMBEDDED {
 public:
  CallInterceptorCompiler(const ParameterCount& arguments, Register name)
      : arguments_(arguments), argc_(arguments.immediate()), name_(name) {}

  void CompileCacheable(MacroAssembler* masm,
                        StubCompiler* stub_compiler,
                        Register receiver,
                        Register holder,
                        Register scratch1,
                        Register scratch2,
                        JSObject* holder_obj,
                        LookupResult* lookup,
                        String* name,
                        Label* miss_label) {
    JSFunction* function = 0;
    bool optimize = false;
    // So far the most popular case for failed interceptor is
    // CONSTANT_FUNCTION sitting below.
    if (lookup->type() == CONSTANT_FUNCTION) {
      function = lookup->GetConstantFunction();
      // JSArray holder is a special case for call constant function
      // (see the corresponding code).
      if (function->is_compiled() && !holder_obj->IsJSArray()) {
        optimize = true;
      }
    }

    if (!optimize) {
      CompileRegular(masm, receiver, holder, scratch2, holder_obj, miss_label);
      return;
    }

    // Constant functions cannot sit on global object.
    ASSERT(!lookup->holder()->IsGlobalObject());

    __ EnterInternalFrame();
    __ push(holder);  // Save the holder.
    __ push(name_);  // Save the name.

    // Invoke the interceptor; the result arrives in r0.
    CompileCallLoadPropertyWithInterceptor(masm,
                                           receiver,
                                           holder,
                                           name_,
                                           holder_obj);

    ASSERT(!r0.is(name_));
    ASSERT(!r0.is(scratch1));
    __ pop(name_);  // Restore the name.
    __ pop(scratch1);  // Restore the holder.
    __ LeaveInternalFrame();

    // Compare with no_interceptor_result_sentinel. If the interceptor
    // produced a real value, skip the constant-function fast path and
    // fall through to the generic invocation handled by the caller.
    __ LoadRoot(scratch2, Heap::kNoInterceptorResultSentinelRootIndex);
    __ cmp(r0, scratch2);
    Label invoke;
    __ b(ne, &invoke);

    // Interceptor yielded nothing: verify the chain down to the lookup
    // holder and tail-call the known constant function.
    stub_compiler->CheckPrototypes(holder_obj, scratch1,
                                   lookup->holder(), scratch1,
                                   scratch2,
                                   name,
                                   miss_label);
    GenerateCallConstFunction(masm, function, arguments_);

    __ bind(&invoke);
  }

  // Call the interceptor runtime entry, preserving name_ across the call,
  // then fall through so the caller can finish the generic call sequence.
  void CompileRegular(MacroAssembler* masm,
                      Register receiver,
                      Register holder,
                      Register scratch,
                      JSObject* holder_obj,
                      Label* miss_label) {
    __ EnterInternalFrame();
    // Save the name_ register across the call.
    __ push(name_);

    PushInterceptorArguments(masm,
                             receiver,
                             holder,
                             name_,
                             holder_obj);

    ExternalReference ref = ExternalReference(
        IC_Utility(IC::kLoadPropertyWithInterceptorForCall));
    __ mov(r0, Operand(5));  // Number of arguments pushed above.
    __ mov(r1, Operand(ref));

    CEntryStub stub(1);
    __ CallStub(&stub);

    // Restore the name_ register.
    __ pop(name_);
    __ LeaveInternalFrame();
  }

 private:
  const ParameterCount& arguments_;
  int argc_;  // NOTE(review): unused in the visible code paths.
  Register name_;  // Register holding the property name.
};
726
727
Steve Blocka7e24c12009-10-30 11:49:00 +0000728#undef __
729#define __ ACCESS_MASM(masm())
730
731
// Verify the map of every object on the prototype chain from |object|
// (held in |object_reg|) down to |holder|, jumping to |miss| on any
// mismatch. For global objects on the chain, additionally verify that
// the named property is still absent by checking its property cell
// against the hole. Returns the register holding the holder on success.
Register StubCompiler::CheckPrototypes(JSObject* object,
                                       Register object_reg,
                                       JSObject* holder,
                                       Register holder_reg,
                                       Register scratch,
                                       String* name,
                                       Label* miss) {
  // Check that the maps haven't changed.
  Register result =
      masm()->CheckMaps(object, object_reg, holder, holder_reg, scratch, miss);

  // If we've skipped any global objects, it's not enough to verify
  // that their maps haven't changed.
  while (object != holder) {
    if (object->IsGlobalObject()) {
      GlobalObject* global = GlobalObject::cast(object);
      Object* probe = global->EnsurePropertyCell(name);
      if (probe->IsFailure()) {
        // Allocation failed; record it so the stub compilation is retried.
        set_failure(Failure::cast(probe));
        return result;
      }
      JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(probe);
      ASSERT(cell->value()->IsTheHole());
      // The cell must still hold the hole, i.e. the property must still
      // be absent from the global object.
      __ mov(scratch, Operand(Handle<Object>(cell)));
      __ ldr(scratch,
             FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
      __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
      __ cmp(scratch, ip);
      __ b(ne, miss);
    }
    object = JSObject::cast(object->GetPrototype());
  }

  // Return the register containing the holder.
  return result;
}
768
769
// Generate a fast-case field load: check the receiver and prototype
// chain, load the field at |index| from the holder into r0, and return.
// Jumps to |miss| on a smi receiver or a map mismatch.
void StubCompiler::GenerateLoadField(JSObject* object,
                                     JSObject* holder,
                                     Register receiver,
                                     Register scratch1,
                                     Register scratch2,
                                     int index,
                                     String* name,
                                     Label* miss) {
  // Check that the receiver isn't a smi.
  __ tst(receiver, Operand(kSmiTagMask));
  __ b(eq, miss);

  // Check that the maps haven't changed.
  Register reg =
      CheckPrototypes(object, receiver, holder, scratch1, scratch2, name, miss);
  GenerateFastPropertyLoad(masm(), r0, reg, holder, index);
  __ Ret();
}
788
789
// Generate a constant-property load: check the receiver and prototype
// chain, then return the known |value| directly in r0 without touching
// the holder. Jumps to |miss| on a smi receiver or a map mismatch.
void StubCompiler::GenerateLoadConstant(JSObject* object,
                                        JSObject* holder,
                                        Register receiver,
                                        Register scratch1,
                                        Register scratch2,
                                        Object* value,
                                        String* name,
                                        Label* miss) {
  // Check that the receiver isn't a smi.
  __ tst(receiver, Operand(kSmiTagMask));
  __ b(eq, miss);

  // Check that the maps haven't changed.
  Register reg =
      CheckPrototypes(object, receiver, holder, scratch1, scratch2, name, miss);

  // Return the constant value.
  __ mov(r0, Operand(Handle<Object>(value)));
  __ Ret();
}
810
811
// Generate a callback-property load: check the receiver and prototype
// chain, push the five callback arguments, and tail-call the
// LoadCallbackProperty runtime entry. Always returns true on this
// platform; the |failure| out-parameter is not written here —
// presumably it exists for ports that can fail during generation
// (TODO confirm against other architectures' implementations).
bool StubCompiler::GenerateLoadCallback(JSObject* object,
                                        JSObject* holder,
                                        Register receiver,
                                        Register name_reg,
                                        Register scratch1,
                                        Register scratch2,
                                        AccessorInfo* callback,
                                        String* name,
                                        Label* miss,
                                        Failure** failure) {
  // Check that the receiver isn't a smi.
  __ tst(receiver, Operand(kSmiTagMask));
  __ b(eq, miss);

  // Check that the maps haven't changed.
  Register reg =
      CheckPrototypes(object, receiver, holder, scratch1, scratch2, name, miss);

  // Push the arguments on the JS stack of the caller.
  __ push(receiver);  // receiver
  __ push(reg);  // holder
  __ mov(ip, Operand(Handle<AccessorInfo>(callback)));  // callback data
  __ push(ip);
  __ ldr(reg, FieldMemOperand(ip, AccessorInfo::kDataOffset));
  __ push(reg);
  __ push(name_reg);  // name

  // Do tail-call to the runtime system.
  ExternalReference load_callback_property =
      ExternalReference(IC_Utility(IC::kLoadCallbackProperty));
  __ TailCallRuntime(load_callback_property, 5, 1);

  return true;
}
846
847
// Generate an interceptor-property load by delegating to the shared
// CompileLoadInterceptor driver with the load-specific policy object.
void StubCompiler::GenerateLoadInterceptor(JSObject* object,
                                           JSObject* holder,
                                           LookupResult* lookup,
                                           Register receiver,
                                           Register name_reg,
                                           Register scratch1,
                                           Register scratch2,
                                           String* name,
                                           Label* miss) {
  LoadInterceptorCompiler compiler(name_reg);
  CompileLoadInterceptor(&compiler,
                         this,
                         masm(),
                         object,
                         holder,
                         name,
                         lookup,
                         receiver,
                         scratch1,
                         scratch2,
                         miss);
}
870
871
// Compile the lazy-compile trampoline: call Runtime::kLazyCompile to
// compile the function in r1, then tail-call into the freshly compiled
// code. Returns the stub's code object (or a failure from
// GetCodeWithFlags).
Object* StubCompiler::CompileLazyCompile(Code::Flags flags) {
  // ----------- S t a t e -------------
  //  -- r1: function
  //  -- lr: return address
  // -----------------------------------

  // Enter an internal frame.
  __ EnterInternalFrame();

  // Preserve the function.
  __ push(r1);

  // Push the function on the stack as the argument to the runtime function.
  __ push(r1);
  __ CallRuntime(Runtime::kLazyCompile, 1);

  // Calculate the entry point. The runtime call leaves the code object
  // in r0.
  __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));

  // Restore saved function.
  __ pop(r1);

  // Tear down temporary frame.
  __ LeaveInternalFrame();

  // Do a tail-call of the compiled function.
  __ Jump(r2);

  return GetCodeWithFlags(flags, "LazyCompileStub");
}
902
903
// Compile a call IC stub for a function stored in a fast property field:
// load the receiver from the stack, verify the prototype chain, load the
// field into r1 and invoke it; on any miss, jump to the generic call-miss
// builtin. Returns the generated code object (or a failure).
Object* CallStubCompiler::CompileCallField(Object* object,
                                           JSObject* holder,
                                           int index,
                                           String* name) {
  // ----------- S t a t e -------------
  //  -- lr: return address
  // -----------------------------------
  Label miss;

  const int argc = arguments().immediate();

  // Get the receiver of the function from the stack into r0.
  __ ldr(r0, MemOperand(sp, argc * kPointerSize));
  // Check that the receiver isn't a smi.
  __ tst(r0, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Do the right check and compute the holder register.
  Register reg =
      CheckPrototypes(JSObject::cast(object), r0, holder, r3, r2, name, &miss);
  GenerateFastPropertyLoad(masm(), r1, reg, holder, index);

  GenerateCallFunction(masm(), object, arguments(), &miss);

  // Handle call cache miss.
  __ bind(&miss);
  Handle<Code> ic = ComputeCallMiss(arguments().immediate());
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(FIELD, name);
}
936
937
938Object* CallStubCompiler::CompileCallConstant(Object* object,
939 JSObject* holder,
940 JSFunction* function,
941 String* name,
942 CheckType check) {
943 // ----------- S t a t e -------------
944 // -- lr: return address
945 // -----------------------------------
946 Label miss;
947
948 // Get the receiver from the stack
949 const int argc = arguments().immediate();
950 __ ldr(r1, MemOperand(sp, argc * kPointerSize));
951
952 // Check that the receiver isn't a smi.
953 if (check != NUMBER_CHECK) {
954 __ tst(r1, Operand(kSmiTagMask));
955 __ b(eq, &miss);
956 }
957
958 // Make sure that it's okay not to patch the on stack receiver
959 // unless we're doing a receiver map check.
960 ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);
961
962 switch (check) {
963 case RECEIVER_MAP_CHECK:
964 // Check that the maps haven't changed.
965 CheckPrototypes(JSObject::cast(object), r1, holder, r3, r2, name, &miss);
966
967 // Patch the receiver on the stack with the global proxy if
968 // necessary.
969 if (object->IsGlobalObject()) {
970 __ ldr(r3, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
971 __ str(r3, MemOperand(sp, argc * kPointerSize));
972 }
973 break;
974
975 case STRING_CHECK:
Leon Clarkee46be812010-01-19 14:06:41 +0000976 if (!function->IsBuiltin()) {
977 // Calling non-builtins with a value as receiver requires boxing.
978 __ jmp(&miss);
979 } else {
980 // Check that the object is a two-byte string or a symbol.
981 __ CompareObjectType(r1, r2, r2, FIRST_NONSTRING_TYPE);
982 __ b(hs, &miss);
983 // Check that the maps starting from the prototype haven't changed.
984 GenerateLoadGlobalFunctionPrototype(masm(),
985 Context::STRING_FUNCTION_INDEX,
986 r2);
987 CheckPrototypes(JSObject::cast(object->GetPrototype()), r2, holder, r3,
988 r1, name, &miss);
989 }
Steve Blocka7e24c12009-10-30 11:49:00 +0000990 break;
991
992 case NUMBER_CHECK: {
Leon Clarkee46be812010-01-19 14:06:41 +0000993 if (!function->IsBuiltin()) {
994 // Calling non-builtins with a value as receiver requires boxing.
995 __ jmp(&miss);
996 } else {
997 Label fast;
998 // Check that the object is a smi or a heap number.
999 __ tst(r1, Operand(kSmiTagMask));
1000 __ b(eq, &fast);
1001 __ CompareObjectType(r1, r2, r2, HEAP_NUMBER_TYPE);
1002 __ b(ne, &miss);
1003 __ bind(&fast);
1004 // Check that the maps starting from the prototype haven't changed.
1005 GenerateLoadGlobalFunctionPrototype(masm(),
1006 Context::NUMBER_FUNCTION_INDEX,
1007 r2);
1008 CheckPrototypes(JSObject::cast(object->GetPrototype()), r2, holder, r3,
1009 r1, name, &miss);
1010 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001011 break;
1012 }
1013
1014 case BOOLEAN_CHECK: {
Leon Clarkee46be812010-01-19 14:06:41 +00001015 if (!function->IsBuiltin()) {
1016 // Calling non-builtins with a value as receiver requires boxing.
1017 __ jmp(&miss);
1018 } else {
1019 Label fast;
1020 // Check that the object is a boolean.
1021 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
1022 __ cmp(r1, ip);
1023 __ b(eq, &fast);
1024 __ LoadRoot(ip, Heap::kFalseValueRootIndex);
1025 __ cmp(r1, ip);
1026 __ b(ne, &miss);
1027 __ bind(&fast);
1028 // Check that the maps starting from the prototype haven't changed.
1029 GenerateLoadGlobalFunctionPrototype(masm(),
1030 Context::BOOLEAN_FUNCTION_INDEX,
1031 r2);
1032 CheckPrototypes(JSObject::cast(object->GetPrototype()), r2, holder, r3,
1033 r1, name, &miss);
1034 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001035 break;
1036 }
1037
1038 case JSARRAY_HAS_FAST_ELEMENTS_CHECK:
1039 CheckPrototypes(JSObject::cast(object), r1, holder, r3, r2, name, &miss);
1040 // Make sure object->HasFastElements().
1041 // Get the elements array of the object.
1042 __ ldr(r3, FieldMemOperand(r1, JSObject::kElementsOffset));
1043 // Check that the object is in fast mode (not dictionary).
1044 __ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
1045 __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
1046 __ cmp(r2, ip);
1047 __ b(ne, &miss);
1048 break;
1049
1050 default:
1051 UNREACHABLE();
1052 }
1053
Leon Clarke4515c472010-02-03 11:58:03 +00001054 GenerateCallConstFunction(masm(), function, arguments());
Steve Blocka7e24c12009-10-30 11:49:00 +00001055
1056 // Handle call cache miss.
1057 __ bind(&miss);
1058 Handle<Code> ic = ComputeCallMiss(arguments().immediate());
1059 __ Jump(ic, RelocInfo::CODE_TARGET);
1060
1061 // Return the generated code.
1062 String* function_name = NULL;
1063 if (function->shared()->name()->IsString()) {
1064 function_name = String::cast(function->shared()->name());
1065 }
1066 return GetCode(CONSTANT_FUNCTION, function_name);
1067}
1068
1069
// Compiles a monomorphic call IC stub for a named call whose property
// lookup must go through an interceptor on the holder chain.  On a
// failed check, control falls through to the generic call-miss stub.
Object* CallStubCompiler::CompileCallInterceptor(Object* object,
                                                 JSObject* holder,
                                                 String* name) {
  // ----------- S t a t e -------------
  //  -- lr: return address
  // -----------------------------------
  Label miss;

  // Get the number of arguments.
  const int argc = arguments().immediate();

  // Determine what the lookup would resolve to if the interceptor were
  // not present; this drives the code emitted below.
  LookupResult lookup;
  LookupPostInterceptor(holder, name, &lookup);

  // Get the receiver from the stack into r0.
  __ ldr(r0, MemOperand(sp, argc * kPointerSize));
  // Load the name from the stack into r1.
  __ ldr(r1, MemOperand(sp, (argc + 1) * kPointerSize));

  // Emit the interceptor-aware load of the callee; r2/r3 are scratch.
  CallInterceptorCompiler compiler(arguments(), r1);
  CompileLoadInterceptor(&compiler,
                         this,
                         masm(),
                         JSObject::cast(object),
                         holder,
                         name,
                         &lookup,
                         r0,
                         r2,
                         r3,
                         &miss);

  // Restore receiver (reload from the stack in case r0 was clobbered
  // by the interceptor load above).
  __ ldr(r0, MemOperand(sp, argc * kPointerSize));

  GenerateCallFunction(masm(), object, arguments(), &miss);

  // Handle call cache miss.
  __ bind(&miss);
  Handle<Code> ic = ComputeCallMiss(arguments().immediate());
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(INTERCEPTOR, name);
}
1115
1116
// Compiles a call IC stub for calling a function that is stored in a
// global property cell.  The stub verifies the cell still holds the
// expected function (or an equivalent closure) and then tail-calls the
// function's compiled code.
Object* CallStubCompiler::CompileCallGlobal(JSObject* object,
                                            GlobalObject* holder,
                                            JSGlobalPropertyCell* cell,
                                            JSFunction* function,
                                            String* name) {
  // ----------- S t a t e -------------
  //  -- lr: return address
  // -----------------------------------
  Label miss;

  // Get the number of arguments.
  const int argc = arguments().immediate();

  // Get the receiver from the stack.
  __ ldr(r0, MemOperand(sp, argc * kPointerSize));

  // If the object is the holder then we know that it's a global
  // object which can only happen for contextual calls. In this case,
  // the receiver cannot be a smi.
  if (object != holder) {
    __ tst(r0, Operand(kSmiTagMask));
    __ b(eq, &miss);
  }

  // Check that the maps haven't changed.
  CheckPrototypes(object, r0, holder, r3, r2, name, &miss);

  // Get the value from the cell.
  __ mov(r3, Operand(Handle<JSGlobalPropertyCell>(cell)));
  __ ldr(r1, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset));

  // Check that the cell contains the same function.
  if (Heap::InNewSpace(function)) {
    // We can't embed a pointer to a function in new space so we have
    // to verify that the shared function info is unchanged. This has
    // the nice side effect that multiple closures based on the same
    // function can all use this call IC. Before we load through the
    // function, we have to verify that it still is a function.
    __ tst(r1, Operand(kSmiTagMask));
    __ b(eq, &miss);
    __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE);
    __ b(ne, &miss);

    // Check the shared function info. Make sure it hasn't changed.
    __ mov(r3, Operand(Handle<SharedFunctionInfo>(function->shared())));
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
    __ cmp(r2, r3);
    __ b(ne, &miss);
  } else {
    // Old-space function: a direct pointer comparison suffices.
    __ cmp(r1, Operand(Handle<JSFunction>(function)));
    __ b(ne, &miss);
  }

  // Patch the receiver on the stack with the global proxy if
  // necessary.
  if (object->IsGlobalObject()) {
    __ ldr(r3, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));
    __ str(r3, MemOperand(sp, argc * kPointerSize));
  }

  // Setup the context (function already in r1).
  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  // Jump to the cached code (tail call).
  __ IncrementCounter(&Counters::call_global_inline, 1, r2, r3);
  ASSERT(function->is_compiled());
  Handle<Code> code(function->code());
  ParameterCount expected(function->shared()->formal_parameter_count());
  __ InvokeCode(code, expected, arguments(),
                RelocInfo::CODE_TARGET, JUMP_FUNCTION);

  // Handle call cache miss.
  __ bind(&miss);
  __ IncrementCounter(&Counters::call_global_inline_miss, 1, r1, r3);
  Handle<Code> ic = ComputeCallMiss(arguments().immediate());
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(NORMAL, name);
}
1197
1198
// Compiles a store IC stub that writes to an in-object or backing-store
// field of |object|, optionally performing the map transition
// |transition| (NULL means a plain field store).
Object* StoreStubCompiler::CompileStoreField(JSObject* object,
                                             int index,
                                             Map* transition,
                                             String* name) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------
  Label miss;

  // Get the receiver from the stack.
  __ ldr(r3, MemOperand(sp, 0 * kPointerSize));

  // name register might be clobbered.
  GenerateStoreField(masm(),
                     Builtins::StoreIC_ExtendStorage,
                     object,
                     index,
                     transition,
                     r3, r2, r1,
                     &miss);
  __ bind(&miss);
  // r2 may have been used as scratch above; the miss stub expects the
  // property name in r2.
  __ mov(r2, Operand(Handle<String>(name)));  // restore name
  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
}
1230
1231
// Compiles a store IC stub for a property backed by an AccessorInfo
// callback.  After the map (and optional access) checks, the actual
// store is delegated to the runtime via a tail call.
Object* StoreStubCompiler::CompileStoreCallback(JSObject* object,
                                                AccessorInfo* callback,
                                                String* name) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------
  Label miss;

  // Get the object from the stack.
  __ ldr(r3, MemOperand(sp, 0 * kPointerSize));

  // Check that the object isn't a smi.
  __ tst(r3, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Check that the map of the object hasn't changed.
  __ ldr(r1, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ cmp(r1, Operand(Handle<Map>(object->map())));
  __ b(ne, &miss);

  // Perform global security token check if needed.
  if (object->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(r3, r1, &miss);
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Push the four arguments expected by kStoreCallbackProperty:
  // receiver, callback info, name and value.
  __ ldr(ip, MemOperand(sp));  // receiver
  __ push(ip);
  __ mov(ip, Operand(Handle<AccessorInfo>(callback)));  // callback info
  __ push(ip);
  __ push(r2);  // name
  __ push(r0);  // value

  // Do tail-call to the runtime system.
  ExternalReference store_callback_property =
      ExternalReference(IC_Utility(IC::kStoreCallbackProperty));
  __ TailCallRuntime(store_callback_property, 4, 1);

  // Handle store cache miss.
  __ bind(&miss);
  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(CALLBACKS, name);
}
1284
1285
// Compiles a store IC stub for a property handled by a store
// interceptor on |receiver|.  After the map (and optional access)
// checks, the store is delegated to the runtime via a tail call.
Object* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
                                                   String* name) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------
  Label miss;

  // Get the object from the stack.
  __ ldr(r3, MemOperand(sp, 0 * kPointerSize));

  // Check that the object isn't a smi.
  __ tst(r3, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Check that the map of the object hasn't changed.
  __ ldr(r1, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ cmp(r1, Operand(Handle<Map>(receiver->map())));
  __ b(ne, &miss);

  // Perform global security token check if needed.
  if (receiver->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(r3, r1, &miss);
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded());

  // Push the three arguments expected by kStoreInterceptorProperty:
  // receiver, name and value.
  __ ldr(ip, MemOperand(sp));  // receiver
  __ push(ip);
  __ push(r2);  // name
  __ push(r0);  // value

  // Do tail-call to the runtime system.
  ExternalReference store_ic_property =
      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty));
  __ TailCallRuntime(store_ic_property, 3, 1);

  // Handle store cache miss.
  __ bind(&miss);
  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(INTERCEPTOR, name);
}
1335
1336
// Compiles a store IC stub that writes directly into the global
// property cell |cell| after verifying the global object's map has not
// changed.
Object* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
                                              JSGlobalPropertyCell* cell,
                                              String* name) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------
  Label miss;

  // Check that the map of the global has not changed.
  __ ldr(r1, MemOperand(sp, 0 * kPointerSize));
  __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r3, Operand(Handle<Map>(object->map())));
  __ b(ne, &miss);

  // Store the value in the cell.
  __ mov(r2, Operand(Handle<JSGlobalPropertyCell>(cell)));
  __ str(r0, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));

  __ IncrementCounter(&Counters::named_store_global_inline, 1, r1, r3);
  __ Ret();

  // Handle store cache miss.
  __ bind(&miss);
  __ IncrementCounter(&Counters::named_store_global_inline_miss, 1, r1, r3);
  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(NORMAL, name);
}
1370
1371
// Compiles a load IC stub that reads field |index| of |object| after
// checking the prototype chain up to |holder|.
Object* LoadStubCompiler::CompileLoadField(JSObject* object,
                                           JSObject* holder,
                                           int index,
                                           String* name) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------
  Label miss;

  // Load the receiver from the stack; r3 and r1 are scratch.
  __ ldr(r0, MemOperand(sp, 0));

  GenerateLoadField(object, holder, r0, r3, r1, index, name, &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(FIELD, name);
}
1392
1393
// Compiles a load IC stub for a property backed by an AccessorInfo
// callback.  GenerateLoadCallback may fail (e.g. when the generated
// code cannot handle this callback); in that case the failure object is
// propagated to the caller instead of compiled code.
Object* LoadStubCompiler::CompileLoadCallback(String* name,
                                              JSObject* object,
                                              JSObject* holder,
                                              AccessorInfo* callback) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------
  Label miss;

  // Load the receiver from the stack.
  __ ldr(r0, MemOperand(sp, 0));
  Failure* failure = Failure::InternalError();
  bool success = GenerateLoadCallback(object, holder, r0, r2, r3, r1,
                                      callback, name, &miss, &failure);
  if (!success) return failure;

  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(CALLBACKS, name);
}
1417
1418
// Compiles a load IC stub that returns the constant |value| after
// checking the prototype chain from |object| up to |holder|.
Object* LoadStubCompiler::CompileLoadConstant(JSObject* object,
                                              JSObject* holder,
                                              Object* value,
                                              String* name) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------
  Label miss;

  // Load the receiver from the stack; r3 and r1 are scratch.
  __ ldr(r0, MemOperand(sp, 0));

  GenerateLoadConstant(object, holder, r0, r3, r1, value, name, &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(CONSTANT_FUNCTION, name);
}
1439
1440
// Compiles a load IC stub for a named property whose lookup must go
// through an interceptor on the holder chain.
Object* LoadStubCompiler::CompileLoadInterceptor(JSObject* object,
                                                 JSObject* holder,
                                                 String* name) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------
  Label miss;

  // Load the receiver from the stack.
  __ ldr(r0, MemOperand(sp, 0));

  // Determine what the lookup would resolve to if the interceptor were
  // not present; this drives the code emitted below.
  LookupResult lookup;
  LookupPostInterceptor(holder, name, &lookup);
  GenerateLoadInterceptor(object,
                          holder,
                          &lookup,
                          r0,
                          r2,
                          r3,
                          r1,
                          name,
                          &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(INTERCEPTOR, name);
}
1470
1471
// Compiles a load IC stub that reads a global property straight out of
// its property cell.  If the property is deletable (|is_dont_delete|
// false), the stub also misses when the cell holds the hole.
Object* LoadStubCompiler::CompileLoadGlobal(JSObject* object,
                                            GlobalObject* holder,
                                            JSGlobalPropertyCell* cell,
                                            String* name,
                                            bool is_dont_delete) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------
  Label miss;

  // Get the receiver from the stack.
  __ ldr(r1, MemOperand(sp, 0 * kPointerSize));

  // If the object is the holder then we know that it's a global
  // object which can only happen for contextual calls. In this case,
  // the receiver cannot be a smi.
  if (object != holder) {
    __ tst(r1, Operand(kSmiTagMask));
    __ b(eq, &miss);
  }

  // Check that the map of the global has not changed.
  CheckPrototypes(object, r1, holder, r3, r0, name, &miss);

  // Get the value from the cell.
  __ mov(r3, Operand(Handle<JSGlobalPropertyCell>(cell)));
  __ ldr(r0, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset));

  // Check for deleted property if property can actually be deleted.
  if (!is_dont_delete) {
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
    __ cmp(r0, ip);
    __ b(eq, &miss);
  }

  __ IncrementCounter(&Counters::named_load_global_inline, 1, r1, r3);
  __ Ret();

  __ bind(&miss);
  __ IncrementCounter(&Counters::named_load_global_inline_miss, 1, r1, r3);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(NORMAL, name);
}
1519
1520
// Compiles a keyed load IC stub specialized for the string key |name|:
// it misses unless the key on the stack equals |name|, then performs a
// regular field load.
Object* KeyedLoadStubCompiler::CompileLoadField(String* name,
                                                JSObject* receiver,
                                                JSObject* holder,
                                                int index) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- sp[0] : key
  //  -- sp[4] : receiver
  // -----------------------------------
  Label miss;

  // Load key (r2) and receiver (r0) from the stack.
  __ ldr(r2, MemOperand(sp, 0));
  __ ldr(r0, MemOperand(sp, kPointerSize));

  // Check the key is the cached one.
  __ cmp(r2, Operand(Handle<String>(name)));
  __ b(ne, &miss);

  GenerateLoadField(receiver, holder, r0, r3, r1, index, name, &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  return GetCode(FIELD, name);
}
1544
1545
// Compiles a keyed load IC stub for an AccessorInfo-backed property,
// specialized for the string key |name|.  GenerateLoadCallback may
// fail, in which case the failure object is returned instead of code.
Object* KeyedLoadStubCompiler::CompileLoadCallback(String* name,
                                                   JSObject* receiver,
                                                   JSObject* holder,
                                                   AccessorInfo* callback) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- sp[0] : key
  //  -- sp[4] : receiver
  // -----------------------------------
  Label miss;

  // Load key (r2) and receiver (r0) from the stack.
  __ ldr(r2, MemOperand(sp, 0));
  __ ldr(r0, MemOperand(sp, kPointerSize));

  // Check the key is the cached one.
  __ cmp(r2, Operand(Handle<String>(name)));
  __ b(ne, &miss);

  Failure* failure = Failure::InternalError();
  bool success = GenerateLoadCallback(receiver, holder, r0, r2, r3, r1,
                                      callback, name, &miss, &failure);
  if (!success) return failure;

  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  return GetCode(CALLBACKS, name);
}
1573
1574
// Compiles a keyed load IC stub that returns the constant |value|,
// specialized for the string key |name|.
Object* KeyedLoadStubCompiler::CompileLoadConstant(String* name,
                                                   JSObject* receiver,
                                                   JSObject* holder,
                                                   Object* value) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- sp[0] : key
  //  -- sp[4] : receiver
  // -----------------------------------
  Label miss;

  // Check the key is the cached one
  __ ldr(r2, MemOperand(sp, 0));
  __ ldr(r0, MemOperand(sp, kPointerSize));

  __ cmp(r2, Operand(Handle<String>(name)));
  __ b(ne, &miss);

  GenerateLoadConstant(receiver, holder, r0, r3, r1, value, name, &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.
  return GetCode(CONSTANT_FUNCTION, name);
}
1600
1601
// Compiles a keyed load IC stub for a property handled by an
// interceptor, specialized for the string key |name|.
Object* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
                                                      JSObject* holder,
                                                      String* name) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- sp[0] : key
  //  -- sp[4] : receiver
  // -----------------------------------
  Label miss;

  // Check the key is the cached one
  __ ldr(r2, MemOperand(sp, 0));
  __ ldr(r0, MemOperand(sp, kPointerSize));

  __ cmp(r2, Operand(Handle<String>(name)));
  __ b(ne, &miss);

  // Determine what the lookup would resolve to if the interceptor were
  // not present; this drives the code emitted below.
  LookupResult lookup;
  LookupPostInterceptor(holder, name, &lookup);
  GenerateLoadInterceptor(receiver,
                          holder,
                          &lookup,
                          r0,
                          r2,
                          r3,
                          r1,
                          name,
                          &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  return GetCode(INTERCEPTOR, name);
}
1635
1636
// Compiles a keyed load IC stub that answers the "length" load on a
// JSArray, specialized for the string key |name|.
Object* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- sp[0] : key
  //  -- sp[4] : receiver
  // -----------------------------------
  Label miss;

  // Check the key is the cached one
  __ ldr(r2, MemOperand(sp, 0));
  __ ldr(r0, MemOperand(sp, kPointerSize));

  __ cmp(r2, Operand(Handle<String>(name)));
  __ b(ne, &miss);

  GenerateLoadArrayLength(masm(), r0, r3, &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  return GetCode(CALLBACKS, name);
}
1658
1659
// Compiles a keyed load IC stub that answers the "length" load on a
// string receiver, specialized for the string key |name|.  The counter
// is incremented up front and decremented again on the miss path so it
// only counts successful fast-path loads.
Object* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- sp[0] : key
  //  -- sp[4] : receiver
  // -----------------------------------
  Label miss;
  __ IncrementCounter(&Counters::keyed_load_string_length, 1, r1, r3);

  __ ldr(r2, MemOperand(sp));
  __ ldr(r0, MemOperand(sp, kPointerSize));  // receiver

  // Check the key is the cached one.
  __ cmp(r2, Operand(Handle<String>(name)));
  __ b(ne, &miss);

  GenerateLoadStringLength2(masm(), r0, r1, r3, &miss);
  __ bind(&miss);
  __ DecrementCounter(&Counters::keyed_load_string_length, 1, r1, r3);

  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  return GetCode(CALLBACKS, name);
}
1683
1684
// TODO(1224671): implement the fast case.
// Currently always generates a miss: the stub simply jumps to the
// generic keyed-load miss handler.
Object* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- sp[0] : key
  //  -- sp[4] : receiver
  // -----------------------------------
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  return GetCode(CALLBACKS, name);
}
1696
1697
// Compiles a keyed store IC stub that writes field |index| of |object|,
// optionally performing the map transition |transition| (NULL means a
// plain field store).  Specialized for the string key |name|.  The
// counter is incremented up front and decremented again on the miss
// path so it only counts successful fast-path stores.
Object* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
                                                  int index,
                                                  Map* transition,
                                                  String* name) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------
  Label miss;

  __ IncrementCounter(&Counters::keyed_store_field, 1, r1, r3);

  // Check that the name has not changed.
  __ cmp(r2, Operand(Handle<String>(name)));
  __ b(ne, &miss);

  // Load receiver from the stack.
  __ ldr(r3, MemOperand(sp));
  // r1 is used as scratch register, r3 and r2 might be clobbered.
  GenerateStoreField(masm(),
                     Builtins::StoreIC_ExtendStorage,
                     object,
                     index,
                     transition,
                     r3, r2, r1,
                     &miss);
  __ bind(&miss);

  __ DecrementCounter(&Counters::keyed_store_field, 1, r1, r3);
  // The miss stub expects the property name in r2.
  __ mov(r2, Operand(Handle<String>(name)));  // restore name register.
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss));
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
}
1736
1737
// Compiles a specialized construct stub for functions whose
// `this.x = ...` property assignments were recorded in |shared|.  The
// stub inline-allocates the JSObject and fills its in-object properties
// from the recorded constants/arguments, bailing out to the generic
// construct stub whenever the fast path cannot be used (debug break
// points set, missing/invalid initial map, or allocation failure).
Object* ConstructStubCompiler::CompileConstructStub(
    SharedFunctionInfo* shared) {
  // ----------- S t a t e -------------
  //  -- r0    : argc
  //  -- r1    : constructor
  //  -- lr    : return address
  //  -- [sp]  : last argument
  // -----------------------------------
  Label generic_stub_call;

  // Use r7 for holding undefined which is used in several places below.
  __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);

#ifdef ENABLE_DEBUGGER_SUPPORT
  // Check to see whether there are any break points in the function code. If
  // there are jump to the generic constructor stub which calls the actual
  // code for the function thereby hitting the break points.
  __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kDebugInfoOffset));
  __ cmp(r2, r7);
  __ b(ne, &generic_stub_call);
#endif

  // Load the initial map and verify that it is in fact a map.
  // r1: constructor function
  // r7: undefined
  __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
  __ tst(r2, Operand(kSmiTagMask));
  __ b(eq, &generic_stub_call);
  __ CompareObjectType(r2, r3, r4, MAP_TYPE);
  __ b(ne, &generic_stub_call);

#ifdef DEBUG
  // Cannot construct functions this way.
  // r0: argc
  // r1: constructor function
  // r2: initial map
  // r7: undefined
  __ CompareInstanceType(r2, r3, JS_FUNCTION_TYPE);
  __ Check(ne, "Function constructed by construct stub.");
#endif

  // Now allocate the JSObject in new space.
  // r0: argc
  // r1: constructor function
  // r2: initial map
  // r7: undefined
  __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset));
  __ AllocateInNewSpace(r3,
                        r4,
                        r5,
                        r6,
                        &generic_stub_call,
                        NO_ALLOCATION_FLAGS);

  // Allocated the JSObject, now initialize the fields. Map is set to initial
  // map and properties and elements are set to empty fixed array.
  // r0: argc
  // r1: constructor function
  // r2: initial map
  // r3: object size (in words)
  // r4: JSObject (not tagged)
  // r7: undefined
  __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
  __ mov(r5, r4);
  // The three header stores rely on the field layout asserted below;
  // r5 walks forward through the object via post-indexed stores.
  ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
  __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
  ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
  __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
  ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset);
  __ str(r6, MemOperand(r5, kPointerSize, PostIndex));

  // Calculate the location of the first argument. The stack contains only the
  // argc arguments.
  __ add(r1, sp, Operand(r0, LSL, kPointerSizeLog2));

  // Fill all the in-object properties with undefined.
  // r0: argc
  // r1: first argument
  // r3: object size (in words)
  // r4: JSObject (not tagged)
  // r5: First in-object property of JSObject (not tagged)
  // r7: undefined
  // Fill the initialized properties with a constant value or a passed argument
  // depending on the this.x = ...; assignment in the function.
  for (int i = 0; i < shared->this_property_assignments_count(); i++) {
    if (shared->IsThisPropertyAssignmentArgument(i)) {
      Label not_passed, next;
      // Check if the argument assigned to the property is actually passed.
      int arg_number = shared->GetThisPropertyAssignmentArgument(i);
      __ cmp(r0, Operand(arg_number));
      __ b(le, &not_passed);
      // Argument passed - find it on the stack.
      __ ldr(r2, MemOperand(r1, (arg_number + 1) * -kPointerSize));
      __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
      __ b(&next);
      __ bind(&not_passed);
      // Set the property to undefined.
      __ str(r7, MemOperand(r5, kPointerSize, PostIndex));
      __ bind(&next);
    } else {
      // Set the property to the constant value.
      Handle<Object> constant(shared->GetThisPropertyAssignmentConstant(i));
      __ mov(r2, Operand(constant));
      __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
    }
  }

  // Fill the unused in-object property fields with undefined.
  for (int i = shared->this_property_assignments_count();
       i < shared->CalculateInObjectProperties();
       i++) {
    __ str(r7, MemOperand(r5, kPointerSize, PostIndex));
  }

  // r0: argc
  // r4: JSObject (not tagged)
  // Move argc to r1 and the JSObject to return to r0 and tag it.
  __ mov(r1, r0);
  __ mov(r0, r4);
  __ orr(r0, r0, Operand(kHeapObjectTag));

  // r0: JSObject
  // r1: argc
  // Remove caller arguments and receiver from the stack and return.
  __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2));
  __ add(sp, sp, Operand(kPointerSize));
  __ IncrementCounter(&Counters::constructed_objects, 1, r1, r2);
  __ IncrementCounter(&Counters::constructed_objects_stub, 1, r1, r2);
  __ Jump(lr);

  // Jump to the generic stub in case the specialized code cannot handle the
  // construction.
  __ bind(&generic_stub_call);
  Code* code = Builtins::builtin(Builtins::JSConstructStubGeneric);
  Handle<Code> generic_construct_stub(code);
  __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode();
}
1879
1880
1881#undef __
1882
1883} } // namespace v8::internal