blob: e14dfe62a00f564f4e95d09b351f423e0ec020e0 [file] [log] [blame]
// Copyright 2006-2008 Google Inc. All Rights Reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
29
30#include "ic-inl.h"
31#include "codegen-inl.h"
32#include "stub-cache.h"
33
34namespace v8 { namespace internal {
35
36#define __ masm->
37
38
// Emit code that probes one table (primary or secondary) of the stub
// cache.  On entry 'offset' holds the entry offset derived from the hash
// and 'name' holds the property name.  On a cache hit the emitted code
// tail-jumps into the cached code object and never falls through; on a
// miss it restores 'offset' and falls through to the code following the
// probe.  Clobbers ip; 'offset' is preserved across the probe via the
// stack.
static void ProbeTable(MacroAssembler* masm,
                       Code::Flags flags,
                       StubCache::Table table,
                       Register name,
                       Register offset) {
  // Addresses of the key (name) and value (code) arrays of the table.
  ExternalReference key_offset(SCTableReference::keyReference(table));
  ExternalReference value_offset(SCTableReference::valueReference(table));

  Label miss;

  // Save the offset on the stack; the register is reused as scratch for
  // the code object and its flags below.
  __ push(offset);

  // Check that the key in the entry matches the name.
  // NOTE(review): the LSL 1 scaling assumes the offset is already scaled
  // such that doubling it indexes 8-byte entries (see the sizeof(Entry)
  // assert in GenerateProbe).
  __ mov(ip, Operand(key_offset));
  __ ldr(ip, MemOperand(ip, offset, LSL, 1));
  __ cmp(name, Operand(ip));
  __ b(ne, &miss);

  // Get the code entry from the cache.
  __ mov(ip, Operand(value_offset));
  __ ldr(offset, MemOperand(ip, offset, LSL, 1));

  // Check that the flags match what we're looking for.  The type bits
  // are masked out because the probe is type-independent (GenerateProbe
  // asserts the requested flags carry no type).
  __ ldr(offset, FieldMemOperand(offset, Code::kFlagsOffset));
  __ and_(offset, offset, Operand(~Code::kFlagsTypeMask));
  __ cmp(offset, Operand(flags));
  __ b(ne, &miss);

  // Restore offset and re-load code entry from cache; the code pointer
  // was clobbered while checking the flags.
  __ pop(offset);
  __ mov(ip, Operand(value_offset));
  __ ldr(offset, MemOperand(ip, offset, LSL, 1));

  // Jump to the first instruction in the code stub (skip the code
  // object header and remove the heap-object tag).
  __ add(offset, offset, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(offset);

  // Miss: Restore offset and fall through.
  __ bind(&miss);
  __ pop(offset);
}
81
82
// Emit the full stub-cache probe sequence: hash the receiver's map and
// the name together with the requested code flags, probe the primary
// table, rehash, probe the secondary table, and fall through on a total
// miss so the caller can enter the runtime.  Clobbers 'scratch' and ip.
void StubCache::GenerateProbe(MacroAssembler* masm,
                              Code::Flags flags,
                              Register receiver,
                              Register name,
                              Register scratch) {
  Label miss;

  // Make sure that code is valid. The shifting code relies on the
  // entry size being 8.
  ASSERT(sizeof(Entry) == 8);

  // Make sure the flags does not name a specific type; the probe must be
  // type-independent (ProbeTable masks the type bits out of the cached
  // code's flags before comparing).
  ASSERT(Code::ExtractTypeFromFlags(flags) == 0);

  // Make sure that there are no register conflicts.
  ASSERT(!scratch.is(receiver));
  ASSERT(!scratch.is(name));

  // Check that the receiver isn't a smi; smis have no map to hash.
  __ tst(receiver, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Get the map of the receiver and compute the hash: map + name length,
  // xor'ed with the flags, masked to a valid primary-table offset.
  __ ldr(scratch, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ ldr(ip, FieldMemOperand(name, String::kLengthOffset));
  __ add(scratch, scratch, Operand(ip));
  __ eor(scratch, scratch, Operand(flags));
  __ and_(scratch,
          scratch,
          Operand((kPrimaryTableSize - 1) << kHeapObjectTagSize));

  // Probe the primary table.
  ProbeTable(masm, flags, kPrimary, name, scratch);

  // Primary miss: Compute hash for secondary probe from the primary
  // offset, the name and the flags.
  __ sub(scratch, scratch, Operand(name));
  __ add(scratch, scratch, Operand(flags));
  __ and_(scratch,
          scratch,
          Operand((kSecondaryTableSize - 1) << kHeapObjectTagSize));

  // Probe the secondary table.
  ProbeTable(masm, flags, kSecondary, name, scratch);

  // Cache miss: Fall-through and let caller handle the miss by
  // entering the runtime system.
  __ bind(&miss);
}
131
132
// Load into 'prototype' the initial-map prototype of the global function
// stored at 'index' in the global context (e.g. the String or Number
// function).  Walks: current context -> global object -> global context
// -> function -> initial map -> prototype.  Clobbers only 'prototype'.
void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       int index,
                                                       Register prototype) {
  // Load the global or builtins object from the current context.
  __ ldr(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
  // Load the global context from the global or builtins object.
  __ ldr(prototype,
         FieldMemOperand(prototype, GlobalObject::kGlobalContextOffset));
  // Load the function from the global context.
  __ ldr(prototype, MemOperand(prototype, Context::SlotOffset(index)));
  // Load the initial map. The global functions all have initial maps.
  __ ldr(prototype,
         FieldMemOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
  __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}
149
150
151#undef __
152
153#define __ masm()->
154
155
// Compile the trampoline used for functions that have not been compiled
// yet: enter a JS frame, call the lazy-compile runtime function on the
// callee, then tail-call the freshly compiled code.
Object* StubCompiler::CompileLazyCompile(Code::Flags flags) {
  HandleScope scope;

  // Enter the JS frame but don't add additional arguments.
  __ EnterJSFrame(0);

  // Push the function on the stack and call the runtime function.
  // NOTE(review): the function is assumed to be at MemOperand(pp, 0) in
  // the caller's frame layout — confirm against the ARM frame constants.
  __ Push(MemOperand(pp, 0));
  __ CallRuntime(Runtime::kLazyCompile, 1);

  // Move result (the compiled code object) to r1 and restore number of
  // arguments to r0 from the frame.
  __ mov(r1, Operand(r0));
  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kArgsLengthOffset));

  // Leave the frame without returning; we tail-call below.
  __ ExitJSFrame(DO_NOT_RETURN);

  // Do a tail-call of the compiled function (skip the code object header
  // and remove the heap-object tag).
  __ add(r1, r1, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(r1);

  // Materialize the generated code with the requested flags.
  return GetCodeWithFlags(flags);
}
178
179
// Compile a call stub for a function stored in an in-object property
// field of 'holder', reached from 'object' via an unchanged prototype
// chain.  On a miss the generic call-miss IC is invoked.
Object* CallStubCompiler::CompileCallField(Object* object,
                                           JSObject* holder,
                                           int index) {
  // ----------- S t a t e -------------
  //  -- r0: number of arguments
  //  -- r1: receiver
  //  -- lr: return address
  // -----------------------------------

  HandleScope scope;
  Label miss;

  // Check that the receiver isn't a smi.
  __ tst(r1, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Do the right check and compute the holder register.
  Register reg =
      __ CheckMaps(JSObject::cast(object), r1, holder, r3, r2, &miss);

  // Get the properties array of the holder and get the function from the field.
  int offset = index * kPointerSize + Array::kHeaderSize;
  __ ldr(r3, FieldMemOperand(reg, JSObject::kPropertiesOffset));
  __ ldr(r3, FieldMemOperand(r3, offset));

  // Check that the function really is a function: not a smi, and has
  // JS_FUNCTION_TYPE as its instance type.
  __ tst(r3, Operand(kSmiTagMask));
  __ b(eq, &miss);
  // Get the map.
  __ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
  __ cmp(r2, Operand(JS_FUNCTION_TYPE));
  __ b(ne, &miss);

  // Patch the function on the stack; 1 ~ receiver.  The function slot
  // lies one word beyond the arguments (argc slots above sp).
  __ add(ip, sp, Operand(r0, LSL, kPointerSizeLog2));
  __ str(r3, MemOperand(ip, 1 * kPointerSize));

  // Setup the context and jump to the call code of the function (tail call).
  __ ldr(cp, FieldMemOperand(r3, JSFunction::kContextOffset));
  __ ldr(r2, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCodeOffset));
  __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(r2);

  // Handle call cache miss: jump to the generic call-miss stub for this
  // argument count.
  __ bind(&miss);
  Handle<Code> ic = ComputeCallMiss(arguments().immediate());
  __ Jump(ic, code_target);

  // Return the generated code.
  return GetCode(FIELD);
}
233
234
// Compile a call stub for a known constant function.  The receiver is
// validated according to 'check' (map check, string/number/boolean
// wrapper check, or fast-elements array check) before tail-calling the
// function's cached code.  On any failed check the generic call-miss IC
// is invoked.
Object* CallStubCompiler::CompileCallConstant(Object* object,
                                              JSObject* holder,
                                              JSFunction* function,
                                              CheckType check) {
  // ----------- S t a t e -------------
  //  -- r0: number of arguments
  //  -- r1: receiver
  //  -- lr: return address
  // -----------------------------------

  HandleScope scope;
  Label miss;

  // Check that the receiver isn't a smi.  Skipped for NUMBER_CHECK,
  // where a smi receiver is valid and handled below.
  if (check != NUMBER_CHECK) {
    __ tst(r1, Operand(kSmiTagMask));
    __ b(eq, &miss);
  }

  switch (check) {
    case RECEIVER_MAP_CHECK:
      // Check that the maps haven't changed.
      __ CheckMaps(JSObject::cast(object), r1, holder, r3, r2, &miss);
      break;

    case STRING_CHECK:
      // Check that the object is a string (any instance type below
      // FIRST_NONSTRING_TYPE, which includes symbols).
      __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
      __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
      __ cmp(r2, Operand(FIRST_NONSTRING_TYPE));
      __ b(hs, &miss);
      // Check that the maps starting from the String function's
      // prototype haven't changed.  NOTE(review): this clobbers r1 (the
      // receiver) as a CheckMaps scratch register; the miss path does
      // not rely on r1 afterwards.
      GenerateLoadGlobalFunctionPrototype(masm(),
                                          Context::STRING_FUNCTION_INDEX,
                                          r2);
      __ CheckMaps(JSObject::cast(object->GetPrototype()),
                   r2, holder, r3, r1, &miss);
      break;

    case NUMBER_CHECK: {
      Label fast;
      // Check that the object is a smi or a heap number.
      __ tst(r1, Operand(kSmiTagMask));
      __ b(eq, &fast);
      __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
      __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
      __ cmp(r2, Operand(HEAP_NUMBER_TYPE));
      __ b(ne, &miss);
      __ bind(&fast);
      // Check that the maps starting from the Number function's
      // prototype haven't changed (clobbers r1, see STRING_CHECK note).
      GenerateLoadGlobalFunctionPrototype(masm(),
                                          Context::NUMBER_FUNCTION_INDEX,
                                          r2);
      __ CheckMaps(JSObject::cast(object->GetPrototype()),
                   r2, holder, r3, r1, &miss);
      break;
    }

    case BOOLEAN_CHECK: {
      Label fast;
      // Check that the object is the true or false value.
      __ cmp(r1, Operand(Factory::true_value()));
      __ b(eq, &fast);
      __ cmp(r1, Operand(Factory::false_value()));
      __ b(ne, &miss);
      __ bind(&fast);
      // Check that the maps starting from the Boolean function's
      // prototype haven't changed (clobbers r1, see STRING_CHECK note).
      GenerateLoadGlobalFunctionPrototype(masm(),
                                          Context::BOOLEAN_FUNCTION_INDEX,
                                          r2);
      __ CheckMaps(JSObject::cast(object->GetPrototype()),
                   r2, holder, r3, r1, &miss);
      break;
    }

    case JSARRAY_HAS_FAST_ELEMENTS_CHECK:
      __ CheckMaps(JSObject::cast(object), r1, holder, r3, r2, &miss);
      // Make sure object->elements()->map() != Heap::hash_table_map()
      // Get the elements array of the object.
      __ ldr(r3, FieldMemOperand(r1, JSObject::kElementsOffset));
      // Check that the object is in fast mode (not dictionary).
      __ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
      __ cmp(r2, Operand(Factory::hash_table_map()));
      __ b(eq, &miss);
      break;

    default:
      UNREACHABLE();
  }

  // Get the function and setup the context.
  __ mov(r3, Operand(Handle<JSFunction>(function)));
  __ ldr(cp, FieldMemOperand(r3, JSFunction::kContextOffset));

  // Patch the function on the stack; 1 ~ receiver.  The function slot
  // lies one word beyond the arguments (argc slots above sp).
  __ add(ip, sp, Operand(r0, LSL, kPointerSizeLog2));
  __ str(r3, MemOperand(ip, 1 * kPointerSize));

  // Jump to the cached code (tail call).
  Handle<Code> code(function->code());
  __ Jump(code, code_target);

  // Handle call cache miss: jump to the generic call-miss stub for this
  // argument count.
  __ bind(&miss);
  Handle<Code> ic = ComputeCallMiss(arguments().immediate());
  __ Jump(ic, code_target);

  // Return the generated code.
  return GetCode(CONSTANT_FUNCTION);
}
345
346
// Compile a call stub for a property behind an interceptor.  Not yet
// implemented on ARM: the generated stub consists only of the miss
// handler, so every invocation falls through to the generic call-miss IC.
Object* CallStubCompiler::CompileCallInterceptor(Object* object,
                                                 JSObject* holder,
                                                 String* name) {
  // ----------- S t a t e -------------
  //  -- r0: number of arguments
  //  -- r1: receiver
  //  -- lr: return address
  // -----------------------------------

  HandleScope scope;
  Label miss;

  // TODO(1224669): Implement.

  // Handle call cache miss.
  __ bind(&miss);
  Handle<Code> ic = ComputeCallMiss(arguments().immediate());
  __ Jump(ic, code_target);

  // Return the generated code.
  return GetCode(INTERCEPTOR);
}
369
370
// Compile a store stub that writes 'r0' into property slot 'index' of
// the receiver's properties array, optionally performing a map
// transition first.  Falls back to the generic StoreIC_Miss builtin on
// any failed check.
Object* StoreStubCompiler::CompileStoreField(JSObject* object,
                                             int index,
                                             Map* transition,
                                             String* name) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------

  HandleScope scope;
  Label miss, exit;

  // Get the receiver from the stack.
  __ ldr(r3, MemOperand(sp, 0 * kPointerSize));

  // Check that the receiver isn't a smi.
  __ tst(r3, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Check that the map of the receiver hasn't changed.
  __ ldr(r1, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ cmp(r1, Operand(Handle<Map>(object->map())));
  __ b(ne, &miss);

  // Perform global security token check if needed.
  if (object->IsJSGlobalObject()) {
    __ CheckAccessGlobal(r3, r1, &miss);
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalObject() || !object->IsAccessCheckNeeded());

  // Get the properties array
  __ ldr(r1, FieldMemOperand(r3, JSObject::kPropertiesOffset));

  // Perform map transition for the receiver if necessary.
  if (transition != NULL) {
    // Update the map of the object; no write barrier updating is
    // needed because the map is never in new space.
    __ mov(ip, Operand(Handle<Map>(transition)));
    __ str(ip, FieldMemOperand(r3, HeapObject::kMapOffset));
  }

  // Write to the properties array.
  int offset = index * kPointerSize + Array::kHeaderSize;
  __ str(r0, FieldMemOperand(r1, offset));

  // Skip updating write barrier if storing a smi; smis are not heap
  // pointers, so the GC does not need to be told about the store.
  __ tst(r0, Operand(kSmiTagMask));
  __ b(eq, &exit);

  // Update the write barrier for the array address.
  __ mov(r3, Operand(offset));
  __ RecordWrite(r1, r3, r2);  // OK to clobber r2, since we return

  // Return the value (register r0).
  __ bind(&exit);
  __ Ret();

  // Handle store cache miss.
  __ bind(&miss);
  __ mov(r2, Operand(Handle<String>(name)));  // restore name
  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
  __ Jump(ic, code_target);

  // Return the generated code.
  return GetCode(transition == NULL ? FIELD : MAP_TRANSITION);
}
442
443
// Compile a store stub that dispatches a property store to a native
// setter described by 'callback'.  After the receiver's map (and, for
// global objects, the security token) is validated, the stub pushes the
// arguments and tail-calls the kStoreCallbackProperty C builtin.
Object* StoreStubCompiler::CompileStoreCallback(JSObject* object,
                                                AccessorInfo* callback,
                                                String* name) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------

  HandleScope scope;
  Label miss;

  // Get the object from the stack.
  __ ldr(r3, MemOperand(sp, 0 * kPointerSize));

  // Check that the object isn't a smi.
  __ tst(r3, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Check that the map of the object hasn't changed.
  __ ldr(r1, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ cmp(r1, Operand(Handle<Map>(object->map())));
  __ b(ne, &miss);

  // Perform global security token check if needed.
  if (object->IsJSGlobalObject()) {
    __ CheckAccessGlobal(r3, r1, &miss);
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalObject() || !object->IsAccessCheckNeeded());

  // Push the builtin's arguments: receiver, callback info, name, value.
  __ ldr(ip, MemOperand(sp));  // receiver
  __ push(ip);
  __ mov(ip, Operand(Handle<AccessorInfo>(callback)));  // callback info
  __ push(ip);
  __ push(r2);  // name
  __ push(r0);  // value

  // Do tail-call to the C builtin.
  __ mov(r0, Operand(3));  // not counting receiver
  __ JumpToBuiltin(ExternalReference(IC_Utility(IC::kStoreCallbackProperty)));

  // Handle store cache miss.
  __ bind(&miss);
  __ mov(r2, Operand(Handle<String>(name)));  // restore name
  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
  __ Jump(ic, code_target);

  // Return the generated code.
  return GetCode(CALLBACKS);
}
498
499
// Compile a store stub that forwards a property store to the receiver's
// interceptor via the kStoreInterceptorProperty C builtin, after
// validating the receiver's map (and security token for globals).
Object* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
                                                   String* name) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------

  HandleScope scope;
  Label miss;

  // Get the object from the stack.
  __ ldr(r3, MemOperand(sp, 0 * kPointerSize));

  // Check that the object isn't a smi.
  __ tst(r3, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Check that the map of the object hasn't changed.
  __ ldr(r1, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ cmp(r1, Operand(Handle<Map>(receiver->map())));
  __ b(ne, &miss);

  // Perform global security token check if needed.
  if (receiver->IsJSGlobalObject()) {
    __ CheckAccessGlobal(r3, r1, &miss);
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(receiver->IsJSGlobalObject() || !receiver->IsAccessCheckNeeded());

  // Push the builtin's arguments: receiver, name, value.
  __ ldr(ip, MemOperand(sp));  // receiver
  __ push(ip);
  __ push(r2);  // name
  __ push(r0);  // value

  // Do tail-call to the C builtin.
  __ mov(r0, Operand(2));  // not counting receiver
  ExternalReference store_interceptor =
      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty));
  __ JumpToBuiltin(store_interceptor);

  // Handle store cache miss.
  __ bind(&miss);
  __ mov(r2, Operand(Handle<String>(name)));  // restore name
  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
  __ Jump(ic, code_target);

  // Return the generated code.
  return GetCode(INTERCEPTOR);
}
553
554
// Compile a load stub that reads property slot 'index' out of the
// properties array of 'holder', reached from 'object' via an unchanged
// prototype chain.  Falls back to the generic LoadIC_Miss builtin on any
// failed check.
Object* LoadStubCompiler::CompileLoadField(JSObject* object,
                                           JSObject* holder,
                                           int index) {
  // ----------- S t a t e -------------
  //  -- r0    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------

  HandleScope scope;
  Label miss;

  // Check that the receiver isn't a smi.
  __ tst(r0, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Check that the maps haven't changed.
  Register reg = __ CheckMaps(object, r0, holder, r3, r1, &miss);

  // Get the properties array of the holder.
  __ ldr(r3, FieldMemOperand(reg, JSObject::kPropertiesOffset));

  // Return the value from the properties array.
  int offset = index * kPointerSize + Array::kHeaderSize;
  __ ldr(r0, FieldMemOperand(r3, offset));
  __ Ret();

  // Handle load cache miss.  The receiver must be restored from the
  // stack because r0 may have been clobbered above.
  __ bind(&miss);
  __ ldr(r0, MemOperand(sp));  // restore receiver
  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Miss));
  __ Jump(ic, code_target);

  // Return the generated code.
  return GetCode(FIELD);
}
592
593
// Compile a load stub that dispatches a property load to a native
// getter described by 'callback' by tail-calling the
// kLoadCallbackProperty C builtin after validating the map chain.
Object* LoadStubCompiler::CompileLoadCallback(JSObject* object,
                                              JSObject* holder,
                                              AccessorInfo* callback) {
  // ----------- S t a t e -------------
  //  -- r0    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------

  HandleScope scope;
  Label miss;

  // Check that the receiver isn't a smi.
  __ tst(r0, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Check that the maps haven't changed.
  Register reg = __ CheckMaps(object, r0, holder, r3, r1, &miss);

  // Push the arguments on the JS stack of the caller: receiver,
  // callback data, name and holder.
  __ push(r0);  // receiver
  __ mov(ip, Operand(Handle<AccessorInfo>(callback)));  // callback data
  __ push(ip);
  __ push(r2);  // name
  __ push(reg);  // holder

  // Do tail-call to the C builtin.
  __ mov(r0, Operand(3));  // not counting receiver
  __ JumpToBuiltin(ExternalReference(IC_Utility(IC::kLoadCallbackProperty)));

  // Handle load cache miss.
  __ bind(&miss);
  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Miss));
  __ Jump(ic, code_target);

  // Return the generated code.
  return GetCode(CALLBACKS);
}
633
634
// Compile a load stub that returns the compile-time constant 'value'
// for a property, once the receiver's map chain has been validated.
Object* LoadStubCompiler::CompileLoadConstant(JSObject* object,
                                              JSObject* holder,
                                              Object* value) {
  // ----------- S t a t e -------------
  //  -- r0    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------

  HandleScope scope;
  Label miss;

  // Check that the receiver isn't a smi.
  __ tst(r0, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Check that the maps haven't changed.
  Register reg = __ CheckMaps(object, r0, holder, r3, r1, &miss);

  // Return the constant value.
  __ mov(r0, Operand(Handle<Object>(value)));
  __ Ret();

  // Handle load cache miss.
  __ bind(&miss);
  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Miss));
  __ Jump(ic, code_target);

  // Return the generated code.
  return GetCode(CONSTANT_FUNCTION);
}
667
668
// Compile a load stub that forwards a property load to the holder's
// interceptor via the kLoadInterceptorProperty C builtin, after
// validating the map chain.
Object* LoadStubCompiler::CompileLoadInterceptor(JSObject* object,
                                                 JSObject* holder,
                                                 String* name) {
  // ----------- S t a t e -------------
  //  -- r0    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------

  HandleScope scope;
  Label miss;

  // Check that the receiver isn't a smi.
  __ tst(r0, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Check that the maps haven't changed.
  Register reg = __ CheckMaps(object, r0, holder, r3, r1, &miss);

  // Push the arguments on the JS stack of the caller: receiver, holder
  // and name.
  __ push(r0);  // receiver
  __ push(reg);  // holder
  __ push(r2);  // name

  // Do tail-call to the C builtin.
  __ mov(r0, Operand(2));  // not counting receiver
  __ JumpToBuiltin(ExternalReference(IC_Utility(IC::kLoadInterceptorProperty)));

  // Handle load cache miss.
  __ bind(&miss);
  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Miss));
  __ Jump(ic, code_target);

  // Return the generated code.
  return GetCode(INTERCEPTOR);
}
706
707
708// TODO(1224671): IC stubs for keyed loads have not been implemented
709// for ARM.
// Keyed field load: not yet implemented on ARM (see TODO(1224671) above).
Object* KeyedLoadStubCompiler::CompileLoadField(String* name,
                                                JSObject* receiver,
                                                JSObject* holder,
                                                int index) {
  UNIMPLEMENTED();
  return Heap::undefined_value();
}
717
718
// Keyed callback load: not yet implemented on ARM (TODO(1224671)).
Object* KeyedLoadStubCompiler::CompileLoadCallback(String* name,
                                                   JSObject* receiver,
                                                   JSObject* holder,
                                                   AccessorInfo* callback) {
  UNIMPLEMENTED();
  return Heap::undefined_value();
}
726
727
// Keyed constant load: not yet implemented on ARM (TODO(1224671)).
Object* KeyedLoadStubCompiler::CompileLoadConstant(String* name,
                                                   JSObject* receiver,
                                                   JSObject* holder,
                                                   Object* value) {
  UNIMPLEMENTED();
  return Heap::undefined_value();
}
735
736
// Keyed interceptor load: not yet implemented on ARM (TODO(1224671)).
Object* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
                                                      JSObject* holder,
                                                      String* name) {
  UNIMPLEMENTED();
  return Heap::undefined_value();
}
743
744
// Keyed array-length load: not yet implemented on ARM (TODO(1224671)).
Object* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) {
  UNIMPLEMENTED();
  return Heap::undefined_value();
}
749
750
// Keyed short-string-length load: not yet implemented on ARM (TODO(1224671)).
Object* KeyedLoadStubCompiler::CompileLoadShortStringLength(String* name) {
  UNIMPLEMENTED();
  return Heap::undefined_value();
}
755
756
// Keyed medium-string-length load: not yet implemented on ARM (TODO(1224671)).
Object* KeyedLoadStubCompiler::CompileLoadMediumStringLength(String* name) {
  UNIMPLEMENTED();
  return Heap::undefined_value();
}
761
762
// Keyed long-string-length load: not yet implemented on ARM (TODO(1224671)).
Object* KeyedLoadStubCompiler::CompileLoadLongStringLength(String* name) {
  UNIMPLEMENTED();
  return Heap::undefined_value();
}
767
768
// Keyed function-prototype load: not yet implemented on ARM (TODO(1224671)).
Object* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
  UNIMPLEMENTED();
  return Heap::undefined_value();
}
773
774
// Keyed field store: not yet implemented on ARM.
Object* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
                                                  int index,
                                                  Map* transition,
                                                  String* name) {
  UNIMPLEMENTED();
  return Heap::undefined_value();
}
782
783
784
785#undef __
786
787} } // namespace v8::internal