blob: 47d675b0734da86324423faab1c567f354237ae1 [file] [log] [blame]
Ben Murdoch8b112d22011-06-08 16:22:53 +01001// Copyright 2011 the V8 project authors. All rights reserved.
Steve Blocka7e24c12009-10-30 11:49:00 +00002// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
29
Leon Clarkef7060e22010-06-03 12:02:55 +010030#if defined(V8_TARGET_ARCH_ARM)
31
Steve Blocka7e24c12009-10-30 11:49:00 +000032#include "ic-inl.h"
Ben Murdoch8b112d22011-06-08 16:22:53 +010033#include "codegen.h"
Steve Blocka7e24c12009-10-30 11:49:00 +000034#include "stub-cache.h"
35
36namespace v8 {
37namespace internal {
38
39#define __ ACCESS_MASM(masm)
40
41
// Probe one table (primary or secondary) of the stub cache. On a hit the
// generated code tail-jumps into the cached stub; on a miss it falls
// through. 'offset' holds the precomputed hash-based table index (scaled
// below by LSL 1), 'scratch'/'scratch2' are clobbered, and ip is used as
// an additional temporary.
static void ProbeTable(Isolate* isolate,
                       MacroAssembler* masm,
                       Code::Flags flags,
                       StubCache::Table table,
                       Register name,
                       Register offset,
                       Register scratch,
                       Register scratch2) {
  // External addresses of the key and value arrays for the chosen table.
  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));

  uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address());
  uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address());

  // Check the relative positions of the address fields. The value array is
  // reached from the key array by adding a small constant delta below.
  ASSERT(value_off_addr > key_off_addr);
  ASSERT((value_off_addr - key_off_addr) % 4 == 0);
  ASSERT((value_off_addr - key_off_addr) < (256 * 4));

  Label miss;
  Register offsets_base_addr = scratch;

  // Check that the key in the entry matches the name.
  __ mov(offsets_base_addr, Operand(key_offset));
  __ ldr(ip, MemOperand(offsets_base_addr, offset, LSL, 1));
  __ cmp(name, ip);
  __ b(ne, &miss);

  // Get the code entry from the cache.
  __ add(offsets_base_addr, offsets_base_addr,
         Operand(value_off_addr - key_off_addr));
  __ ldr(scratch2, MemOperand(offsets_base_addr, offset, LSL, 1));

  // Check that the flags match what we're looking for.
  __ ldr(scratch2, FieldMemOperand(scratch2, Code::kFlagsOffset));
  __ bic(scratch2, scratch2, Operand(Code::kFlagsNotUsedInLookup));
  __ cmp(scratch2, Operand(flags));
  __ b(ne, &miss);

  // Re-load code entry from cache (scratch2 was overwritten by the flags
  // word above, so the entry must be fetched again before jumping).
  __ ldr(offset, MemOperand(offsets_base_addr, offset, LSL, 1));

  // Jump to the first instruction in the code stub (skip the Code header
  // and untag the heap-object pointer in one add).
  __ add(offset, offset, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(offset);

  // Miss: fall through.
  __ bind(&miss);
}
91
92
// Helper function used to check that the dictionary doesn't contain
// the property. This function may return false negatives, so miss_label
// must always call a backup property check that is complete.
// This function is safe to call if the receiver has fast properties.
// Name must be a symbol and receiver must be a heap object.
// Both scratch registers are clobbered; on fall-through the property is
// guaranteed absent, on branch to miss_label nothing is guaranteed.
static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
                                             Label* miss_label,
                                             Register receiver,
                                             String* name,
                                             Register scratch0,
                                             Register scratch1) {
  ASSERT(name->IsSymbol());
  Counters* counters = masm->isolate()->counters();
  // Count every lookup; the miss counter is decremented again on success
  // at the bottom, so it nets out to counting actual misses only.
  __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);

  Label done;

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access checks.
  Register map = scratch1;
  __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ ldrb(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
  __ tst(scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
  __ b(ne, miss_label);

  // Check that receiver is a JSObject.
  __ ldrb(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ cmp(scratch0, Operand(FIRST_JS_OBJECT_TYPE));
  __ b(lt, miss_label);

  // Load properties array.
  Register properties = scratch0;
  __ ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
  // Check that the properties array is a dictionary.
  __ ldr(map, FieldMemOperand(properties, HeapObject::kMapOffset));
  Register tmp = properties;
  __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
  __ cmp(map, tmp);
  __ b(ne, miss_label);

  // Restore the temporarily used register (tmp aliased properties above).
  __ ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));

  // Compute the capacity mask.
  const int kCapacityOffset =
      StringDictionary::kHeaderSize +
      StringDictionary::kCapacityIndex * kPointerSize;

  // Generate an unrolled loop that performs a few probes before
  // giving up.
  static const int kProbes = 4;
  const int kElementsStartOffset =
      StringDictionary::kHeaderSize +
      StringDictionary::kElementsStartIndex * kPointerSize;

  // If names of slots in range from 1 to kProbes - 1 for the hash value are
  // not equal to the name and kProbes-th slot is not used (its name is the
  // undefined value), it guarantees the hash table doesn't contain the
  // property. It's true even if some slots represent deleted properties
  // (their names are the null value).
  for (int i = 0; i < kProbes; i++) {
    // scratch0 points to properties hash.
    // Compute the masked index: (hash + i + i * i) & mask.
    Register index = scratch1;
    // Capacity is smi 2^n.
    __ ldr(index, FieldMemOperand(properties, kCapacityOffset));
    __ sub(index, index, Operand(1));
    // The probe offset for round i is folded into the constant at
    // code-generation time since the name's hash is known here.
    __ and_(index, index, Operand(
        Smi::FromInt(name->Hash() + StringDictionary::GetProbeOffset(i))));

    // Scale the index by multiplying by the entry size.
    ASSERT(StringDictionary::kEntrySize == 3);
    __ add(index, index, Operand(index, LSL, 1));  // index *= 3.

    Register entity_name = scratch1;
    // Having undefined at this place means the name is not contained.
    ASSERT_EQ(kSmiTagSize, 1);
    Register tmp = properties;
    // index is a smi, so LSL 1 turns the smi-tagged slot count into a
    // byte offset (smi value * kPointerSize / 2 * 2).
    __ add(tmp, properties, Operand(index, LSL, 1));
    __ ldr(entity_name, FieldMemOperand(tmp, kElementsStartOffset));

    ASSERT(!tmp.is(entity_name));
    __ LoadRoot(tmp, Heap::kUndefinedValueRootIndex);
    __ cmp(entity_name, tmp);
    if (i != kProbes - 1) {
      // Empty slot found: the property cannot be in the dictionary.
      __ b(eq, &done);

      // Stop if found the property.
      __ cmp(entity_name, Operand(Handle<String>(name)));
      __ b(eq, miss_label);

      // Check if the entry name is not a symbol.
      __ ldr(entity_name, FieldMemOperand(entity_name, HeapObject::kMapOffset));
      __ ldrb(entity_name,
              FieldMemOperand(entity_name, Map::kInstanceTypeOffset));
      __ tst(entity_name, Operand(kIsSymbolMask));
      __ b(eq, miss_label);

      // Restore the properties (clobbered via the tmp alias above).
      __ ldr(properties,
             FieldMemOperand(receiver, JSObject::kPropertiesOffset));
    } else {
      // Give up probing if still not found the undefined value.
      __ b(ne, miss_label);
    }
  }
  __ bind(&done);
  // Success: the earlier miss-counter bump was premature, undo it.
  __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
}
205
206
// Probe the stub cache for a (receiver map, name, flags) entry. Tries the
// primary table first, then the secondary table with a rehashed index; on
// a hit control jumps into the cached stub and never returns here. On a
// double miss execution falls through so the caller can enter the runtime.
// scratch, extra and extra2 are clobbered; ip is also used as a temporary.
void StubCache::GenerateProbe(MacroAssembler* masm,
                              Code::Flags flags,
                              Register receiver,
                              Register name,
                              Register scratch,
                              Register extra,
                              Register extra2) {
  Isolate* isolate = masm->isolate();
  Label miss;

  // Make sure that code is valid. The shifting code relies on the
  // entry size being 8.
  ASSERT(sizeof(Entry) == 8);

  // Make sure the flags does not name a specific type.
  ASSERT(Code::ExtractTypeFromFlags(flags) == 0);

  // Make sure that there are no register conflicts.
  ASSERT(!scratch.is(receiver));
  ASSERT(!scratch.is(name));
  ASSERT(!extra.is(receiver));
  ASSERT(!extra.is(name));
  ASSERT(!extra.is(scratch));
  ASSERT(!extra2.is(receiver));
  ASSERT(!extra2.is(name));
  ASSERT(!extra2.is(scratch));
  ASSERT(!extra2.is(extra));

  // Check scratch, extra and extra2 registers are valid.
  ASSERT(!scratch.is(no_reg));
  ASSERT(!extra.is(no_reg));
  ASSERT(!extra2.is(no_reg));

  // Check that the receiver isn't a smi.
  __ tst(receiver, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Get the map of the receiver and compute the hash.
  // NOTE(review): this hash must stay in sync with the C++ side of the
  // stub cache so lookups and insertions agree — confirm against
  // StubCache::PrimaryOffset before changing any step.
  __ ldr(scratch, FieldMemOperand(name, String::kHashFieldOffset));
  __ ldr(ip, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ add(scratch, scratch, Operand(ip));
  __ eor(scratch, scratch, Operand(flags));
  __ and_(scratch,
          scratch,
          Operand((kPrimaryTableSize - 1) << kHeapObjectTagSize));

  // Probe the primary table.
  ProbeTable(isolate, masm, flags, kPrimary, name, scratch, extra, extra2);

  // Primary miss: Compute hash for secondary probe.
  __ sub(scratch, scratch, Operand(name));
  __ add(scratch, scratch, Operand(flags));
  __ and_(scratch,
          scratch,
          Operand((kSecondaryTableSize - 1) << kHeapObjectTagSize));

  // Probe the secondary table.
  ProbeTable(isolate, masm, flags, kSecondary, name, scratch, extra, extra2);

  // Cache miss: Fall-through and let caller handle the miss by
  // entering the runtime system.
  __ bind(&miss);
}
270
271
// Load the prototype of the global function at 'index' in the global
// context into 'prototype'. The register is used as a cursor through the
// whole chain of loads, so its intermediate values are clobbered.
void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       int index,
                                                       Register prototype) {
  // Load the global or builtins object from the current context.
  __ ldr(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
  // Load the global context from the global or builtins object.
  __ ldr(prototype,
         FieldMemOperand(prototype, GlobalObject::kGlobalContextOffset));
  // Load the function from the global context.
  __ ldr(prototype, MemOperand(prototype, Context::SlotOffset(index)));
  // Load the initial map. The global functions all have initial maps.
  __ ldr(prototype,
         FieldMemOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
  __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}
288
289
// Like GenerateLoadGlobalFunctionPrototype, but bakes the function's
// initial map into the code as a constant instead of traversing at run
// time. Guarded by a context check: if the code runs in a different
// global context than the one it was compiled for, it jumps to 'miss'.
void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm, int index, Register prototype, Label* miss) {
  Isolate* isolate = masm->isolate();
  // Check we're still in the same context.
  __ ldr(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ Move(ip, isolate->global());
  __ cmp(prototype, ip);
  __ b(ne, miss);
  // Get the global function with the given index.
  JSFunction* function =
      JSFunction::cast(isolate->global_context()->get(index));
  // Load its initial map. The global functions all have initial maps.
  __ Move(prototype, Handle<Map>(function->initial_map()));
  // Load the prototype from the initial map.
  __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}
306
307
Steve Blocka7e24c12009-10-30 11:49:00 +0000308// Load a fast property out of a holder object (src). In-object properties
309// are loaded directly otherwise the property is loaded from the properties
310// fixed array.
311void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
312 Register dst, Register src,
313 JSObject* holder, int index) {
314 // Adjust for the number of properties stored in the holder.
315 index -= holder->map()->inobject_properties();
316 if (index < 0) {
317 // Get the property straight out of the holder.
318 int offset = holder->map()->instance_size() + (index * kPointerSize);
319 __ ldr(dst, FieldMemOperand(src, offset));
320 } else {
321 // Calculate the offset into the properties array.
322 int offset = index * kPointerSize + FixedArray::kHeaderSize;
323 __ ldr(dst, FieldMemOperand(src, JSObject::kPropertiesOffset));
324 __ ldr(dst, FieldMemOperand(dst, offset));
325 }
326}
327
328
// Emit a load of JSArray::length into r0 followed by a return. Jumps to
// miss_label if the receiver is a smi or not a JS array. 'scratch' is
// clobbered by the type check.
void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
                                           Register receiver,
                                           Register scratch,
                                           Label* miss_label) {
  // Check that the receiver isn't a smi.
  __ tst(receiver, Operand(kSmiTagMask));
  __ b(eq, miss_label);

  // Check that the object is a JS array.
  __ CompareObjectType(receiver, scratch, scratch, JS_ARRAY_TYPE);
  __ b(ne, miss_label);

  // Load length directly from the JS array.
  __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
  __ Ret();
}
345
346
// Generate code to check if an object is a string. If the object is a
// heap object, its map's instance type is left in the scratch1 register.
// If this is not needed, scratch1 and scratch2 may be the same register.
// Branches to 'smi' for smi receivers and to 'non_string_object' for heap
// objects whose instance type is not a string type.
static void GenerateStringCheck(MacroAssembler* masm,
                                Register receiver,
                                Register scratch1,
                                Register scratch2,
                                Label* smi,
                                Label* non_string_object) {
  // Check that the receiver isn't a smi.
  __ tst(receiver, Operand(kSmiTagMask));
  __ b(eq, smi);

  // Check that the object is a string.
  __ ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ and_(scratch2, scratch1, Operand(kIsNotStringMask));
  // The cast is to resolve the overload for the argument of 0x0.
  __ cmp(scratch2, Operand(static_cast<int32_t>(kStringTag)));
  __ b(ne, non_string_object);
}
368
369
// Generate code to load the length from a string object and return the length.
// If the receiver object is not a string or a wrapped string object the
// execution continues at the miss label. The register containing the
// receiver is potentially clobbered.
// When support_wrappers is true, a JSValue wrapping a string is unwrapped
// and its wrapped string's length is returned instead of missing.
void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
                                            Register receiver,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss,
                                            bool support_wrappers) {
  Label check_wrapper;

  // Check if the object is a string leaving the instance type in the
  // scratch1 register.
  GenerateStringCheck(masm, receiver, scratch1, scratch2, miss,
                      support_wrappers ? &check_wrapper : miss);

  // Load length directly from the string.
  __ ldr(r0, FieldMemOperand(receiver, String::kLengthOffset));
  __ Ret();

  if (support_wrappers) {
    // Check if the object is a JSValue wrapper (instance type was left
    // in scratch1 by GenerateStringCheck).
    __ bind(&check_wrapper);
    __ cmp(scratch1, Operand(JS_VALUE_TYPE));
    __ b(ne, miss);

    // Unwrap the value and check if the wrapped value is a string.
    __ ldr(scratch1, FieldMemOperand(receiver, JSValue::kValueOffset));
    GenerateStringCheck(masm, scratch1, scratch2, scratch2, miss, miss);
    __ ldr(r0, FieldMemOperand(scratch1, String::kLengthOffset));
    __ Ret();
  }
}
404
405
// Load the receiver function's prototype into r0 and return. Jumps to
// miss_label when the receiver has no usable prototype (delegated to
// MacroAssembler::TryGetFunctionPrototype). Both scratch registers are
// clobbered.
void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
                                                 Register receiver,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  __ mov(r0, scratch1);
  __ Ret();
}
415
416
// Generate StoreField code, value is passed in r0 register.
// When leaving generated code after success, the receiver_reg and name_reg
// may be clobbered. Upon branch to miss_label, the receiver and name
// registers have their original values.
// 'transition' is NULL for an in-place store; otherwise the receiver's map
// is switched to 'transition' before storing (extending the properties
// array via the runtime when no unused fields remain).
void StubCompiler::GenerateStoreField(MacroAssembler* masm,
                                      JSObject* object,
                                      int index,
                                      Map* transition,
                                      Register receiver_reg,
                                      Register name_reg,
                                      Register scratch,
                                      Label* miss_label) {
  // r0 : value
  Label exit;

  // Check that the receiver isn't a smi.
  __ tst(receiver_reg, Operand(kSmiTagMask));
  __ b(eq, miss_label);

  // Check that the map of the receiver hasn't changed.
  __ ldr(scratch, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
  __ cmp(scratch, Operand(Handle<Map>(object->map())));
  __ b(ne, miss_label);

  // Perform global security token check if needed.
  if (object->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label);
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Perform map transition for the receiver if necessary.
  if ((transition != NULL) && (object->map()->unused_property_fields() == 0)) {
    // The properties must be extended before we can store the value.
    // We jump to a runtime call that extends the properties array.
    __ push(receiver_reg);
    __ mov(r2, Operand(Handle<Map>(transition)));
    __ Push(r2, r0);
    __ TailCallExternalReference(
        ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
                          masm->isolate()),
        3,
        1);
    return;
  }

  if (transition != NULL) {
    // Update the map of the object; no write barrier updating is
    // needed because the map is never in new space.
    __ mov(ip, Operand(Handle<Map>(transition)));
    __ str(ip, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
  }

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    __ str(r0, FieldMemOperand(receiver_reg, offset));

    // Skip updating write barrier if storing a smi.
    __ tst(r0, Operand(kSmiTagMask));
    __ b(eq, &exit);

    // Update the write barrier for the array address.
    // Pass the now unused name_reg as a scratch register.
    __ RecordWrite(receiver_reg, Operand(offset), name_reg, scratch);
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array
    __ ldr(scratch, FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
    __ str(r0, FieldMemOperand(scratch, offset));

    // Skip updating write barrier if storing a smi.
    __ tst(r0, Operand(kSmiTagMask));
    __ b(eq, &exit);

    // Update the write barrier for the array address.
    // Ok to clobber receiver_reg and name_reg, since we return.
    __ RecordWrite(scratch, Operand(offset), name_reg, receiver_reg);
  }

  // Return the value (register r0).
  __ bind(&exit);
  __ Ret();
}
509
510
511void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
512 ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
513 Code* code = NULL;
514 if (kind == Code::LOAD_IC) {
Steve Block44f0eee2011-05-26 01:26:41 +0100515 code = masm->isolate()->builtins()->builtin(Builtins::kLoadIC_Miss);
Steve Blocka7e24c12009-10-30 11:49:00 +0000516 } else {
Steve Block44f0eee2011-05-26 01:26:41 +0100517 code = masm->isolate()->builtins()->builtin(Builtins::kKeyedLoadIC_Miss);
Steve Blocka7e24c12009-10-30 11:49:00 +0000518 }
519
520 Handle<Code> ic(code);
521 __ Jump(ic, RelocInfo::CODE_TARGET);
522}
523
524
// Invoke the function in r1 as a tail call, first verifying that it really
// is a JS function (jumping to 'miss' otherwise) and patching the stacked
// receiver with the global proxy when the call target's holder is a global
// object. r3 is clobbered.
static void GenerateCallFunction(MacroAssembler* masm,
                                 Object* object,
                                 const ParameterCount& arguments,
                                 Label* miss) {
  // ----------- S t a t e -------------
  //  -- r0: receiver
  //  -- r1: function to call
  // -----------------------------------

  // Check that the function really is a function.
  __ JumpIfSmi(r1, miss);
  __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE);
  __ b(ne, miss);

  // Patch the receiver on the stack with the global proxy if
  // necessary.
  if (object->IsGlobalObject()) {
    __ ldr(r3, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));
    __ str(r3, MemOperand(sp, arguments.immediate() * kPointerSize));
  }

  // Invoke the function.
  __ InvokeFunction(r1, arguments, JUMP_FUNCTION);
}
549
550
// Push the five arguments expected by the interceptor runtime entries:
// name, interceptor info, receiver, holder, and the interceptor's data
// field. The exact push order is the calling convention of those entries.
// Note that 'name' is reused as a scratch register and is clobbered.
static void PushInterceptorArguments(MacroAssembler* masm,
                                     Register receiver,
                                     Register holder,
                                     Register name,
                                     JSObject* holder_obj) {
  __ push(name);
  InterceptorInfo* interceptor = holder_obj->GetNamedInterceptor();
  // The interceptor object is embedded as an immediate, which is only
  // safe because it cannot be in (movable) new space.
  ASSERT(!masm->isolate()->heap()->InNewSpace(interceptor));
  Register scratch = name;
  __ mov(scratch, Operand(Handle<Object>(interceptor)));
  __ push(scratch);
  __ push(receiver);
  __ push(holder);
  __ ldr(scratch, FieldMemOperand(scratch, InterceptorInfo::kDataOffset));
  __ push(scratch);
}
567
568
// Emit a call into the runtime that asks the named interceptor for the
// property, without performing any further lookup on failure
// (kLoadPropertyWithInterceptorOnly). Pushes the five interceptor
// arguments and calls through CEntryStub.
static void CompileCallLoadPropertyWithInterceptor(MacroAssembler* masm,
                                                   Register receiver,
                                                   Register holder,
                                                   Register name,
                                                   JSObject* holder_obj) {
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);

  ExternalReference ref =
      ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly),
                        masm->isolate());
  // r0 = argument count (matches the five values pushed above),
  // r1 = runtime entry point, as expected by CEntryStub.
  __ mov(r0, Operand(5));
  __ mov(r1, Operand(ref));

  CEntryStub stub(1);
  __ CallStub(&stub);
}
585
// Number of extra stack slots a fast API call uses: holder, callee JS
// function and call data (see the state comment in
// GenerateFastApiDirectCall).
static const int kFastApiCallArguments = 3;
Leon Clarke4515c472010-02-03 11:58:03 +0000587
// Reserves space for the extra arguments to FastHandleApiCall in the
// caller's frame.
//
// These arguments are set by CheckPrototypes and GenerateFastApiDirectCall.
// The slots are pre-filled with Smi zero as a safe placeholder; 'scratch'
// is clobbered.
static void ReserveSpaceForFastApiCall(MacroAssembler* masm,
                                       Register scratch) {
  __ mov(scratch, Operand(Smi::FromInt(0)));
  for (int i = 0; i < kFastApiCallArguments; i++) {
    __ push(scratch);
  }
}
599
600
// Undoes the effects of ReserveSpaceForFastApiCall by dropping the
// reserved slots from the stack.
static void FreeSpaceForFastApiCall(MacroAssembler* masm) {
  __ Drop(kFastApiCallArguments);
}
605
606
// Generate a direct call to a fast API callback: fill in the reserved
// stack slots (callee function and call data), build a v8::Arguments
// record in an exit frame, and call the C++ callback via
// TryCallApiFunctionAndReturn. Returns the failure object if emitting the
// call would require a GC. Clobbers r0, r2, r5, r6 and ip.
static MaybeObject* GenerateFastApiDirectCall(MacroAssembler* masm,
                                              const CallOptimization& optimization,
                                              int argc) {
  // ----------- S t a t e -------------
  //  -- sp[0]              : holder (set by CheckPrototypes)
  //  -- sp[4]              : callee js function
  //  -- sp[8]              : call data
  //  -- sp[12]             : last js argument
  //  -- ...
  //  -- sp[(argc + 3) * 4] : first js argument
  //  -- sp[(argc + 4) * 4] : receiver
  // -----------------------------------
  // Get the function and setup the context.
  JSFunction* function = optimization.constant_function();
  __ mov(r5, Operand(Handle<JSFunction>(function)));
  __ ldr(cp, FieldMemOperand(r5, JSFunction::kContextOffset));

  // Pass the additional arguments FastHandleApiCall expects.
  Object* call_data = optimization.api_call_info()->data();
  Handle<CallHandlerInfo> api_call_info_handle(optimization.api_call_info());
  if (masm->isolate()->heap()->InNewSpace(call_data)) {
    // call_data may move during GC, so load it indirectly from the
    // (handlified) CallHandlerInfo instead of embedding it directly.
    __ Move(r0, api_call_info_handle);
    __ ldr(r6, FieldMemOperand(r0, CallHandlerInfo::kDataOffset));
  } else {
    __ Move(r6, Handle<Object>(call_data));
  }
  // Store js function and call data into the reserved sp[4]/sp[8] slots.
  __ stm(ib, sp, r5.bit() | r6.bit());

  // r2 points to call data as expected by Arguments
  // (refer to layout above).
  __ add(r2, sp, Operand(2 * kPointerSize));

  Object* callback = optimization.api_call_info()->callback();
  Address api_function_address = v8::ToCData<Address>(callback);
  ApiFunction fun(api_function_address);

  // Four words for the v8::Arguments record built below.
  const int kApiStackSpace = 4;
  __ EnterExitFrame(false, kApiStackSpace);

  // r0 = v8::Arguments&
  // Arguments is after the return address.
  __ add(r0, sp, Operand(1 * kPointerSize));
  // v8::Arguments::implicit_args = data
  __ str(r2, MemOperand(r0, 0 * kPointerSize));
  // v8::Arguments::values = last argument
  __ add(ip, r2, Operand(argc * kPointerSize));
  __ str(ip, MemOperand(r0, 1 * kPointerSize));
  // v8::Arguments::length_ = argc
  __ mov(ip, Operand(argc));
  __ str(ip, MemOperand(r0, 2 * kPointerSize));
  // v8::Arguments::is_construct_call = 0
  __ mov(ip, Operand(0));
  __ str(ip, MemOperand(r0, 3 * kPointerSize));

  // Emitting a stub call may try to allocate (if the code is not
  // already generated). Do not allow the assembler to perform a
  // garbage collection but instead return the allocation failure
  // object.
  // Unwind the js arguments, the three extra slots and the receiver.
  const int kStackUnwindSpace = argc + kFastApiCallArguments + 1;
  ExternalReference ref = ExternalReference(&fun,
                                            ExternalReference::DIRECT_API_CALL,
                                            masm->isolate());
  return masm->TryCallApiFunctionAndReturn(ref, kStackUnwindSpace);
}
672
// Helper that compiles CallIC stubs for properties found behind a named
// interceptor.  Depending on the lookup result, the generated code either
// inlines a call to a cached constant function (CompileCacheable) or falls
// back to the runtime to invoke the interceptor (CompileRegular).
class CallInterceptorCompiler BASE_EMBEDDED {
 public:
  // |arguments| describes the call's argument count; |name| is the register
  // that holds the property name across the compiled sequence.
  CallInterceptorCompiler(StubCompiler* stub_compiler,
                          const ParameterCount& arguments,
                          Register name)
      : stub_compiler_(stub_compiler),
        arguments_(arguments),
        name_(name) {}

  // Emits the interceptor call sequence.  scratch1..scratch3 are clobbered
  // and control transfers to |miss| when an emitted check fails.  Returns a
  // Failure if allocating a code object during compilation fails, otherwise
  // undefined (or the result of CompileCacheable).
  MaybeObject* Compile(MacroAssembler* masm,
                       JSObject* object,
                       JSObject* holder,
                       String* name,
                       LookupResult* lookup,
                       Register receiver,
                       Register scratch1,
                       Register scratch2,
                       Register scratch3,
                       Label* miss) {
    ASSERT(holder->HasNamedInterceptor());
    ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());

    // Check that the receiver isn't a smi.
    __ JumpIfSmi(receiver, miss);

    CallOptimization optimization(lookup);

    if (optimization.is_constant_call()) {
      // The lookup found a cached constant function: compile the fast
      // path that calls it directly when the interceptor yields nothing.
      return CompileCacheable(masm,
                              object,
                              receiver,
                              scratch1,
                              scratch2,
                              scratch3,
                              holder,
                              lookup,
                              name,
                              optimization,
                              miss);
    } else {
      // No constant function is known; always go through the runtime.
      CompileRegular(masm,
                     object,
                     receiver,
                     scratch1,
                     scratch2,
                     scratch3,
                     name,
                     holder,
                     miss);
      return masm->isolate()->heap()->undefined_value();
    }
  }

 private:
  // Fast path: invoke the interceptor inline; if it produces no value,
  // call the cached constant function, possibly through the fast API
  // call mechanism when the prototype depth checks allow it.
  MaybeObject* CompileCacheable(MacroAssembler* masm,
                                JSObject* object,
                                Register receiver,
                                Register scratch1,
                                Register scratch2,
                                Register scratch3,
                                JSObject* interceptor_holder,
                                LookupResult* lookup,
                                String* name,
                                const CallOptimization& optimization,
                                Label* miss_label) {
    ASSERT(optimization.is_constant_call());
    ASSERT(!lookup->holder()->IsGlobalObject());

    Counters* counters = masm->isolate()->counters();

    // Determine whether a fast API call is possible: either the expected
    // receiver type is found between the receiver and the interceptor's
    // holder (depth1), or between the interceptor's holder and the
    // constant function's holder (depth2).
    int depth1 = kInvalidProtoDepth;
    int depth2 = kInvalidProtoDepth;
    bool can_do_fast_api_call = false;
    if (optimization.is_simple_api_call() &&
        !lookup->holder()->IsGlobalObject()) {
      depth1 =
          optimization.GetPrototypeDepthOfExpectedType(object,
                                                       interceptor_holder);
      if (depth1 == kInvalidProtoDepth) {
        depth2 =
            optimization.GetPrototypeDepthOfExpectedType(interceptor_holder,
                                                         lookup->holder());
      }
      can_do_fast_api_call = (depth1 != kInvalidProtoDepth) ||
                             (depth2 != kInvalidProtoDepth);
    }

    __ IncrementCounter(counters->call_const_interceptor(), 1,
                        scratch1, scratch2);

    if (can_do_fast_api_call) {
      __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1,
                          scratch1, scratch2);
      // Pre-allocate the stack slots the fast API call needs; they must
      // be freed again on every exit path below.
      ReserveSpaceForFastApiCall(masm, scratch1);
    }

    // Check that the maps from receiver to interceptor's holder
    // haven't changed and thus we can invoke interceptor.
    Label miss_cleanup;
    Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label;
    Register holder =
        stub_compiler_->CheckPrototypes(object, receiver,
                                        interceptor_holder, scratch1,
                                        scratch2, scratch3, name, depth1, miss);

    // Invoke an interceptor and if it provides a value,
    // branch to |regular_invoke|.
    Label regular_invoke;
    LoadWithInterceptor(masm, receiver, holder, interceptor_holder, scratch2,
                        &regular_invoke);

    // Interceptor returned nothing for this property.  Try to use cached
    // constant function.

    // Check that the maps from interceptor's holder to constant function's
    // holder haven't changed and thus we can use cached constant function.
    if (interceptor_holder != lookup->holder()) {
      stub_compiler_->CheckPrototypes(interceptor_holder, receiver,
                                      lookup->holder(), scratch1,
                                      scratch2, scratch3, name, depth2, miss);
    } else {
      // CheckPrototypes has a side effect of fetching a 'holder'
      // for API (object which is instanceof for the signature).  It's
      // safe to omit it here, as if present, it should be fetched
      // by the previous CheckPrototypes.
      ASSERT(depth2 == kInvalidProtoDepth);
    }

    // Invoke function.
    if (can_do_fast_api_call) {
      // Emitting the direct API call can fail to allocate; propagate
      // the failure object to the caller in that case.
      MaybeObject* result = GenerateFastApiDirectCall(masm,
                                                      optimization,
                                                      arguments_.immediate());
      if (result->IsFailure()) return result;
    } else {
      __ InvokeFunction(optimization.constant_function(), arguments_,
                        JUMP_FUNCTION);
    }

    // Deferred code for fast API call case---clean preallocated space.
    if (can_do_fast_api_call) {
      __ bind(&miss_cleanup);
      FreeSpaceForFastApiCall(masm);
      __ b(miss_label);
    }

    // Invoke a regular function.
    __ bind(&regular_invoke);
    if (can_do_fast_api_call) {
      FreeSpaceForFastApiCall(masm);
    }

    return masm->isolate()->heap()->undefined_value();
  }

  // Slow path: check the prototype chain, then call the runtime to load
  // the property through the interceptor and complete the call there.
  void CompileRegular(MacroAssembler* masm,
                      JSObject* object,
                      Register receiver,
                      Register scratch1,
                      Register scratch2,
                      Register scratch3,
                      String* name,
                      JSObject* interceptor_holder,
                      Label* miss_label) {
    Register holder =
        stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
                                        scratch1, scratch2, scratch3, name,
                                        miss_label);

    // Call a runtime function to load the interceptor property.
    __ EnterInternalFrame();
    // Save the name_ register across the call.
    __ push(name_);

    PushInterceptorArguments(masm,
                             receiver,
                             holder,
                             name_,
                             interceptor_holder);

    __ CallExternalReference(
        ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForCall),
                          masm->isolate()),
        5);

    // Restore the name_ register.
    __ pop(name_);
    __ LeaveInternalFrame();
  }

  // Invokes the interceptor getter inside an internal frame.  Falls
  // through when the interceptor produced no result (i.e. r0 holds the
  // no-interceptor-result sentinel); otherwise jumps to
  // |interceptor_succeeded| with the value in r0.
  void LoadWithInterceptor(MacroAssembler* masm,
                           Register receiver,
                           Register holder,
                           JSObject* holder_obj,
                           Register scratch,
                           Label* interceptor_succeeded) {
    __ EnterInternalFrame();
    __ Push(holder, name_);

    CompileCallLoadPropertyWithInterceptor(masm,
                                           receiver,
                                           holder,
                                           name_,
                                           holder_obj);

    __ pop(name_);  // Restore the name.
    __ pop(receiver);  // Restore the holder.
    __ LeaveInternalFrame();

    // If interceptor returns no-result sentinel, call the constant function.
    __ LoadRoot(scratch, Heap::kNoInterceptorResultSentinelRootIndex);
    __ cmp(r0, scratch);
    __ b(ne, interceptor_succeeded);
  }

  StubCompiler* stub_compiler_;
  const ParameterCount& arguments_;
  Register name_;
};
892
893
894// Generate code to check that a global property cell is empty. Create
895// the property cell at compilation time if no cell exists for the
896// property.
John Reck59135872010-11-02 12:39:01 -0700897MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCell(
898 MacroAssembler* masm,
899 GlobalObject* global,
900 String* name,
901 Register scratch,
902 Label* miss) {
903 Object* probe;
904 { MaybeObject* maybe_probe = global->EnsurePropertyCell(name);
905 if (!maybe_probe->ToObject(&probe)) return maybe_probe;
906 }
Steve Block6ded16b2010-05-10 14:33:55 +0100907 JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(probe);
908 ASSERT(cell->value()->IsTheHole());
909 __ mov(scratch, Operand(Handle<Object>(cell)));
910 __ ldr(scratch,
911 FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
912 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
913 __ cmp(scratch, ip);
914 __ b(ne, miss);
915 return cell;
916}
917
Ben Murdochb0fe1622011-05-05 13:52:32 +0100918// Calls GenerateCheckPropertyCell for each global object in the prototype chain
919// from object to (but not including) holder.
920MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCells(
921 MacroAssembler* masm,
922 JSObject* object,
923 JSObject* holder,
924 String* name,
925 Register scratch,
926 Label* miss) {
927 JSObject* current = object;
928 while (current != holder) {
929 if (current->IsGlobalObject()) {
930 // Returns a cell or a failure.
931 MaybeObject* result = GenerateCheckPropertyCell(
932 masm,
933 GlobalObject::cast(current),
934 name,
935 scratch,
936 miss);
937 if (result->IsFailure()) return result;
938 }
939 ASSERT(current->IsJSObject());
940 current = JSObject::cast(current->GetPrototype());
941 }
942 return NULL;
943}
944
945
// Convert and store int passed in register ival to IEEE 754 single precision
// floating point value at memory location (dst + 4 * wordoffset)
// If VFP3 is available use it for conversion.
static void StoreIntAsFloat(MacroAssembler* masm,
                            Register dst,
                            Register wordoffset,
                            Register ival,
                            Register fval,
                            Register scratch1,
                            Register scratch2) {
  if (CpuFeatures::IsSupported(VFP3)) {
    CpuFeatures::Scope scope(VFP3);
    // Hardware path: convert via the VFP unit and store directly.
    __ vmov(s0, ival);
    __ add(scratch1, dst, Operand(wordoffset, LSL, 2));
    __ vcvt_f32_s32(s0, s0);
    __ vstr(s0, scratch1, 0);
  } else {
    // Software path: assemble the binary32 bit pattern in fval by hand
    // (sign, biased exponent, mantissa).
    Label not_special, done;
    // Move sign bit from source to destination. This works because the sign
    // bit in the exponent word of the double has the same position and polarity
    // as the 2's complement sign bit in a Smi.
    ASSERT(kBinary32SignMask == 0x80000000u);

    __ and_(fval, ival, Operand(kBinary32SignMask), SetCC);
    // Negate value if it is negative.
    __ rsb(ival, ival, Operand(0, RelocInfo::NONE), LeaveCC, ne);

    // We have -1, 0 or 1, which we treat specially. Register ival contains
    // absolute value: it is either equal to 1 (special case of -1 and 1),
    // greater than 1 (not a special case) or less than 1 (special case of 0).
    __ cmp(ival, Operand(1));
    __ b(gt, &not_special);

    // For 1 or -1 we need to or in the 0 exponent (biased).
    static const uint32_t exponent_word_for_1 =
        kBinary32ExponentBias << kBinary32ExponentShift;

    // Executed only when ival == 1 (eq from the cmp above); for ival == 0
    // the sign word already in fval is the correct result.
    __ orr(fval, fval, Operand(exponent_word_for_1), LeaveCC, eq);
    __ b(&done);

    __ bind(&not_special);
    // Count leading zeros.
    // Gets the wrong answer for 0, but we already checked for that case above.
    Register zeros = scratch2;
    __ CountLeadingZeros(zeros, ival, scratch1);

    // Compute exponent and or it into the exponent register.
    __ rsb(scratch1,
           zeros,
           Operand((kBitsPerInt - 1) + kBinary32ExponentBias));

    __ orr(fval,
           fval,
           Operand(scratch1, LSL, kBinary32ExponentShift));

    // Shift up the source chopping the top bit off.
    __ add(zeros, zeros, Operand(1));
    // This wouldn't work for 1 and -1 as the shift would be 32 which means 0.
    __ mov(ival, Operand(ival, LSL, zeros));
    // And the top (top 20 bits).
    __ orr(fval,
           fval,
           Operand(ival, LSR, kBitsPerInt - kBinary32MantissaBits));

    __ bind(&done);
    __ str(fval, MemOperand(dst, wordoffset, LSL, 2));
  }
}
1014
1015
// Convert unsigned integer with specified number of leading zeroes in binary
// representation to IEEE 754 double.
// Integer to convert is passed in register hiword.
// Resulting double is returned in registers hiword:loword.
// This functions does not work correctly for 0.
static void GenerateUInt2Double(MacroAssembler* masm,
                                Register hiword,
                                Register loword,
                                Register scratch,
                                int leading_zeroes) {
  // Number of significant bits below the (implicit) leading one bit.
  const int meaningful_bits = kBitsPerInt - leading_zeroes - 1;
  // The exponent is fully determined at compile time by the leading-zero
  // count, so it can be computed as a constant.
  const int biased_exponent = HeapNumber::kExponentBias + meaningful_bits;

  // Right shift needed to place the mantissa bits in the high word ...
  const int mantissa_shift_for_hi_word =
      meaningful_bits - HeapNumber::kMantissaBitsInTopWord;

  // ... and the complementary left shift for the bits that spill over
  // into the low word.
  const int mantissa_shift_for_lo_word =
      kBitsPerInt - mantissa_shift_for_hi_word;

  __ mov(scratch, Operand(biased_exponent << HeapNumber::kExponentShift));
  if (mantissa_shift_for_hi_word > 0) {
    // Mantissa is wider than the top word: split it across both words.
    __ mov(loword, Operand(hiword, LSL, mantissa_shift_for_lo_word));
    __ orr(hiword, scratch, Operand(hiword, LSR, mantissa_shift_for_hi_word));
  } else {
    // Mantissa fits entirely in the top word; the low word is zero.
    __ mov(loword, Operand(0, RelocInfo::NONE));
    __ orr(hiword, scratch, Operand(hiword, LSL, mantissa_shift_for_hi_word));
  }

  // If least significant bit of biased exponent was not 1 it was corrupted
  // by most significant bit of mantissa so we should fix that.
  if (!(biased_exponent & 1)) {
    __ bic(hiword, hiword, Operand(1 << HeapNumber::kExponentShift));
  }
}
1050
Steve Block6ded16b2010-05-10 14:33:55 +01001051
Steve Blocka7e24c12009-10-30 11:49:00 +00001052#undef __
1053#define __ ACCESS_MASM(masm())
1054
1055
// Walks the prototype chain from |object| (in |object_reg|) to |holder|,
// emitting a map check (or, for slow-mode objects, a dictionary negative
// lookup) for every step.  Jumps to |miss| when any check fails and
// returns the register that holds the holder object on success.  When
// |save_at_depth| equals the current depth, the object at that depth is
// also stored at MemOperand(sp).  On allocation failure this records the
// failure via set_failure(); callers must check for that.
Register StubCompiler::CheckPrototypes(JSObject* object,
                                       Register object_reg,
                                       JSObject* holder,
                                       Register holder_reg,
                                       Register scratch1,
                                       Register scratch2,
                                       String* name,
                                       int save_at_depth,
                                       Label* miss) {
  // Make sure there's no overlap between holder and object registers.
  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
         && !scratch2.is(scratch1));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

  if (save_at_depth == depth) {
    __ str(reg, MemOperand(sp));
  }

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  JSObject* current = object;
  while (current != holder) {
    depth++;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());

    ASSERT(current->GetPrototype()->IsJSObject());
    JSObject* prototype = JSObject::cast(current->GetPrototype());
    if (!current->HasFastProperties() &&
        !current->IsJSGlobalObject() &&
        !current->IsJSGlobalProxy()) {
      // Slow-mode (dictionary) object: instead of a map check, prove the
      // property is absent via a negative dictionary lookup.  The name
      // must be internalized (a symbol) for that lookup.
      if (!name->IsSymbol()) {
        MaybeObject* maybe_lookup_result = heap()->LookupSymbol(name);
        Object* lookup_result = NULL;  // Initialization to please compiler.
        if (!maybe_lookup_result->ToObject(&lookup_result)) {
          // Record the allocation failure; the returned register is
          // meaningless in this case.
          set_failure(Failure::cast(maybe_lookup_result));
          return reg;
        }
        name = String::cast(lookup_result);
      }
      ASSERT(current->property_dictionary()->FindEntry(name) ==
             StringDictionary::kNotFound);

      GenerateDictionaryNegativeLookup(masm(),
                                       miss,
                                       reg,
                                       name,
                                       scratch1,
                                       scratch2);
      __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
      reg = holder_reg;  // from now the object is in holder_reg
      __ ldr(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
    } else if (heap()->InNewSpace(prototype)) {
      // Get the map of the current object.
      __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
      __ cmp(scratch1, Operand(Handle<Map>(current->map())));

      // Branch on the result of the map check.
      __ b(ne, miss);

      // Check access rights to the global object. This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (current->IsJSGlobalProxy()) {
        __ CheckAccessGlobalProxy(reg, scratch1, miss);
        // Restore scratch register to be the map of the object. In the
        // new space case below, we load the prototype from the map in
        // the scratch register.
        __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
      }

      reg = holder_reg;  // from now the object is in holder_reg
      // The prototype is in new space; we cannot store a reference
      // to it in the code. Load it from the map.
      __ ldr(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
    } else {
      // Check the map of the current object.
      __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
      __ cmp(scratch1, Operand(Handle<Map>(current->map())));
      // Branch on the result of the map check.
      __ b(ne, miss);
      // Check access rights to the global object. This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (current->IsJSGlobalProxy()) {
        __ CheckAccessGlobalProxy(reg, scratch1, miss);
      }
      // The prototype is in old space; load it directly.
      reg = holder_reg;  // from now the object is in holder_reg
      __ mov(reg, Operand(Handle<JSObject>(prototype)));
    }

    if (save_at_depth == depth) {
      __ str(reg, MemOperand(sp));
    }

    // Go to the next object in the prototype chain.
    current = prototype;
  }

  // Check the holder map.
  __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
  __ cmp(scratch1, Operand(Handle<Map>(current->map())));
  __ b(ne, miss);

  // Log the check depth.
  LOG(masm()->isolate(), IntEvent("check-maps-depth", depth + 1));

  // Perform security check for access to the global object.
  ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
  if (holder->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(reg, scratch1, miss);
  };

  // If we've skipped any global objects, it's not enough to verify
  // that their maps haven't changed. We also need to check that the
  // property cell for the property is still empty.
  MaybeObject* result = GenerateCheckPropertyCells(masm(),
                                                   object,
                                                   holder,
                                                   name,
                                                   scratch1,
                                                   miss);
  if (result->IsFailure()) set_failure(Failure::cast(result));

  // Return the register containing the holder.
  return reg;
}
1190
1191
1192void StubCompiler::GenerateLoadField(JSObject* object,
1193 JSObject* holder,
1194 Register receiver,
1195 Register scratch1,
1196 Register scratch2,
Ben Murdoch3bec4d22010-07-22 14:51:16 +01001197 Register scratch3,
Steve Blocka7e24c12009-10-30 11:49:00 +00001198 int index,
1199 String* name,
1200 Label* miss) {
1201 // Check that the receiver isn't a smi.
1202 __ tst(receiver, Operand(kSmiTagMask));
1203 __ b(eq, miss);
1204
1205 // Check that the maps haven't changed.
1206 Register reg =
Ben Murdoch3bec4d22010-07-22 14:51:16 +01001207 CheckPrototypes(object, receiver, holder, scratch1, scratch2, scratch3,
1208 name, miss);
Steve Blocka7e24c12009-10-30 11:49:00 +00001209 GenerateFastPropertyLoad(masm(), r0, reg, holder, index);
1210 __ Ret();
1211}
1212
1213
1214void StubCompiler::GenerateLoadConstant(JSObject* object,
1215 JSObject* holder,
1216 Register receiver,
1217 Register scratch1,
1218 Register scratch2,
Ben Murdoch3bec4d22010-07-22 14:51:16 +01001219 Register scratch3,
Steve Blocka7e24c12009-10-30 11:49:00 +00001220 Object* value,
1221 String* name,
1222 Label* miss) {
1223 // Check that the receiver isn't a smi.
1224 __ tst(receiver, Operand(kSmiTagMask));
1225 __ b(eq, miss);
1226
1227 // Check that the maps haven't changed.
1228 Register reg =
Ben Murdoch3bec4d22010-07-22 14:51:16 +01001229 CheckPrototypes(object, receiver, holder,
1230 scratch1, scratch2, scratch3, name, miss);
Steve Blocka7e24c12009-10-30 11:49:00 +00001231
1232 // Return the constant value.
1233 __ mov(r0, Operand(Handle<Object>(value)));
1234 __ Ret();
1235}
1236
1237
// Generates code that loads a property through a native AccessorInfo
// getter.  Builds the AccessorInfo::args_ list plus the property name on
// the stack, enters an exit frame and invokes the getter through
// TryCallApiFunctionAndReturn.  Jumps to |miss| when the receiver is a
// smi or a map check fails; returns a Failure if stub-call allocation
// fails.
MaybeObject* StubCompiler::GenerateLoadCallback(JSObject* object,
                                                JSObject* holder,
                                                Register receiver,
                                                Register name_reg,
                                                Register scratch1,
                                                Register scratch2,
                                                Register scratch3,
                                                AccessorInfo* callback,
                                                String* name,
                                                Label* miss) {
  // Check that the receiver isn't a smi.
  __ tst(receiver, Operand(kSmiTagMask));
  __ b(eq, miss);

  // Check that the maps haven't changed.
  Register reg =
      CheckPrototypes(object, receiver, holder, scratch1, scratch2, scratch3,
                      name, miss);

  // Build AccessorInfo::args_ list on the stack and push property name below
  // the exit frame to make GC aware of them and store pointers to them.
  __ push(receiver);
  __ mov(scratch2, sp);  // scratch2 = AccessorInfo::args_
  Handle<AccessorInfo> callback_handle(callback);
  if (heap()->InNewSpace(callback_handle->data())) {
    // The data object may move; load it indirectly through the callback
    // object at run time instead of embedding a direct pointer.
    __ Move(scratch3, callback_handle);
    __ ldr(scratch3, FieldMemOperand(scratch3, AccessorInfo::kDataOffset));
  } else {
    __ Move(scratch3, Handle<Object>(callback_handle->data()));
  }
  __ Push(reg, scratch3, name_reg);
  __ mov(r0, sp);  // r0 = Handle<String>

  Address getter_address = v8::ToCData<Address>(callback->getter());
  ApiFunction fun(getter_address);

  const int kApiStackSpace = 1;
  __ EnterExitFrame(false, kApiStackSpace);
  // Create AccessorInfo instance on the stack above the exit frame with
  // scratch2 (internal::Object **args_) as the data.
  __ str(scratch2, MemOperand(sp, 1 * kPointerSize));
  __ add(r1, sp, Operand(1 * kPointerSize));  // r1 = AccessorInfo&

  // Emitting a stub call may try to allocate (if the code is not
  // already generated). Do not allow the assembler to perform a
  // garbage collection but instead return the allocation failure
  // object.
  const int kStackUnwindSpace = 4;
  ExternalReference ref =
      ExternalReference(&fun,
                        ExternalReference::DIRECT_GETTER_CALL,
                        masm()->isolate());
  return masm()->TryCallApiFunctionAndReturn(ref, kStackUnwindSpace);
}
1292
1293
// Generates code that loads a property through a named interceptor on
// |interceptor_holder|.  For cacheable FIELD/CALLBACKS follow-ups the
// interceptor call plus the fallback load are compiled inline; all other
// cases tail-call into the runtime.  Jumps to |miss| when the receiver is
// a smi or any emitted map check fails.
void StubCompiler::GenerateLoadInterceptor(JSObject* object,
                                           JSObject* interceptor_holder,
                                           LookupResult* lookup,
                                           Register receiver,
                                           Register name_reg,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           String* name,
                                           Label* miss) {
  ASSERT(interceptor_holder->HasNamedInterceptor());
  ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // So far the most popular follow ups for interceptor loads are FIELD
  // and CALLBACKS, so inline only them, other cases may be added
  // later.
  bool compile_followup_inline = false;
  if (lookup->IsProperty() && lookup->IsCacheable()) {
    if (lookup->type() == FIELD) {
      compile_followup_inline = true;
    } else if (lookup->type() == CALLBACKS &&
        lookup->GetCallbackObject()->IsAccessorInfo() &&
        AccessorInfo::cast(lookup->GetCallbackObject())->getter() != NULL) {
      compile_followup_inline = true;
    }
  }

  if (compile_followup_inline) {
    // Compile the interceptor call, followed by inline code to load the
    // property from further up the prototype chain if the call fails.
    // Check that the maps haven't changed.
    Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
                                          scratch1, scratch2, scratch3,
                                          name, miss);
    ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1));

    // Save necessary data before invoking an interceptor.
    // Requires a frame to make GC aware of pushed pointers.
    __ EnterInternalFrame();

    if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
      // CALLBACKS case needs a receiver to be passed into C++ callback.
      __ Push(receiver, holder_reg, name_reg);
    } else {
      __ Push(holder_reg, name_reg);
    }

    // Invoke an interceptor. Note: map checks from receiver to
    // interceptor's holder has been compiled before (see a caller
    // of this method.)
    CompileCallLoadPropertyWithInterceptor(masm(),
                                           receiver,
                                           holder_reg,
                                           name_reg,
                                           interceptor_holder);

    // Check if interceptor provided a value for property. If it's
    // the case, return immediately.
    Label interceptor_failed;
    __ LoadRoot(scratch1, Heap::kNoInterceptorResultSentinelRootIndex);
    __ cmp(r0, scratch1);
    __ b(eq, &interceptor_failed);
    __ LeaveInternalFrame();
    __ Ret();

    // Interceptor produced nothing: restore the saved registers (in the
    // reverse of the push order above) and load the property inline.
    __ bind(&interceptor_failed);
    __ pop(name_reg);
    __ pop(holder_reg);
    if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
      __ pop(receiver);
    }

    __ LeaveInternalFrame();

    // Check that the maps from interceptor's holder to lookup's holder
    // haven't changed. And load lookup's holder into |holder| register.
    if (interceptor_holder != lookup->holder()) {
      holder_reg = CheckPrototypes(interceptor_holder,
                                   holder_reg,
                                   lookup->holder(),
                                   scratch1,
                                   scratch2,
                                   scratch3,
                                   name,
                                   miss);
    }

    if (lookup->type() == FIELD) {
      // We found FIELD property in prototype chain of interceptor's holder.
      // Retrieve a field from field's holder.
      GenerateFastPropertyLoad(masm(), r0, holder_reg,
                               lookup->holder(), lookup->GetFieldIndex());
      __ Ret();
    } else {
      // We found CALLBACKS property in prototype chain of interceptor's
      // holder.
      ASSERT(lookup->type() == CALLBACKS);
      ASSERT(lookup->GetCallbackObject()->IsAccessorInfo());
      AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
      ASSERT(callback != NULL);
      ASSERT(callback->getter() != NULL);

      // Tail call to runtime.
      // Important invariant in CALLBACKS case: the code above must be
      // structured to never clobber |receiver| register.
      __ Move(scratch2, Handle<AccessorInfo>(callback));
      // holder_reg is either receiver or scratch1.
      if (!receiver.is(holder_reg)) {
        ASSERT(scratch1.is(holder_reg));
        __ Push(receiver, holder_reg);
        __ ldr(scratch3,
               FieldMemOperand(scratch2, AccessorInfo::kDataOffset));
        __ Push(scratch3, scratch2, name_reg);
      } else {
        __ push(receiver);
        __ ldr(scratch3,
               FieldMemOperand(scratch2, AccessorInfo::kDataOffset));
        __ Push(holder_reg, scratch3, scratch2, name_reg);
      }

      ExternalReference ref =
          ExternalReference(IC_Utility(IC::kLoadCallbackProperty),
                            masm()->isolate());
      __ TailCallExternalReference(ref, 5, 1);
    }
  } else {  // !compile_followup_inline
    // Call the runtime system to load the interceptor.
    // Check that the maps haven't changed.
    Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
                                          scratch1, scratch2, scratch3,
                                          name, miss);
    PushInterceptorArguments(masm(), receiver, holder_reg,
                             name_reg, interceptor_holder);

    ExternalReference ref =
        ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad),
                          masm()->isolate());
    __ TailCallExternalReference(ref, 5, 1);
  }
}
1437
1438
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01001439void CallStubCompiler::GenerateNameCheck(String* name, Label* miss) {
1440 if (kind_ == Code::KEYED_CALL_IC) {
1441 __ cmp(r2, Operand(Handle<String>(name)));
1442 __ b(ne, miss);
1443 }
1444}
1445
1446
// Loads the call's receiver from the stack into r0 and verifies the map
// checks from |object| up to the global object |holder|; jumps to |miss|
// on failure.  Clobbers r1, r3 and r4 (passed to CheckPrototypes as
// holder/scratch registers).
void CallStubCompiler::GenerateGlobalReceiverCheck(JSObject* object,
                                                   JSObject* holder,
                                                   String* name,
                                                   Label* miss) {
  ASSERT(holder->IsGlobalObject());

  // Get the number of arguments.
  const int argc = arguments().immediate();

  // Get the receiver from the stack.
  __ ldr(r0, MemOperand(sp, argc * kPointerSize));

  // If the object is the holder then we know that it's a global
  // object which can only happen for contextual calls. In this case,
  // the receiver cannot be a smi.
  if (object != holder) {
    __ tst(r0, Operand(kSmiTagMask));
    __ b(eq, miss);
  }

  // Check that the maps haven't changed.
  CheckPrototypes(object, r0, holder, r3, r1, r4, name, miss);
}
1470
1471
// Loads the value of the global property |cell| into r1 and jumps to
// |miss| unless it is (still) |function|.  Clobbers r3 (and r4 on the
// new-space path).
void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell,
                                                    JSFunction* function,
                                                    Label* miss) {
  // Get the value from the cell.
  __ mov(r3, Operand(Handle<JSGlobalPropertyCell>(cell)));
  __ ldr(r1, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset));

  // Check that the cell contains the same function.
  if (heap()->InNewSpace(function)) {
    // We can't embed a pointer to a function in new space so we have
    // to verify that the shared function info is unchanged. This has
    // the nice side effect that multiple closures based on the same
    // function can all use this call IC. Before we load through the
    // function, we have to verify that it still is a function.
    __ tst(r1, Operand(kSmiTagMask));
    __ b(eq, miss);
    __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE);
    __ b(ne, miss);

    // Check the shared function info. Make sure it hasn't changed.
    __ Move(r3, Handle<SharedFunctionInfo>(function->shared()));
    __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
    __ cmp(r4, r3);
    __ b(ne, miss);
  } else {
    // Old-space function: compare against the embedded pointer directly.
    __ cmp(r1, Operand(Handle<JSFunction>(function)));
    __ b(ne, miss);
  }
}
1501
1502
// Fetches (or compiles) the CallIC miss stub for the current argument
// count and IC kind from the stub cache and emits a tail jump to it.
// Returns the miss stub's code object on success, or propagates the
// allocation failure so the caller can retry after a GC.
MaybeObject* CallStubCompiler::GenerateMissBranch() {
  MaybeObject* maybe_obj = masm()->isolate()->stub_cache()->ComputeCallMiss(
      arguments().immediate(), kind_);
  Object* obj;
  // Bail out without emitting the jump if stub allocation failed.
  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  __ Jump(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET);
  return obj;
}
1511
1512
// Compiles a call stub for a callable stored in a property field at
// |index| of |holder|, reached from receiver |object|: checks the
// receiver, loads the field value and invokes it as a function.
// Falls through to the generic miss stub on any failed check.
MaybeObject* CallStubCompiler::CompileCallField(JSObject* object,
                                                JSObject* holder,
                                                int index,
                                                String* name) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  GenerateNameCheck(name, &miss);

  const int argc = arguments().immediate();

  // Get the receiver of the function from the stack into r0.
  __ ldr(r0, MemOperand(sp, argc * kPointerSize));
  // Check that the receiver isn't a smi.
  __ tst(r0, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Do the right check and compute the holder register.
  Register reg = CheckPrototypes(object, r0, holder, r1, r3, r4, name, &miss);
  // Load the function stored in the field into r1.
  GenerateFastPropertyLoad(masm(), r1, reg, holder, index);

  GenerateCallFunction(masm(), object, arguments(), &miss);

  // Handle call cache miss.
  __ bind(&miss);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(FIELD, name);
}
1547
1548
// Compiles a specialized stub for Array.prototype.push on a JSArray
// receiver with fast (FixedArray) elements. Inlines the common cases:
// argc == 0 just returns the length; argc == 1 stores the element in
// place when capacity allows, and can grow the backing store in place
// when it sits at the new-space allocation top. Everything else (and
// any failed check) tail-calls the C++ ArrayPush builtin. Returns
// undefined to request the generic call stub when |object| is not an
// array or the call is through a global property cell.
MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object,
                                                    JSObject* holder,
                                                    JSGlobalPropertyCell* cell,
                                                    JSFunction* function,
                                                    String* name) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // If object is not an array, bail out to regular call.
  if (!object->IsJSArray() || cell != NULL) return heap()->undefined_value();

  Label miss;

  GenerateNameCheck(name, &miss);

  Register receiver = r1;

  // Get the receiver from the stack
  const int argc = arguments().immediate();
  __ ldr(receiver, MemOperand(sp, argc * kPointerSize));

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Check that the maps haven't changed.
  CheckPrototypes(JSObject::cast(object), receiver,
                  holder, r3, r0, r4, name, &miss);

  if (argc == 0) {
    // Nothing to do, just return the length.
    __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
    __ Drop(argc + 1);
    __ Ret();
  } else {
    Label call_builtin;

    Register elements = r3;
    Register end_elements = r5;

    // Get the elements array of the object.
    __ ldr(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));

    // Check that the elements are in fast mode and writable.
    __ CheckMap(elements, r0,
                Heap::kFixedArrayMapRootIndex, &call_builtin, true);

    if (argc == 1) {  // Otherwise fall through to call the builtin.
      Label exit, with_write_barrier, attempt_to_grow_elements;

      // Get the array's length into r0 and calculate new length.
      __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
      STATIC_ASSERT(kSmiTagSize == 1);
      STATIC_ASSERT(kSmiTag == 0);
      __ add(r0, r0, Operand(Smi::FromInt(argc)));

      // Get the element's length.
      __ ldr(r4, FieldMemOperand(elements, FixedArray::kLengthOffset));

      // Check if we could survive without allocation.
      __ cmp(r0, r4);
      __ b(gt, &attempt_to_grow_elements);

      // Save new length.
      __ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));

      // Push the element.
      __ ldr(r4, MemOperand(sp, (argc - 1) * kPointerSize));
      // We may need a register containing the address end_elements below,
      // so write back the value in end_elements.
      __ add(end_elements, elements,
             Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
      const int kEndElementsOffset =
          FixedArray::kHeaderSize - kHeapObjectTag - argc * kPointerSize;
      __ str(r4, MemOperand(end_elements, kEndElementsOffset, PreIndex));

      // Check for a smi. Smis never need a write barrier.
      __ JumpIfNotSmi(r4, &with_write_barrier);
      __ bind(&exit);
      __ Drop(argc + 1);
      __ Ret();

      __ bind(&with_write_barrier);
      // Writes into new-space objects need no barrier either.
      __ InNewSpace(elements, r4, eq, &exit);
      __ RecordWriteHelper(elements, end_elements, r4);
      __ Drop(argc + 1);
      __ Ret();

      __ bind(&attempt_to_grow_elements);
      // r0: array's length + 1.
      // r4: elements' length.

      if (!FLAG_inline_new) {
        __ b(&call_builtin);
      }

      Isolate* isolate = masm()->isolate();
      ExternalReference new_space_allocation_top =
          ExternalReference::new_space_allocation_top_address(isolate);
      ExternalReference new_space_allocation_limit =
          ExternalReference::new_space_allocation_limit_address(isolate);

      const int kAllocationDelta = 4;
      // Load top and check if it is the end of elements. Growing in place
      // is only possible when the backing store is the most recent
      // new-space allocation.
      __ add(end_elements, elements,
             Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
      __ add(end_elements, end_elements, Operand(kEndElementsOffset));
      __ mov(r7, Operand(new_space_allocation_top));
      __ ldr(r6, MemOperand(r7));
      __ cmp(end_elements, r6);
      __ b(ne, &call_builtin);

      __ mov(r9, Operand(new_space_allocation_limit));
      __ ldr(r9, MemOperand(r9));
      __ add(r6, r6, Operand(kAllocationDelta * kPointerSize));
      __ cmp(r6, r9);
      __ b(hi, &call_builtin);

      // We fit and could grow elements.
      // Update new_space_allocation_top.
      __ str(r6, MemOperand(r7));
      // Push the argument.
      __ ldr(r6, MemOperand(sp, (argc - 1) * kPointerSize));
      __ str(r6, MemOperand(end_elements));
      // Fill the rest with holes.
      __ LoadRoot(r6, Heap::kTheHoleValueRootIndex);
      for (int i = 1; i < kAllocationDelta; i++) {
        __ str(r6, MemOperand(end_elements, i * kPointerSize));
      }

      // Update elements' and array's sizes.
      __ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
      __ add(r4, r4, Operand(Smi::FromInt(kAllocationDelta)));
      __ str(r4, FieldMemOperand(elements, FixedArray::kLengthOffset));

      // Elements are in new space, so write barrier is not required.
      __ Drop(argc + 1);
      __ Ret();
    }
    __ bind(&call_builtin);
    __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush,
                                                   masm()->isolate()),
                                 argc + 1,
                                 1);
  }

  // Handle call cache miss.
  __ bind(&miss);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(function);
}
1707
1708
// Compiles a specialized stub for Array.prototype.pop on a JSArray
// receiver with fast (FixedArray) elements: decrements the length,
// returns the last element and overwrites its slot with the hole.
// Falls back to the C++ ArrayPop builtin when the last element is the
// hole (a prototype lookup may be required) or the elements are not a
// writable FixedArray. Returns undefined to request the generic call
// stub when |object| is not an array or the call is through a cell.
MaybeObject* CallStubCompiler::CompileArrayPopCall(Object* object,
                                                   JSObject* holder,
                                                   JSGlobalPropertyCell* cell,
                                                   JSFunction* function,
                                                   String* name) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // If object is not an array, bail out to regular call.
  if (!object->IsJSArray() || cell != NULL) return heap()->undefined_value();

  Label miss, return_undefined, call_builtin;

  Register receiver = r1;
  Register elements = r3;

  GenerateNameCheck(name, &miss);

  // Get the receiver from the stack
  const int argc = arguments().immediate();
  __ ldr(receiver, MemOperand(sp, argc * kPointerSize));

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Check that the maps haven't changed.
  CheckPrototypes(JSObject::cast(object),
                  receiver, holder, elements, r4, r0, name, &miss);

  // Get the elements array of the object.
  __ ldr(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));

  // Check that the elements are in fast mode and writable.
  __ CheckMap(elements, r0, Heap::kFixedArrayMapRootIndex, &call_builtin, true);

  // Get the array's length into r4 and calculate new length.
  __ ldr(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));
  __ sub(r4, r4, Operand(Smi::FromInt(1)), SetCC);
  // Popping from an empty array yields undefined.
  __ b(lt, &return_undefined);

  // Get the last element.
  __ LoadRoot(r6, Heap::kTheHoleValueRootIndex);
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiTag == 0);
  // We can't address the last element in one operation. Compute the more
  // expensive shift first, and use an offset later on.
  __ add(elements, elements, Operand(r4, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ ldr(r0, MemOperand(elements, FixedArray::kHeaderSize - kHeapObjectTag));
  // A hole means the element may live on the prototype chain; let the
  // builtin handle that case.
  __ cmp(r0, r6);
  __ b(eq, &call_builtin);

  // Set the array's length.
  __ str(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));

  // Fill with the hole.
  __ str(r6, MemOperand(elements, FixedArray::kHeaderSize - kHeapObjectTag));
  __ Drop(argc + 1);
  __ Ret();

  __ bind(&return_undefined);
  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  __ Drop(argc + 1);
  __ Ret();

  __ bind(&call_builtin);
  __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPop,
                                                 masm()->isolate()),
                               argc + 1,
                               1);

  // Handle call cache miss.
  __ bind(&miss);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(function);
}
1792
1793
// Compiles a specialized stub for String.prototype.charCodeAt on a
// string receiver, using StringCharCodeAtGenerator for the fast path.
// Out-of-range indices return NaN inline, unless this is the default
// string CALL_IC, in which case they are routed to the miss handler
// (index_out_of_range_label == &miss). Returns undefined to request
// the generic call stub when |object| is not a string or the call is
// through a global property cell.
MaybeObject* CallStubCompiler::CompileStringCharCodeAtCall(
    Object* object,
    JSObject* holder,
    JSGlobalPropertyCell* cell,
    JSFunction* function,
    String* name) {
  // ----------- S t a t e -------------
  //  -- r2                     : function name
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // If object is not a string, bail out to regular call.
  if (!object->IsString() || cell != NULL) return heap()->undefined_value();

  const int argc = arguments().immediate();

  Label miss;
  Label name_miss;
  Label index_out_of_range;
  Label* index_out_of_range_label = &index_out_of_range;

  if (kind_ == Code::CALL_IC && extra_ic_state_ == DEFAULT_STRING_STUB) {
    index_out_of_range_label = &miss;
  }

  GenerateNameCheck(name, &name_miss);

  // Check that the maps starting from the prototype haven't changed.
  GenerateDirectLoadGlobalFunctionPrototype(masm(),
                                            Context::STRING_FUNCTION_INDEX,
                                            r0,
                                            &miss);
  // The receiver is a string, so the method is found on String.prototype.
  ASSERT(object != holder);
  CheckPrototypes(JSObject::cast(object->GetPrototype()), r0, holder,
                  r1, r3, r4, name, &miss);

  Register receiver = r1;
  Register index = r4;
  Register scratch = r3;
  Register result = r0;
  __ ldr(receiver, MemOperand(sp, argc * kPointerSize));
  if (argc > 0) {
    __ ldr(index, MemOperand(sp, (argc - 1) * kPointerSize));
  } else {
    // No argument: charCodeAt is called with index undefined (== 0).
    __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
  }

  StringCharCodeAtGenerator char_code_at_generator(receiver,
                                                   index,
                                                   scratch,
                                                   result,
                                                   &miss,  // When not a string.
                                                   &miss,  // When not a number.
                                                   index_out_of_range_label,
                                                   STRING_INDEX_IS_NUMBER);
  char_code_at_generator.GenerateFast(masm());
  __ Drop(argc + 1);
  __ Ret();

  StubRuntimeCallHelper call_helper;
  char_code_at_generator.GenerateSlow(masm(), call_helper);

  if (index_out_of_range.is_linked()) {
    __ bind(&index_out_of_range);
    // charCodeAt with an out-of-range index evaluates to NaN.
    __ LoadRoot(r0, Heap::kNanValueRootIndex);
    __ Drop(argc + 1);
    __ Ret();
  }

  __ bind(&miss);
  // Restore function name in r2 (clobbered on the fast path) before
  // jumping to the generic miss handler.
  __ Move(r2, Handle<String>(name));
  __ bind(&name_miss);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(function);
}
1876
1877
// Compiles a specialized stub for String.prototype.charAt on a string
// receiver, using StringCharAtGenerator for the fast path. Out-of-range
// indices return the empty string inline, unless this is the default
// string CALL_IC, in which case they fall through to the miss handler.
// Returns undefined to request the generic call stub when |object| is
// not a string or the call is through a global property cell.
MaybeObject* CallStubCompiler::CompileStringCharAtCall(
    Object* object,
    JSObject* holder,
    JSGlobalPropertyCell* cell,
    JSFunction* function,
    String* name) {
  // ----------- S t a t e -------------
  //  -- r2                     : function name
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // If object is not a string, bail out to regular call.
  if (!object->IsString() || cell != NULL) return heap()->undefined_value();

  const int argc = arguments().immediate();

  Label miss;
  Label name_miss;
  Label index_out_of_range;
  Label* index_out_of_range_label = &index_out_of_range;

  if (kind_ == Code::CALL_IC && extra_ic_state_ == DEFAULT_STRING_STUB) {
    index_out_of_range_label = &miss;
  }

  GenerateNameCheck(name, &name_miss);

  // Check that the maps starting from the prototype haven't changed.
  GenerateDirectLoadGlobalFunctionPrototype(masm(),
                                            Context::STRING_FUNCTION_INDEX,
                                            r0,
                                            &miss);
  // The receiver is a string, so the method is found on String.prototype.
  ASSERT(object != holder);
  CheckPrototypes(JSObject::cast(object->GetPrototype()), r0, holder,
                  r1, r3, r4, name, &miss);

  Register receiver = r0;
  Register index = r4;
  Register scratch1 = r1;
  Register scratch2 = r3;
  Register result = r0;
  __ ldr(receiver, MemOperand(sp, argc * kPointerSize));
  if (argc > 0) {
    __ ldr(index, MemOperand(sp, (argc - 1) * kPointerSize));
  } else {
    // No argument: charAt is called with index undefined (== 0).
    __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
  }

  StringCharAtGenerator char_at_generator(receiver,
                                          index,
                                          scratch1,
                                          scratch2,
                                          result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          index_out_of_range_label,
                                          STRING_INDEX_IS_NUMBER);
  char_at_generator.GenerateFast(masm());
  __ Drop(argc + 1);
  __ Ret();

  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm(), call_helper);

  if (index_out_of_range.is_linked()) {
    __ bind(&index_out_of_range);
    // charAt with an out-of-range index evaluates to the empty string.
    __ LoadRoot(r0, Heap::kEmptyStringRootIndex);
    __ Drop(argc + 1);
    __ Ret();
  }

  __ bind(&miss);
  // Restore function name in r2 (clobbered on the fast path) before
  // jumping to the generic miss handler.
  __ Move(r2, Handle<String>(name));
  __ bind(&name_miss);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(function);
}
1962
1963
// Compiles a specialized stub for String.fromCharCode with exactly one
// argument. Smi arguments are converted inline via
// StringCharFromCodeGenerator (masked to uint16 per the spec); non-smi
// arguments invoke the full JS function. Supports both normal receivers
// (cell == NULL) and calls through a global property cell. Returns
// undefined to request the generic call stub for unexpected argc or a
// non-JSObject receiver.
MaybeObject* CallStubCompiler::CompileStringFromCharCodeCall(
    Object* object,
    JSObject* holder,
    JSGlobalPropertyCell* cell,
    JSFunction* function,
    String* name) {
  // ----------- S t a t e -------------
  //  -- r2                     : function name
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  const int argc = arguments().immediate();

  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  if (!object->IsJSObject() || argc != 1) return heap()->undefined_value();

  Label miss;
  GenerateNameCheck(name, &miss);

  if (cell == NULL) {
    // Regular receiver: load it and verify the prototype chain.
    __ ldr(r1, MemOperand(sp, 1 * kPointerSize));

    STATIC_ASSERT(kSmiTag == 0);
    __ tst(r1, Operand(kSmiTagMask));
    __ b(eq, &miss);

    CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name,
                    &miss);
  } else {
    // Contextual call through a global property cell: check the global
    // receiver and that the cell still holds this function.
    ASSERT(cell->value() == function);
    GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }

  // Load the char code argument.
  Register code = r1;
  __ ldr(code, MemOperand(sp, 0 * kPointerSize));

  // Check the code is a smi.
  Label slow;
  STATIC_ASSERT(kSmiTag == 0);
  __ tst(code, Operand(kSmiTagMask));
  __ b(ne, &slow);

  // Convert the smi code to uint16.
  __ and_(code, code, Operand(Smi::FromInt(0xffff)));

  StringCharFromCodeGenerator char_from_code_generator(code, r0);
  char_from_code_generator.GenerateFast(masm());
  __ Drop(argc + 1);
  __ Ret();

  StubRuntimeCallHelper call_helper;
  char_from_code_generator.GenerateSlow(masm(), call_helper);

  // Tail call the full function. We do not have to patch the receiver
  // because the function makes no use of it.
  __ bind(&slow);
  __ InvokeFunction(function, arguments(), JUMP_FUNCTION);

  __ bind(&miss);
  // r2: function name.
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
}
2036
2037
// Compiles a specialized stub for Math.floor with exactly one argument,
// using the VFP3 round-towards-minus-infinity conversion. Smi arguments
// are returned unchanged; heap-number arguments are converted and the
// result returned as a smi when it fits. NaN, +/-Infinity, already-round
// large values, non-smi results and -0 fall back to the original value or
// to calling the full JS function. Requires VFP3; otherwise (and for a
// non-JSObject receiver or argc != 1) returns undefined to request the
// generic call stub.
MaybeObject* CallStubCompiler::CompileMathFloorCall(Object* object,
                                                    JSObject* holder,
                                                    JSGlobalPropertyCell* cell,
                                                    JSFunction* function,
                                                    String* name) {
  // ----------- S t a t e -------------
  //  -- r2                     : function name
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  if (!CpuFeatures::IsSupported(VFP3)) {
    return heap()->undefined_value();
  }

  CpuFeatures::Scope scope_vfp3(VFP3);

  const int argc = arguments().immediate();

  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  if (!object->IsJSObject() || argc != 1) return heap()->undefined_value();

  Label miss, slow;
  GenerateNameCheck(name, &miss);

  if (cell == NULL) {
    // Regular receiver: load it and verify the prototype chain.
    __ ldr(r1, MemOperand(sp, 1 * kPointerSize));

    STATIC_ASSERT(kSmiTag == 0);
    __ JumpIfSmi(r1, &miss);

    CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name,
                    &miss);
  } else {
    // Contextual call through a global property cell: check the global
    // receiver and that the cell still holds this function.
    ASSERT(cell->value() == function);
    GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }

  // Load the (only) argument into r0.
  __ ldr(r0, MemOperand(sp, 0 * kPointerSize));

  // If the argument is a smi, just return.
  STATIC_ASSERT(kSmiTag == 0);
  __ tst(r0, Operand(kSmiTagMask));
  __ Drop(argc + 1, eq);
  __ Ret(eq);

  // Only heap numbers are handled inline; anything else goes slow.
  __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, &slow, true);

  Label wont_fit_smi, no_vfp_exception, restore_fpscr_and_return;

  // If vfp3 is enabled, we use the fpu rounding with the RM (round towards
  // minus infinity) mode.

  // Load the HeapNumber value.
  // We will need access to the value in the core registers, so we load it
  // with ldrd and move it to the fpu. It also spares a sub instruction for
  // updating the HeapNumber value address, as vldr expects a multiple
  // of 4 offset.
  __ Ldrd(r4, r5, FieldMemOperand(r0, HeapNumber::kValueOffset));
  __ vmov(d1, r4, r5);

  // Backup FPSCR.
  __ vmrs(r3);
  // Set custom FPCSR:
  //  - Set rounding mode to "Round towards Minus Infinity"
  //    (ie bits [23:22] = 0b10).
  //  - Clear vfp cumulative exception flags (bits [3:0]).
  //  - Make sure Flush-to-zero mode control bit is unset (bit 22).
  __ bic(r9, r3,
      Operand(kVFPExceptionMask | kVFPRoundingModeMask | kVFPFlushToZeroMask));
  __ orr(r9, r9, Operand(kRoundToMinusInf));
  __ vmsr(r9);

  // Convert the argument to an integer.
  __ vcvt_s32_f64(s0, d1, kFPSCRRounding);

  // Use vcvt latency to start checking for special cases.
  // Get the argument exponent and clear the sign bit.
  __ bic(r6, r5, Operand(HeapNumber::kSignMask));
  __ mov(r6, Operand(r6, LSR, HeapNumber::kMantissaBitsInTopWord));

  // Retrieve FPSCR and check for vfp exceptions.
  __ vmrs(r9);
  __ tst(r9, Operand(kVFPExceptionMask));
  __ b(&no_vfp_exception, eq);

  // Check for NaN, Infinity, and -Infinity.
  // They are invariant through a Math.Floor call, so just
  // return the original argument.
  __ sub(r7, r6, Operand(HeapNumber::kExponentMask
        >> HeapNumber::kMantissaBitsInTopWord), SetCC);
  __ b(&restore_fpscr_and_return, eq);
  // We had an overflow or underflow in the conversion. Check if we
  // have a big exponent.
  __ cmp(r7, Operand(HeapNumber::kMantissaBits));
  // If greater or equal, the argument is already round and in r0.
  __ b(&restore_fpscr_and_return, ge);
  __ b(&wont_fit_smi);

  __ bind(&no_vfp_exception);
  // Move the result back to general purpose register r0.
  __ vmov(r0, s0);
  // Check if the result fits into a smi.
  __ add(r1, r0, Operand(0x40000000), SetCC);
  __ b(&wont_fit_smi, mi);
  // Tag the result.
  STATIC_ASSERT(kSmiTag == 0);
  __ mov(r0, Operand(r0, LSL, kSmiTagSize));

  // Check for -0.
  __ cmp(r0, Operand(0, RelocInfo::NONE));
  __ b(&restore_fpscr_and_return, ne);
  // r5 already holds the HeapNumber exponent.
  __ tst(r5, Operand(HeapNumber::kSignMask));
  // If our HeapNumber is negative it was -0, so load its address and return.
  // Else r0 is loaded with 0, so we can also just return.
  __ ldr(r0, MemOperand(sp, 0 * kPointerSize), ne);

  __ bind(&restore_fpscr_and_return);
  // Restore FPSCR and return.
  __ vmsr(r3);
  __ Drop(argc + 1);
  __ Ret();

  __ bind(&wont_fit_smi);
  // Restore FPCSR and fall to slow case.
  __ vmsr(r3);

  __ bind(&slow);
  // Tail call the full function. We do not have to patch the receiver
  // because the function makes no use of it.
  __ InvokeFunction(function, arguments(), JUMP_FUNCTION);

  __ bind(&miss);
  // r2: function name.
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
}
2184
2185
// Compiles a specialized call stub for Math.abs. Handles the smi fast case
// and the heap-number case inline; bails out to a full function call for
// the most negative smi (whose absolute value is not a smi).
// Returns undefined_value() to signal "use the regular call stub" when the
// receiver/argument shape is unsupported.
MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object,
                                                  JSObject* holder,
                                                  JSGlobalPropertyCell* cell,
                                                  JSFunction* function,
                                                  String* name) {
  // ----------- S t a t e -------------
  //  -- r2                     : function name
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  const int argc = arguments().immediate();

  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  if (!object->IsJSObject() || argc != 1) return heap()->undefined_value();

  Label miss;
  GenerateNameCheck(name, &miss);

  if (cell == NULL) {
    // Non-global call: load the receiver (below the single argument) and
    // verify it is a JSObject with an unchanged prototype chain.
    __ ldr(r1, MemOperand(sp, 1 * kPointerSize));

    STATIC_ASSERT(kSmiTag == 0);
    __ tst(r1, Operand(kSmiTagMask));
    __ b(eq, &miss);

    CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name,
                    &miss);
  } else {
    // Global call: the function comes from a property cell; verify the
    // global receiver and that the cell still holds the expected function.
    ASSERT(cell->value() == function);
    GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }

  // Load the (only) argument into r0.
  __ ldr(r0, MemOperand(sp, 0 * kPointerSize));

  // Check if the argument is a smi.
  Label not_smi;
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfNotSmi(r0, &not_smi);

  // Branch-free abs: r1 = r0 XOR (r0 >> 31), i.e. bitwise-not for negative
  // values, identity for non-negative values.
  __ eor(r1, r0, Operand(r0, ASR, kBitsPerInt - 1));

  // Then subtract the sign-extension (-1 or 0): adds 1 for negative values,
  // does nothing for non-negative values. SetCC so we can detect overflow.
  __ sub(r0, r1, Operand(r0, ASR, kBitsPerInt - 1), SetCC);

  // If the result is still negative, go to the slow case.
  // This only happens for the most negative smi.
  Label slow;
  __ b(mi, &slow);

  // Smi case done.
  __ Drop(argc + 1);
  __ Ret();

  // Check if the argument is a heap number and load its exponent and
  // sign.
  __ bind(&not_smi);
  __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, &slow, true);
  __ ldr(r1, FieldMemOperand(r0, HeapNumber::kExponentOffset));

  // Check the sign of the argument. If the argument is positive,
  // just return it.
  Label negative_sign;
  __ tst(r1, Operand(HeapNumber::kSignMask));
  __ b(ne, &negative_sign);
  __ Drop(argc + 1);
  __ Ret();

  // If the argument is negative, clear the sign, and return a new
  // number.  A fresh heap number is allocated rather than mutating the
  // argument in place.
  __ bind(&negative_sign);
  __ eor(r1, r1, Operand(HeapNumber::kSignMask));
  __ ldr(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
  __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
  __ AllocateHeapNumber(r0, r4, r5, r6, &slow);
  __ str(r1, FieldMemOperand(r0, HeapNumber::kExponentOffset));
  __ str(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
  __ Drop(argc + 1);
  __ Ret();

  // Tail call the full function. We do not have to patch the receiver
  // because the function makes no use of it.
  __ bind(&slow);
  __ InvokeFunction(function, arguments(), JUMP_FUNCTION);

  __ bind(&miss);
  // r2: function name.
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
}
2286
2287
// Compiles a fast-path stub for a simple API (C++ callback) call: checks
// receiver and prototype chain, then jumps directly into the API function
// without going through the generic call IC.  Returns undefined_value()
// to signal "use the regular call path" when the fast path cannot apply.
MaybeObject* CallStubCompiler::CompileFastApiCall(
    const CallOptimization& optimization,
    Object* object,
    JSObject* holder,
    JSGlobalPropertyCell* cell,
    JSFunction* function,
    String* name) {
  Counters* counters = isolate()->counters();

  ASSERT(optimization.is_simple_api_call());
  // Bail out if object is a global object as we don't want to
  // repatch it to global receiver.
  if (object->IsGlobalObject()) return heap()->undefined_value();
  // Cell-based (global) calls are not supported by the fast path.
  if (cell != NULL) return heap()->undefined_value();
  int depth = optimization.GetPrototypeDepthOfExpectedType(
      JSObject::cast(object), holder);
  if (depth == kInvalidProtoDepth) return heap()->undefined_value();

  // Two miss labels: before and after stack space is reserved, so the
  // reserved slots can be freed on the late-miss path only.
  Label miss, miss_before_stack_reserved;

  GenerateNameCheck(name, &miss_before_stack_reserved);

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));

  // Check that the receiver isn't a smi.
  __ tst(r1, Operand(kSmiTagMask));
  __ b(eq, &miss_before_stack_reserved);

  __ IncrementCounter(counters->call_const(), 1, r0, r3);
  __ IncrementCounter(counters->call_const_fast_api(), 1, r0, r3);

  ReserveSpaceForFastApiCall(masm(), r0);

  // Check that the maps haven't changed and find a Holder as a side effect.
  CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name,
                  depth, &miss);

  MaybeObject* result = GenerateFastApiDirectCall(masm(), optimization, argc);
  if (result->IsFailure()) return result;

  __ bind(&miss);
  FreeSpaceForFastApiCall(masm());

  __ bind(&miss_before_stack_reserved);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(function);
}
2340
2341
// Compiles a monomorphic call stub for a known (constant) target function.
// The kind of receiver check emitted depends on `check`: receiver map,
// string, number, or boolean.  For value receivers, the stub verifies the
// maps of the corresponding wrapper prototype chain instead.
MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,
                                                   JSObject* holder,
                                                   JSFunction* function,
                                                   String* name,
                                                   CheckType check) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  // Try a custom (builtin-specific) call generator first; it returns
  // undefined to indicate it declined.
  if (HasCustomCallGenerator(function)) {
    MaybeObject* maybe_result = CompileCustomCall(
        object, holder, NULL, function, name);
    Object* result;
    if (!maybe_result->ToObject(&result)) return maybe_result;
    // undefined means bail out to regular compiler.
    if (!result->IsUndefined()) return result;
  }

  Label miss;

  GenerateNameCheck(name, &miss);

  // Get the receiver from the stack
  const int argc = arguments().immediate();
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));

  // Check that the receiver isn't a smi.
  // (Skipped for NUMBER_CHECK, where a smi receiver is valid.)
  if (check != NUMBER_CHECK) {
    __ tst(r1, Operand(kSmiTagMask));
    __ b(eq, &miss);
  }

  // Make sure that it's okay not to patch the on stack receiver
  // unless we're doing a receiver map check.
  ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);

  SharedFunctionInfo* function_info = function->shared();
  switch (check) {
    case RECEIVER_MAP_CHECK:
      __ IncrementCounter(masm()->isolate()->counters()->call_const(),
                          1, r0, r3);

      // Check that the maps haven't changed.
      CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name,
                      &miss);

      // Patch the receiver on the stack with the global proxy if
      // necessary.
      if (object->IsGlobalObject()) {
        __ ldr(r3, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
        __ str(r3, MemOperand(sp, argc * kPointerSize));
      }
      break;

    case STRING_CHECK:
      if (!function->IsBuiltin() && !function_info->strict_mode()) {
        // Calling non-strict non-builtins with a value as the receiver
        // requires boxing.
        __ jmp(&miss);
      } else {
        // Check that the object is a two-byte string or a symbol.
        __ CompareObjectType(r1, r3, r3, FIRST_NONSTRING_TYPE);
        __ b(hs, &miss);
        // Check that the maps starting from the prototype haven't changed.
        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::STRING_FUNCTION_INDEX, r0, &miss);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), r0, holder, r3,
                        r1, r4, name, &miss);
      }
      break;

    case NUMBER_CHECK: {
      if (!function->IsBuiltin() && !function_info->strict_mode()) {
        // Calling non-strict non-builtins with a value as the receiver
        // requires boxing.
        __ jmp(&miss);
      } else {
        Label fast;
        // Check that the object is a smi or a heap number.
        __ tst(r1, Operand(kSmiTagMask));
        __ b(eq, &fast);
        __ CompareObjectType(r1, r0, r0, HEAP_NUMBER_TYPE);
        __ b(ne, &miss);
        __ bind(&fast);
        // Check that the maps starting from the prototype haven't changed.
        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::NUMBER_FUNCTION_INDEX, r0, &miss);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), r0, holder, r3,
                        r1, r4, name, &miss);
      }
      break;
    }

    case BOOLEAN_CHECK: {
      if (!function->IsBuiltin() && !function_info->strict_mode()) {
        // Calling non-strict non-builtins with a value as the receiver
        // requires boxing.
        __ jmp(&miss);
      } else {
        Label fast;
        // Check that the object is a boolean (exactly true or false).
        __ LoadRoot(ip, Heap::kTrueValueRootIndex);
        __ cmp(r1, ip);
        __ b(eq, &fast);
        __ LoadRoot(ip, Heap::kFalseValueRootIndex);
        __ cmp(r1, ip);
        __ b(ne, &miss);
        __ bind(&fast);
        // Check that the maps starting from the prototype haven't changed.
        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::BOOLEAN_FUNCTION_INDEX, r0, &miss);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), r0, holder, r3,
                        r1, r4, name, &miss);
      }
      break;
    }

    default:
      UNREACHABLE();
  }

  // All checks passed: tail-call the constant target.
  __ InvokeFunction(function, arguments(), JUMP_FUNCTION);

  // Handle call cache miss.
  __ bind(&miss);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(function);
}
2473
2474
// Compiles a call stub for a property found behind a named interceptor.
// The interceptor compiler emits the lookup; the resulting function value
// (in r0) is then invoked with the original receiver restored.
MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object,
                                                      JSObject* holder,
                                                      String* name) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------

  Label miss;

  GenerateNameCheck(name, &miss);

  // Get the number of arguments.
  const int argc = arguments().immediate();

  // Look up what the property resolves to once the interceptor is bypassed.
  LookupResult lookup;
  LookupPostInterceptor(holder, name, &lookup);

  // Get the receiver from the stack.
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));

  CallInterceptorCompiler compiler(this, arguments(), r2);
  MaybeObject* result = compiler.Compile(masm(),
                                         object,
                                         holder,
                                         name,
                                         &lookup,
                                         r1,
                                         r3,
                                         r4,
                                         r0,
                                         &miss);
  if (result->IsFailure()) {
    return result;
  }

  // Move returned value, the function to call, to r1.
  __ mov(r1, r0);
  // Restore receiver.
  __ ldr(r0, MemOperand(sp, argc * kPointerSize));

  GenerateCallFunction(masm(), object, arguments(), &miss);

  // Handle call cache miss.
  __ bind(&miss);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(INTERCEPTOR, name);
}
2526
2527
// Compiles a call stub for a function stored in a global property cell.
// Verifies the global receiver and the cell contents, then tail-calls the
// cached code of the (already compiled) target function.
MaybeObject* CallStubCompiler::CompileCallGlobal(JSObject* object,
                                                 GlobalObject* holder,
                                                 JSGlobalPropertyCell* cell,
                                                 JSFunction* function,
                                                 String* name) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------

  // Try a custom (builtin-specific) call generator first; it returns
  // undefined to indicate it declined.
  if (HasCustomCallGenerator(function)) {
    MaybeObject* maybe_result = CompileCustomCall(
        object, holder, cell, function, name);
    Object* result;
    if (!maybe_result->ToObject(&result)) return maybe_result;
    // undefined means bail out to regular compiler.
    if (!result->IsUndefined()) return result;
  }

  Label miss;

  GenerateNameCheck(name, &miss);

  // Get the number of arguments.
  const int argc = arguments().immediate();

  GenerateGlobalReceiverCheck(object, holder, name, &miss);

  // Loads the function into r1 (and verifies it matches `function`).
  GenerateLoadFunctionFromCell(cell, function, &miss);

  // Patch the receiver on the stack with the global proxy if
  // necessary.
  if (object->IsGlobalObject()) {
    __ ldr(r3, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));
    __ str(r3, MemOperand(sp, argc * kPointerSize));
  }

  // Setup the context (function already in r1).
  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  // Jump to the cached code (tail call).
  Counters* counters = masm()->isolate()->counters();
  __ IncrementCounter(counters->call_global_inline(), 1, r3, r4);
  ASSERT(function->is_compiled());
  Handle<Code> code(function->code());
  ParameterCount expected(function->shared()->formal_parameter_count());
  if (V8::UseCrankshaft()) {
    // TODO(kasperl): For now, we always call indirectly through the
    // code field in the function to allow recompilation to take effect
    // without changing any of the call sites.
    __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
    __ InvokeCode(r3, expected, arguments(), JUMP_FUNCTION);
  } else {
    __ InvokeCode(code, expected, arguments(),
                  RelocInfo::CODE_TARGET, JUMP_FUNCTION);
  }

  // Handle call cache miss.
  __ bind(&miss);
  __ IncrementCounter(counters->call_global_inline_miss(), 1, r1, r3);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(NORMAL, name);
}
2594
2595
// Compiles a store stub for an in-object or backing-store field write,
// optionally performing a map transition.  On any check failure the stub
// jumps to the generic StoreIC miss builtin.
MaybeObject* StoreStubCompiler::CompileStoreField(JSObject* object,
                                                  int index,
                                                  Map* transition,
                                                  String* name) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  GenerateStoreField(masm(),
                     object,
                     index,
                     transition,
                     r1, r2, r3,
                     &miss);
  __ bind(&miss);
  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  // FIELD when storing into an existing field, MAP_TRANSITION when the
  // store introduces a new map.
  return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
}
2621
2622
// Compiles a store stub for a property backed by an AccessorInfo callback.
// After receiver checks, the stub tail-calls into the runtime, which
// invokes the native setter.
MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object,
                                                     AccessorInfo* callback,
                                                     String* name) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  // Check that the object isn't a smi.
  __ tst(r1, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Check that the map of the object hasn't changed.
  __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r3, Operand(Handle<Map>(object->map())));
  __ b(ne, &miss);

  // Perform global security token check if needed.
  if (object->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(r1, r3, &miss);
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Runtime arguments: receiver, callback info, name, value (4 values).
  __ push(r1);  // receiver
  __ mov(ip, Operand(Handle<AccessorInfo>(callback)));  // callback info
  __ Push(ip, r2, r0);

  // Do tail-call to the runtime system.
  ExternalReference store_callback_property =
      ExternalReference(IC_Utility(IC::kStoreCallbackProperty),
                        masm()->isolate());
  __ TailCallExternalReference(store_callback_property, 4, 1);

  // Handle store cache miss.
  __ bind(&miss);
  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(CALLBACKS, name);
}
2670
2671
// Compiles a store stub for a receiver with a named setter interceptor.
// After receiver checks, the stub tail-calls into the runtime, passing
// the current strict-mode flag along with receiver/name/value.
MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
                                                        String* name) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  // Check that the object isn't a smi.
  __ tst(r1, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Check that the map of the object hasn't changed.
  __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r3, Operand(Handle<Map>(receiver->map())));
  __ b(ne, &miss);

  // Perform global security token check if needed.
  if (receiver->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(r1, r3, &miss);
  }

  // Stub is never generated for non-global objects that require access
  // checks.
  ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded());

  __ Push(r1, r2, r0);  // Receiver, name, value.

  __ mov(r0, Operand(Smi::FromInt(strict_mode_)));
  __ push(r0);  // strict mode

  // Do tail-call to the runtime system (4 arguments pushed above).
  ExternalReference store_ic_property =
      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty),
                        masm()->isolate());
  __ TailCallExternalReference(store_ic_property, 4, 1);

  // Handle store cache miss.
  __ bind(&miss);
  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(INTERCEPTOR, name);
}
2719
2720
// Compiles a store stub for a global property: writes the value directly
// into the global property cell after verifying the global's map and that
// the cell has not been holed (property deleted).
MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
                                                   JSGlobalPropertyCell* cell,
                                                   String* name) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  // Check that the map of the global has not changed.
  __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r3, Operand(Handle<Map>(object->map())));
  __ b(ne, &miss);

  // Check that the value in the cell is not the hole. If it is, this
  // cell could have been deleted and reintroducing the global needs
  // to update the property details in the property dictionary of the
  // global object. We bail out to the runtime system to do that.
  __ mov(r4, Operand(Handle<JSGlobalPropertyCell>(cell)));
  __ LoadRoot(r5, Heap::kTheHoleValueRootIndex);
  __ ldr(r6, FieldMemOperand(r4, JSGlobalPropertyCell::kValueOffset));
  __ cmp(r5, r6);
  __ b(eq, &miss);

  // Store the value in the cell.
  __ str(r0, FieldMemOperand(r4, JSGlobalPropertyCell::kValueOffset));

  Counters* counters = masm()->isolate()->counters();
  __ IncrementCounter(counters->named_store_global_inline(), 1, r4, r3);
  __ Ret();

  // Handle store cache miss.
  __ bind(&miss);
  __ IncrementCounter(counters->named_store_global_inline_miss(), 1, r4, r3);
  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(NORMAL, name);
}
2763
2764
// Compiles a load stub that proves a property does NOT exist on the whole
// prototype chain and returns undefined.  `last` is the final object on
// the chain; for a global `last`, the absence check also covers the
// global property cell.
MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name,
                                                      JSObject* object,
                                                      JSObject* last) {
  // ----------- S t a t e -------------
  //  -- r0    : receiver
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  // Check that receiver is not a smi.
  __ tst(r0, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Check the maps of the full prototype chain.
  CheckPrototypes(object, r0, last, r3, r1, r4, name, &miss);

  // If the last object in the prototype chain is a global object,
  // check that the global property cell is empty.
  if (last->IsGlobalObject()) {
    MaybeObject* cell = GenerateCheckPropertyCell(masm(),
                                                  GlobalObject::cast(last),
                                                  name,
                                                  r1,
                                                  &miss);
    if (cell->IsFailure()) {
      // Compilation failed; release the label before bailing out.
      miss.Unuse();
      return cell;
    }
  }

  // Return undefined if maps of the full prototype chain are still the
  // same and no global property with this name contains a value.
  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  __ Ret();

  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  // Nonexistent stubs are cached under the empty string, not `name`.
  return GetCode(NONEXISTENT, heap()->empty_string());
}
2806
2807
// Compiles a load stub for an in-object / backing-store field at `index`
// on `holder`, reached from `object` through a checked prototype chain.
MaybeObject* LoadStubCompiler::CompileLoadField(JSObject* object,
                                                JSObject* holder,
                                                int index,
                                                String* name) {
  // ----------- S t a t e -------------
  //  -- r0    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  GenerateLoadField(object, holder, r0, r3, r1, r4, index, name, &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(FIELD, name);
}
2826
2827
// Compiles a load stub for a property backed by an AccessorInfo callback.
// Code emission itself may fail (e.g. allocation), in which case the
// failure is propagated instead of a code object.
MaybeObject* LoadStubCompiler::CompileLoadCallback(String* name,
                                                   JSObject* object,
                                                   JSObject* holder,
                                                   AccessorInfo* callback) {
  // ----------- S t a t e -------------
  //  -- r0    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  MaybeObject* result = GenerateLoadCallback(object, holder, r0, r2, r3, r1, r4,
                                             callback, name, &miss);
  if (result->IsFailure()) {
    // Release the unbound label before bailing out.
    miss.Unuse();
    return result;
  }

  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(CALLBACKS, name);
}
2852
2853
// Compiles a load stub for a constant-function property: after prototype
// checks, the stub materializes the known constant `value` directly.
MaybeObject* LoadStubCompiler::CompileLoadConstant(JSObject* object,
                                                   JSObject* holder,
                                                   Object* value,
                                                   String* name) {
  // ----------- S t a t e -------------
  //  -- r0    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  GenerateLoadConstant(object, holder, r0, r3, r1, r4, value, name, &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(CONSTANT_FUNCTION, name);
}
2872
2873
// Compiles a load stub for a property behind a named load interceptor.
// The post-interceptor lookup result guides what code is emitted for the
// case where the interceptor does not supply a value.
MaybeObject* LoadStubCompiler::CompileLoadInterceptor(JSObject* object,
                                                      JSObject* holder,
                                                      String* name) {
  // ----------- S t a t e -------------
  //  -- r0    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  LookupResult lookup;
  LookupPostInterceptor(holder, name, &lookup);
  GenerateLoadInterceptor(object,
                          holder,
                          &lookup,
                          r0,
                          r2,
                          r3,
                          r1,
                          r4,
                          name,
                          &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(INTERCEPTOR, name);
}
2902
2903
// Compiles a load stub for a global property: reads the value straight out
// of the global property cell after checking the prototype-chain maps.
// For deletable properties, a hole value in the cell forces a miss.
MaybeObject* LoadStubCompiler::CompileLoadGlobal(JSObject* object,
                                                 GlobalObject* holder,
                                                 JSGlobalPropertyCell* cell,
                                                 String* name,
                                                 bool is_dont_delete) {
  // ----------- S t a t e -------------
  //  -- r0    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  // If the object is the holder then we know that it's a global
  // object which can only happen for contextual calls. In this case,
  // the receiver cannot be a smi.
  if (object != holder) {
    __ tst(r0, Operand(kSmiTagMask));
    __ b(eq, &miss);
  }

  // Check that the map of the global has not changed.
  CheckPrototypes(object, r0, holder, r3, r4, r1, name, &miss);

  // Get the value from the cell.
  __ mov(r3, Operand(Handle<JSGlobalPropertyCell>(cell)));
  __ ldr(r4, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset));

  // Check for deleted property if property can actually be deleted.
  if (!is_dont_delete) {
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
    __ cmp(r4, ip);
    __ b(eq, &miss);
  }

  __ mov(r0, r4);
  Counters* counters = masm()->isolate()->counters();
  __ IncrementCounter(counters->named_load_global_stub(), 1, r1, r3);
  __ Ret();

  __ bind(&miss);
  __ IncrementCounter(counters->named_load_global_stub_miss(), 1, r1, r3);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(NORMAL, name);
}
2950
2951
// Compiles a keyed-load IC stub specialized for a fixed property name that
// resolves to an in-object/backing-store field at the given index.
// On entry: r0: key, r1: receiver, lr: return address.
MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name,
                                                     JSObject* receiver,
                                                     JSObject* holder,
                                                     int index) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- r0    : key
  //  -- r1    : receiver
  // -----------------------------------
  Label miss;

  // Check the key is the cached one.
  __ cmp(r0, Operand(Handle<String>(name)));
  __ b(ne, &miss);

  // r2, r3, r4 are scratch registers.
  GenerateLoadField(receiver, holder, r1, r2, r3, r4, index, name, &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  return GetCode(FIELD, name);
}
2973
2974
// Compiles a keyed-load IC stub specialized for a fixed property name whose
// value is produced by an AccessorInfo callback.
// On entry: r0: key, r1: receiver, lr: return address.
// GenerateLoadCallback may itself fail (allocation); in that case the failure
// is propagated and the unbound miss label is explicitly discarded.
MaybeObject* KeyedLoadStubCompiler::CompileLoadCallback(
    String* name,
    JSObject* receiver,
    JSObject* holder,
    AccessorInfo* callback) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- r0    : key
  //  -- r1    : receiver
  // -----------------------------------
  Label miss;

  // Check the key is the cached one.
  __ cmp(r0, Operand(Handle<String>(name)));
  __ b(ne, &miss);

  MaybeObject* result = GenerateLoadCallback(receiver, holder, r1, r0, r2, r3,
                                             r4, callback, name, &miss);
  if (result->IsFailure()) {
    // Avoid the "label bound or unused" debug check when bailing out early.
    miss.Unuse();
    return result;
  }

  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  return GetCode(CALLBACKS, name);
}
3003
3004
// Compiles a keyed-load IC stub specialized for a fixed property name whose
// value is a known constant (typically a constant function).
// On entry: r0: key, r1: receiver, lr: return address.
MaybeObject* KeyedLoadStubCompiler::CompileLoadConstant(String* name,
                                                        JSObject* receiver,
                                                        JSObject* holder,
                                                        Object* value) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- r0    : key
  //  -- r1    : receiver
  // -----------------------------------
  Label miss;

  // Check the key is the cached one.
  __ cmp(r0, Operand(Handle<String>(name)));
  __ b(ne, &miss);

  // r2, r3, r4 are scratch registers.
  GenerateLoadConstant(receiver, holder, r1, r2, r3, r4, value, name, &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.
  return GetCode(CONSTANT_FUNCTION, name);
}
3027
3028
// Compiles a keyed-load IC stub specialized for a fixed property name that is
// guarded by an interceptor. On entry: r0: key, r1: receiver, lr: return
// address. Mirrors LoadStubCompiler::CompileLoadInterceptor but with the
// keyed-IC register assignment (receiver in r1, name/key in r0).
MaybeObject* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
                                                           JSObject* holder,
                                                           String* name) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- r0    : key
  //  -- r1    : receiver
  // -----------------------------------
  Label miss;

  // Check the key is the cached one.
  __ cmp(r0, Operand(Handle<String>(name)));
  __ b(ne, &miss);

  // Look the property up behind the interceptor to allow inlining the
  // post-interceptor case.
  LookupResult lookup;
  LookupPostInterceptor(holder, name, &lookup);
  // r2, r3, r4 are scratch registers.
  GenerateLoadInterceptor(receiver,
                          holder,
                          &lookup,
                          r1,
                          r0,
                          r2,
                          r3,
                          r4,
                          name,
                          &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  return GetCode(INTERCEPTOR, name);
}
3060
3061
// Compiles a keyed-load IC stub for the "length" property of a JSArray,
// specialized on the cached key string.
// On entry: r0: key, r1: receiver, lr: return address.
MaybeObject* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- r0    : key
  //  -- r1    : receiver
  // -----------------------------------
  Label miss;

  // Check the key is the cached one.
  __ cmp(r0, Operand(Handle<String>(name)));
  __ b(ne, &miss);

  // r2 is a scratch register.
  GenerateLoadArrayLength(masm(), r1, r2, &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  return GetCode(CALLBACKS, name);
}
3080
3081
// Compiles a keyed-load IC stub for the "length" property of a string,
// specialized on the cached key string.
// On entry: r0: key, r1: receiver, lr: return address.
// The stub counter is incremented on entry and decremented again on the miss
// path so it only counts successful fast-path loads.
MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- r0    : key
  //  -- r1    : receiver
  // -----------------------------------
  Label miss;

  Counters* counters = masm()->isolate()->counters();
  __ IncrementCounter(counters->keyed_load_string_length(), 1, r2, r3);

  // Check the key is the cached one.
  __ cmp(r0, Operand(Handle<String>(name)));
  __ b(ne, &miss);

  // The final 'true' argument enables support for string wrapper objects.
  GenerateLoadStringLength(masm(), r1, r2, r3, &miss, true);
  __ bind(&miss);
  __ DecrementCounter(counters->keyed_load_string_length(), 1, r2, r3);

  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  return GetCode(CALLBACKS, name);
}
3105
3106
// Compiles a keyed-load IC stub for the "prototype" property of a function,
// specialized on the cached key string.
// On entry: r0: key, r1: receiver, lr: return address.
// As with the string-length stub, the counter is balanced on the miss path.
MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- r0    : key
  //  -- r1    : receiver
  // -----------------------------------
  Label miss;

  Counters* counters = masm()->isolate()->counters();
  __ IncrementCounter(counters->keyed_load_function_prototype(), 1, r2, r3);

  // Check the name hasn't changed.
  __ cmp(r0, Operand(Handle<String>(name)));
  __ b(ne, &miss);

  // r2, r3 are scratch registers.
  GenerateLoadFunctionPrototype(masm(), r1, r2, r3, &miss);
  __ bind(&miss);
  __ DecrementCounter(counters->keyed_load_function_prototype(), 1, r2, r3);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  return GetCode(CALLBACKS, name);
}
3129
3130
// Compiles a keyed-load IC stub specialized for a receiver with fast
// (FixedArray-backed) elements and a smi key.
// On entry: r0: key (smi), r1: receiver, lr: return address.
// Misses to the generic KEYED_LOAD_IC on any failed check or when the loaded
// element is the hole.
MaybeObject* KeyedLoadStubCompiler::CompileLoadSpecialized(JSObject* receiver) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- r0    : key
  //  -- r1    : receiver
  // -----------------------------------
  Label miss;

  // Check that the receiver isn't a smi.
  __ tst(r1, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Check that the map matches.
  __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r2, Operand(Handle<Map>(receiver->map())));
  __ b(ne, &miss);

  // Check that the key is a smi.
  __ tst(r0, Operand(kSmiTagMask));
  __ b(ne, &miss);

  // Get the elements array.
  __ ldr(r2, FieldMemOperand(r1, JSObject::kElementsOffset));
  __ AssertFastElements(r2);

  // Check that the key is within bounds. Both key and length are smis, so
  // they can be compared directly; the unsigned 'hs' branch also catches
  // negative keys.
  __ ldr(r3, FieldMemOperand(r2, FixedArray::kLengthOffset));
  __ cmp(r0, Operand(r3));
  __ b(hs, &miss);

  // Load the result and make sure it's not the hole.
  __ add(r3, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  // The smi key, shifted by (kPointerSizeLog2 - kSmiTagSize), indexes the
  // elements array directly without untagging.
  ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
  __ ldr(r4,
         MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  __ cmp(r4, ip);
  __ b(eq, &miss);
  __ mov(r0, r4);
  __ Ret();

  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.
  return GetCode(NORMAL, NULL);
}
3178
3179
// Compiles a keyed-store IC stub specialized for a fixed property name that
// stores into a field, possibly performing a map transition.
// On entry: r0: value, r1: name, r2: receiver, lr: return address.
// The stub counter is balanced on the miss path so it counts only hits.
MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
                                                       int index,
                                                       Map* transition,
                                                       String* name) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : name
  //  -- r2    : receiver
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  Counters* counters = masm()->isolate()->counters();
  __ IncrementCounter(counters->keyed_store_field(), 1, r3, r4);

  // Check that the name has not changed.
  __ cmp(r1, Operand(Handle<String>(name)));
  __ b(ne, &miss);

  // r3 is used as scratch register. r1 and r2 keep their values if a jump to
  // the miss label is generated.
  GenerateStoreField(masm(),
                     object,
                     index,
                     transition,
                     r2, r1, r3,
                     &miss);
  __ bind(&miss);

  __ DecrementCounter(counters->keyed_store_field(), 1, r3, r4);
  Handle<Code> ic = masm()->isolate()->builtins()->KeyedStoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code. The stub kind records whether this store
  // performs a map transition.
  return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
}
3216
3217
// Compiles a keyed-store IC stub specialized for a receiver with fast
// elements and a smi key. The elements array must be a plain FixedArray
// (not copy-on-write), and the write is recorded for the write barrier.
// On entry: r0: value, r1: key (smi), r2: receiver, lr: return address.
MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized(
    JSObject* receiver) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : key
  //  -- r2    : receiver
  //  -- lr    : return address
  //  -- r3    : scratch
  //  -- r4    : scratch (elements)
  // -----------------------------------
  Label miss;

  Register value_reg = r0;
  Register key_reg = r1;
  Register receiver_reg = r2;
  Register scratch = r3;
  Register elements_reg = r4;

  // Check that the receiver isn't a smi.
  __ tst(receiver_reg, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Check that the map matches.
  __ ldr(scratch, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
  __ cmp(scratch, Operand(Handle<Map>(receiver->map())));
  __ b(ne, &miss);

  // Check that the key is a smi.
  __ tst(key_reg, Operand(kSmiTagMask));
  __ b(ne, &miss);

  // Get the elements array and make sure it is a fast element array, not 'cow'.
  // A copy-on-write array has the fixed_cow_array_map, so comparing against
  // fixed_array_map rejects it.
  __ ldr(elements_reg,
         FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
  __ ldr(scratch, FieldMemOperand(elements_reg, HeapObject::kMapOffset));
  __ cmp(scratch, Operand(Handle<Map>(factory()->fixed_array_map())));
  __ b(ne, &miss);

  // Check that the key is within bounds. For a JSArray the bound is the
  // array length; otherwise it is the backing store length.
  if (receiver->IsJSArray()) {
    __ ldr(scratch, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
  } else {
    __ ldr(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
  }
  // Compare smis. Unsigned 'hs' also rejects negative keys.
  __ cmp(key_reg, scratch);
  __ b(hs, &miss);

  __ add(scratch,
         elements_reg, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  // Smi key scaled by (kPointerSizeLog2 - kSmiTagSize) indexes the elements
  // without untagging.
  ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
  __ str(value_reg,
         MemOperand(scratch, key_reg, LSL, kPointerSizeLog2 - kSmiTagSize));
  // Notify the GC of the pointer store.
  __ RecordWrite(scratch,
                 Operand(key_reg, LSL, kPointerSizeLog2 - kSmiTagSize),
                 receiver_reg , elements_reg);

  // value_reg (r0) is preserved.
  // Done.
  __ Ret();

  __ bind(&miss);
  Handle<Code> ic = masm()->isolate()->builtins()->KeyedStoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(NORMAL, NULL);
}
3286
3287
// Compiles a specialized construct stub for the given function: allocates the
// JSObject in new space and initializes its in-object properties inline from
// the function's "this.x = ..." property assignments, falling back to the
// generic construct stub when the fast path cannot be used (debug break
// points, missing initial map, allocation failure).
// On entry: r0: argc, r1: constructor function, lr: return address,
// [sp]: last argument.
MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
  // ----------- S t a t e -------------
  //  -- r0    : argc
  //  -- r1    : constructor
  //  -- lr    : return address
  //  -- [sp]  : last argument
  // -----------------------------------
  Label generic_stub_call;

  // Use r7 for holding undefined which is used in several places below.
  __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);

#ifdef ENABLE_DEBUGGER_SUPPORT
  // Check to see whether there are any break points in the function code. If
  // there are jump to the generic constructor stub which calls the actual
  // code for the function thereby hitting the break points.
  __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kDebugInfoOffset));
  __ cmp(r2, r7);
  __ b(ne, &generic_stub_call);
#endif

  // Load the initial map and verify that it is in fact a map.
  // r1: constructor function
  // r7: undefined
  __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
  __ tst(r2, Operand(kSmiTagMask));
  __ b(eq, &generic_stub_call);
  __ CompareObjectType(r2, r3, r4, MAP_TYPE);
  __ b(ne, &generic_stub_call);

#ifdef DEBUG
  // Cannot construct functions this way.
  // r0: argc
  // r1: constructor function
  // r2: initial map
  // r7: undefined
  __ CompareInstanceType(r2, r3, JS_FUNCTION_TYPE);
  __ Check(ne, "Function constructed by construct stub.");
#endif

  // Now allocate the JSObject in new space.
  // r0: argc
  // r1: constructor function
  // r2: initial map
  // r7: undefined
  __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset));
  __ AllocateInNewSpace(r3,
                        r4,
                        r5,
                        r6,
                        &generic_stub_call,
                        SIZE_IN_WORDS);

  // Allocated the JSObject, now initialize the fields. Map is set to initial
  // map and properties and elements are set to empty fixed array.
  // r0: argc
  // r1: constructor function
  // r2: initial map
  // r3: object size (in words)
  // r4: JSObject (not tagged)
  // r7: undefined
  __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
  __ mov(r5, r4);
  // The post-indexed stores below rely on the standard JSObject header
  // layout: map, properties, elements at consecutive word offsets.
  ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
  __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
  ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
  __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
  ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset);
  __ str(r6, MemOperand(r5, kPointerSize, PostIndex));

  // Calculate the location of the first argument. The stack contains only the
  // argc arguments.
  __ add(r1, sp, Operand(r0, LSL, kPointerSizeLog2));

  // Fill all the in-object properties with undefined.
  // r0: argc
  // r1: first argument
  // r3: object size (in words)
  // r4: JSObject (not tagged)
  // r5: First in-object property of JSObject (not tagged)
  // r7: undefined
  // Fill the initialized properties with a constant value or a passed argument
  // depending on the this.x = ...; assignment in the function.
  SharedFunctionInfo* shared = function->shared();
  for (int i = 0; i < shared->this_property_assignments_count(); i++) {
    if (shared->IsThisPropertyAssignmentArgument(i)) {
      Label not_passed, next;
      // Check if the argument assigned to the property is actually passed.
      int arg_number = shared->GetThisPropertyAssignmentArgument(i);
      __ cmp(r0, Operand(arg_number));
      __ b(le, &not_passed);
      // Argument passed - find it on the stack.
      __ ldr(r2, MemOperand(r1, (arg_number + 1) * -kPointerSize));
      __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
      __ b(&next);
      __ bind(&not_passed);
      // Set the property to undefined.
      __ str(r7, MemOperand(r5, kPointerSize, PostIndex));
      __ bind(&next);
    } else {
      // Set the property to the constant value.
      Handle<Object> constant(shared->GetThisPropertyAssignmentConstant(i));
      __ mov(r2, Operand(constant));
      __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
    }
  }

  // Fill the unused in-object property fields with undefined.
  ASSERT(function->has_initial_map());
  for (int i = shared->this_property_assignments_count();
       i < function->initial_map()->inobject_properties();
       i++) {
    __ str(r7, MemOperand(r5, kPointerSize, PostIndex));
  }

  // r0: argc
  // r4: JSObject (not tagged)
  // Move argc to r1 and the JSObject to return to r0 and tag it.
  __ mov(r1, r0);
  __ mov(r0, r4);
  __ orr(r0, r0, Operand(kHeapObjectTag));

  // r0: JSObject
  // r1: argc
  // Remove caller arguments and receiver from the stack and return.
  __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2));
  __ add(sp, sp, Operand(kPointerSize));
  Counters* counters = masm()->isolate()->counters();
  __ IncrementCounter(counters->constructed_objects(), 1, r1, r2);
  __ IncrementCounter(counters->constructed_objects_stub(), 1, r1, r2);
  __ Jump(lr);

  // Jump to the generic stub in case the specialized code cannot handle the
  // construction.
  __ bind(&generic_stub_call);
  Handle<Code> code = masm()->isolate()->builtins()->JSConstructStubGeneric();
  __ Jump(code, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode();
}
3430
3431
Steve Block1e0659c2011-05-24 12:43:12 +01003432static bool IsElementTypeSigned(ExternalArrayType array_type) {
3433 switch (array_type) {
3434 case kExternalByteArray:
3435 case kExternalShortArray:
3436 case kExternalIntArray:
3437 return true;
3438
3439 case kExternalUnsignedByteArray:
3440 case kExternalUnsignedShortArray:
3441 case kExternalUnsignedIntArray:
3442 return false;
3443
3444 default:
3445 UNREACHABLE();
3446 return false;
3447 }
3448}
3449
3450
// Compiles a keyed-load stub specialized for an external (typed) array of the
// given element type. Loads the raw element, then boxes it as required:
// small ints become smis, out-of-smi-range ints and all floats become
// freshly allocated HeapNumbers. Emits VFP3 code when the CPU supports it
// and an integer-only fallback otherwise.
// On entry: r0: key (smi), r1: receiver, lr: return address.
// The slow path tail-calls Runtime::kKeyedGetProperty.
MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
    JSObject* receiver_object,
    ExternalArrayType array_type,
    Code::Flags flags) {
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- r0     : key
  //  -- r1     : receiver
  // -----------------------------------
  Label slow, failed_allocation;

  Register key = r0;
  Register receiver = r1;

  // Check that the object isn't a smi
  __ JumpIfSmi(receiver, &slow);

  // Check that the key is a smi.
  __ JumpIfNotSmi(key, &slow);

  // Make sure that we've got the right map.
  __ ldr(r2, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ cmp(r2, Operand(Handle<Map>(receiver_object->map())));
  __ b(ne, &slow);

  __ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset));
  // r3: elements array

  // Check that the index is in range.
  __ ldr(ip, FieldMemOperand(r3, ExternalArray::kLengthOffset));
  __ cmp(ip, Operand(key, ASR, kSmiTagSize));
  // Unsigned comparison catches both negative and too-large values.
  __ b(lo, &slow);

  __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset));
  // r3: base pointer of external storage

  // We are not untagging smi key and instead work with it
  // as if it was premultiplied by 2.
  ASSERT((kSmiTag == 0) && (kSmiTagSize == 1));

  // Load the raw element value. The shift applied to the (tagged, i.e. 2x)
  // key yields the correct byte offset for each element width:
  // LSR 1 -> bytes, LSL 0 -> halfwords, LSL 1 -> words.
  Register value = r2;
  switch (array_type) {
    case kExternalByteArray:
      __ ldrsb(value, MemOperand(r3, key, LSR, 1));
      break;
    case kExternalPixelArray:
    case kExternalUnsignedByteArray:
      __ ldrb(value, MemOperand(r3, key, LSR, 1));
      break;
    case kExternalShortArray:
      __ ldrsh(value, MemOperand(r3, key, LSL, 0));
      break;
    case kExternalUnsignedShortArray:
      __ ldrh(value, MemOperand(r3, key, LSL, 0));
      break;
    case kExternalIntArray:
    case kExternalUnsignedIntArray:
      __ ldr(value, MemOperand(r3, key, LSL, 1));
      break;
    case kExternalFloatArray:
      if (CpuFeatures::IsSupported(VFP3)) {
        CpuFeatures::Scope scope(VFP3);
        __ add(r2, r3, Operand(key, LSL, 1));
        __ vldr(s0, r2, 0);
      } else {
        __ ldr(value, MemOperand(r3, key, LSL, 1));
      }
      break;
    default:
      UNREACHABLE();
      break;
  }

  // For integer array types:
  // r2: value
  // For floating-point array type
  // s0: value (if VFP3 is supported)
  // r2: value (if VFP3 is not supported)

  if (array_type == kExternalIntArray) {
    // For the Int and UnsignedInt array types, we need to see whether
    // the value can be represented in a Smi. If not, we need to convert
    // it to a HeapNumber.
    Label box_int;
    // Values >= 0xC0000000 (i.e. outside the 31-bit smi range) need boxing.
    __ cmp(value, Operand(0xC0000000));
    __ b(mi, &box_int);
    // Tag integer as smi and return it.
    __ mov(r0, Operand(value, LSL, kSmiTagSize));
    __ Ret();

    __ bind(&box_int);
    // Allocate a HeapNumber for the result and perform int-to-double
    // conversion. Don't touch r0 or r1 as they are needed if allocation
    // fails.
    __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
    __ AllocateHeapNumber(r5, r3, r4, r6, &slow);
    // Now we can use r0 for the result as key is not needed any more.
    __ mov(r0, r5);

    if (CpuFeatures::IsSupported(VFP3)) {
      CpuFeatures::Scope scope(VFP3);
      __ vmov(s0, value);
      __ vcvt_f64_s32(d0, s0);
      __ sub(r3, r0, Operand(kHeapObjectTag));
      __ vstr(d0, r3, HeapNumber::kValueOffset);
      __ Ret();
    } else {
      // No VFP: delegate the int-to-double store to a stub.
      WriteInt32ToHeapNumberStub stub(value, r0, r3);
      __ TailCallStub(&stub);
    }
  } else if (array_type == kExternalUnsignedIntArray) {
    // The test is different for unsigned int values. Since we need
    // the value to be in the range of a positive smi, we can't
    // handle either of the top two bits being set in the value.
    if (CpuFeatures::IsSupported(VFP3)) {
      CpuFeatures::Scope scope(VFP3);
      Label box_int, done;
      __ tst(value, Operand(0xC0000000));
      __ b(ne, &box_int);
      // Tag integer as smi and return it.
      __ mov(r0, Operand(value, LSL, kSmiTagSize));
      __ Ret();

      __ bind(&box_int);
      __ vmov(s0, value);
      // Allocate a HeapNumber for the result and perform int-to-double
      // conversion. Don't use r0 and r1 as AllocateHeapNumber clobbers all
      // registers - also when jumping due to exhausted young space.
      __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
      __ AllocateHeapNumber(r2, r3, r4, r6, &slow);

      __ vcvt_f64_u32(d0, s0);
      __ sub(r1, r2, Operand(kHeapObjectTag));
      __ vstr(d0, r1, HeapNumber::kValueOffset);

      __ mov(r0, r2);
      __ Ret();
    } else {
      // Check whether unsigned integer fits into smi.
      Label box_int_0, box_int_1, done;
      __ tst(value, Operand(0x80000000));
      __ b(ne, &box_int_0);
      __ tst(value, Operand(0x40000000));
      __ b(ne, &box_int_1);
      // Tag integer as smi and return it.
      __ mov(r0, Operand(value, LSL, kSmiTagSize));
      __ Ret();

      Register hiword = value;  // r2.
      Register loword = r3;

      // Manually build the IEEE 754 double in hiword:loword; the two entry
      // points differ only in the number of leading zeros of the value.
      __ bind(&box_int_0);
      // Integer does not have leading zeros.
      GenerateUInt2Double(masm(), hiword, loword, r4, 0);
      __ b(&done);

      __ bind(&box_int_1);
      // Integer has one leading zero.
      GenerateUInt2Double(masm(), hiword, loword, r4, 1);


      __ bind(&done);
      // Integer was converted to double in registers hiword:loword.
      // Wrap it into a HeapNumber. Don't use r0 and r1 as AllocateHeapNumber
      // clobbers all registers - also when jumping due to exhausted young
      // space.
      __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
      __ AllocateHeapNumber(r4, r5, r7, r6, &slow);

      __ str(hiword, FieldMemOperand(r4, HeapNumber::kExponentOffset));
      __ str(loword, FieldMemOperand(r4, HeapNumber::kMantissaOffset));

      __ mov(r0, r4);
      __ Ret();
    }
  } else if (array_type == kExternalFloatArray) {
    // For the floating-point array type, we need to always allocate a
    // HeapNumber.
    if (CpuFeatures::IsSupported(VFP3)) {
      CpuFeatures::Scope scope(VFP3);
      // Allocate a HeapNumber for the result. Don't use r0 and r1 as
      // AllocateHeapNumber clobbers all registers - also when jumping due to
      // exhausted young space.
      __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
      __ AllocateHeapNumber(r2, r3, r4, r6, &slow);
      __ vcvt_f64_f32(d0, s0);
      __ sub(r1, r2, Operand(kHeapObjectTag));
      __ vstr(d0, r1, HeapNumber::kValueOffset);

      __ mov(r0, r2);
      __ Ret();
    } else {
      // Allocate a HeapNumber for the result. Don't use r0 and r1 as
      // AllocateHeapNumber clobbers all registers - also when jumping due to
      // exhausted young space.
      __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
      __ AllocateHeapNumber(r3, r4, r5, r6, &slow);
      // VFP is not available, do manual single to double conversion.

      // r2: floating point value (binary32)
      // r3: heap number for result

      // Extract mantissa to r0. OK to clobber r0 now as there are no jumps to
      // the slow case from here.
      __ and_(r0, value, Operand(kBinary32MantissaMask));

      // Extract exponent to r1. OK to clobber r1 now as there are no jumps to
      // the slow case from here.
      __ mov(r1, Operand(value, LSR, kBinary32MantissaBits));
      __ and_(r1, r1, Operand(kBinary32ExponentMask >> kBinary32MantissaBits));

      // Zero and denormal exponents (0x00) and Infinity/NaN (0xff) are
      // special-cased; 0xff maps to the double's max exponent 0x7ff.
      Label exponent_rebiased;
      __ teq(r1, Operand(0x00));
      __ b(eq, &exponent_rebiased);

      __ teq(r1, Operand(0xff));
      __ mov(r1, Operand(0x7ff), LeaveCC, eq);
      __ b(eq, &exponent_rebiased);

      // Rebias exponent.
      __ add(r1,
             r1,
             Operand(-kBinary32ExponentBias + HeapNumber::kExponentBias));

      __ bind(&exponent_rebiased);
      __ and_(r2, value, Operand(kBinary32SignMask));
      value = no_reg;
      __ orr(r2, r2, Operand(r1, LSL, HeapNumber::kMantissaBitsInTopWord));

      // Shift mantissa.
      static const int kMantissaShiftForHiWord =
          kBinary32MantissaBits - HeapNumber::kMantissaBitsInTopWord;

      static const int kMantissaShiftForLoWord =
          kBitsPerInt - kMantissaShiftForHiWord;

      __ orr(r2, r2, Operand(r0, LSR, kMantissaShiftForHiWord));
      __ mov(r0, Operand(r0, LSL, kMantissaShiftForLoWord));

      __ str(r2, FieldMemOperand(r3, HeapNumber::kExponentOffset));
      __ str(r0, FieldMemOperand(r3, HeapNumber::kMantissaOffset));

      __ mov(r0, r3);
      __ Ret();
    }

  } else {
    // Byte, unsigned byte, pixel and short element values always fit in a
    // smi. Tag integer as smi and return it.
    __ mov(r0, Operand(value, LSL, kSmiTagSize));
    __ Ret();
  }

  // Slow case, key and receiver still in r0 and r1.
  __ bind(&slow);
  __ IncrementCounter(
      masm()->isolate()->counters()->keyed_load_external_array_slow(),
      1, r2, r3);

  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- r0     : key
  //  -- r1     : receiver
  // -----------------------------------

  __ Push(r1, r0);

  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);

  return GetCode(flags);
}
3722
3723
// Compiles a specialized KeyedStoreIC stub for stores into the external
// (typed) array backing |receiver_object|.  The generated code guards on the
// receiver's exact map, range-checks the key, converts the incoming value
// (smi or HeapNumber) to the element representation of |array_type|, and
// stores it.  Anything it cannot handle inline falls through to the
// Runtime::kSetProperty slow path.  Returns the compiled Code object (or a
// failure) via GetCode(flags).
MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
    JSObject* receiver_object,
    ExternalArrayType array_type,
    Code::Flags flags) {
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- r1     : key
  //  -- r2     : receiver
  //  -- lr     : return address
  // -----------------------------------
  Label slow, check_heap_number;

  // Register usage.
  Register value = r0;
  Register key = r1;
  Register receiver = r2;
  // r3 mostly holds the elements array or the destination external array.

  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, &slow);

  // Make sure that we've got the right map.  The stub is specialized to the
  // receiver's map at compile time, so any other map must take the slow path.
  __ ldr(r3, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ cmp(r3, Operand(Handle<Map>(receiver_object->map())));
  __ b(ne, &slow);

  __ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset));

  // Check that the key is a smi.
  __ JumpIfNotSmi(key, &slow);

  // Check that the index is in range.
  __ SmiUntag(r4, key);
  __ ldr(ip, FieldMemOperand(r3, ExternalArray::kLengthOffset));
  __ cmp(r4, ip);
  // Unsigned comparison catches both negative and too-large values.
  __ b(hs, &slow);

  // Handle both smis and HeapNumbers in the fast path. Go to the
  // runtime for all other kinds of values.
  // r3: external array.
  // r4: key (integer).
  if (array_type == kExternalPixelArray) {
    // Double to pixel conversion is only implemented in the runtime for now.
    __ JumpIfNotSmi(value, &slow);
  } else {
    __ JumpIfNotSmi(value, &check_heap_number);
  }
  __ SmiUntag(r5, value);
  __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset));

  // --- Smi fast path: store the untagged integer value. ---
  // r3: base pointer of external storage.
  // r4: key (integer).
  // r5: value (integer).
  switch (array_type) {
    case kExternalPixelArray:
      // Clamp the value to [0..255].  USAT saturates instead of truncating,
      // as required for pixel (canvas ImageData-style) arrays.
      __ Usat(r5, 8, Operand(r5));
      __ strb(r5, MemOperand(r3, r4, LSL, 0));
      break;
    case kExternalByteArray:
    case kExternalUnsignedByteArray:
      __ strb(r5, MemOperand(r3, r4, LSL, 0));
      break;
    case kExternalShortArray:
    case kExternalUnsignedShortArray:
      __ strh(r5, MemOperand(r3, r4, LSL, 1));
      break;
    case kExternalIntArray:
    case kExternalUnsignedIntArray:
      __ str(r5, MemOperand(r3, r4, LSL, 2));
      break;
    case kExternalFloatArray:
      // Perform int-to-float conversion and store to memory.
      StoreIntAsFloat(masm(), r3, r4, r5, r6, r7, r9);
      break;
    default:
      UNREACHABLE();
      break;
  }

  // Entry registers are intact, r0 holds the value which is the return value.
  __ Ret();

  // --- HeapNumber path (not generated for pixel arrays, which only accept
  // smis inline). ---
  if (array_type != kExternalPixelArray) {
    // r3: external array.
    // r4: index (integer).
    __ bind(&check_heap_number);
    __ CompareObjectType(value, r5, r6, HEAP_NUMBER_TYPE);
    __ b(ne, &slow);

    __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset));

    // r3: base pointer of external storage.
    // r4: key (integer).

    // The WebGL specification leaves the behavior of storing NaN and
    // +/-Infinity into integer arrays basically undefined. For more
    // reproducible behavior, convert these to zero.
    if (CpuFeatures::IsSupported(VFP3)) {
      CpuFeatures::Scope scope(VFP3);

      if (array_type == kExternalFloatArray) {
        // Narrow the double to binary32 in hardware and store it.
        // vldr requires offset to be a multiple of 4 so we can not
        // include -kHeapObjectTag into it.
        __ sub(r5, r0, Operand(kHeapObjectTag));
        __ vldr(d0, r5, HeapNumber::kValueOffset);
        __ add(r5, r3, Operand(r4, LSL, 2));
        __ vcvt_f32_f64(s0, d0);
        __ vstr(s0, r5, 0);
      } else {
        // Need to perform float-to-int conversion.
        // Test for NaN or infinity (both give zero).
        __ ldr(r6, FieldMemOperand(value, HeapNumber::kExponentOffset));

        // Hoisted load.  vldr requires offset to be a multiple of 4 so we can
        // not include -kHeapObjectTag into it.
        __ sub(r5, value, Operand(kHeapObjectTag));
        __ vldr(d0, r5, HeapNumber::kValueOffset);

        __ Sbfx(r6, r6, HeapNumber::kExponentShift, HeapNumber::kExponentBits);
        // NaNs and Infinities have all-one exponents so they sign extend to -1.
        __ cmp(r6, Operand(-1));
        __ mov(r5, Operand(0), LeaveCC, eq);

        // Not infinity or NaN simply convert to int.  The 'ne' condition set
        // by the cmp above predicates the whole conversion sequence.
        if (IsElementTypeSigned(array_type)) {
          __ vcvt_s32_f64(s0, d0, kDefaultRoundToZero, ne);
        } else {
          __ vcvt_u32_f64(s0, d0, kDefaultRoundToZero, ne);
        }
        __ vmov(r5, s0, ne);

        switch (array_type) {
          case kExternalByteArray:
          case kExternalUnsignedByteArray:
            __ strb(r5, MemOperand(r3, r4, LSL, 0));
            break;
          case kExternalShortArray:
          case kExternalUnsignedShortArray:
            __ strh(r5, MemOperand(r3, r4, LSL, 1));
            break;
          case kExternalIntArray:
          case kExternalUnsignedIntArray:
            __ str(r5, MemOperand(r3, r4, LSL, 2));
            break;
          default:
            UNREACHABLE();
            break;
        }
      }

      // Entry registers are intact, r0 holds the value which is the return
      // value.
      __ Ret();
    } else {
      // VFP3 is not available do manual conversions.
      // Load the double's raw bit pattern: r5 = high word (sign, exponent,
      // top of mantissa), r6 = low word (rest of mantissa).
      __ ldr(r5, FieldMemOperand(value, HeapNumber::kExponentOffset));
      __ ldr(r6, FieldMemOperand(value, HeapNumber::kMantissaOffset));

      if (array_type == kExternalFloatArray) {
        // Manual binary64 -> binary32 conversion via bit manipulation.
        Label done, nan_or_infinity_or_zero;
        static const int kMantissaInHiWordShift =
            kBinary32MantissaBits - HeapNumber::kMantissaBitsInTopWord;

        static const int kMantissaInLoWordShift =
            kBitsPerInt - kMantissaInHiWordShift;

        // Test for all special exponent values: zeros, subnormal numbers, NaNs
        // and infinities. All these should be converted to 0.
        __ mov(r7, Operand(HeapNumber::kExponentMask));
        __ and_(r9, r5, Operand(r7), SetCC);
        __ b(eq, &nan_or_infinity_or_zero);

        // All-ones exponent means NaN or Infinity: force the binary32
        // all-ones exponent and reuse the special-value packing below.
        __ teq(r9, Operand(r7));
        __ mov(r9, Operand(kBinary32ExponentMask), LeaveCC, eq);
        __ b(eq, &nan_or_infinity_or_zero);

        // Rebias exponent.
        __ mov(r9, Operand(r9, LSR, HeapNumber::kExponentShift));
        __ add(r9,
               r9,
               Operand(kBinary32ExponentBias - HeapNumber::kExponentBias));

        // Exponent overflows binary32 range: result is signed Infinity.
        __ cmp(r9, Operand(kBinary32MaxExponent));
        __ and_(r5, r5, Operand(HeapNumber::kSignMask), LeaveCC, gt);
        __ orr(r5, r5, Operand(kBinary32ExponentMask), LeaveCC, gt);
        __ b(gt, &done);

        // Exponent underflows binary32 range: result is signed zero
        // (subnormal binary32 results are flushed to zero here).
        __ cmp(r9, Operand(kBinary32MinExponent));
        __ and_(r5, r5, Operand(HeapNumber::kSignMask), LeaveCC, lt);
        __ b(lt, &done);

        // Normal case: pack sign, truncated mantissa (high bits from r5, low
        // bits from r6) and rebiased exponent into a binary32 in r5.
        __ and_(r7, r5, Operand(HeapNumber::kSignMask));
        __ and_(r5, r5, Operand(HeapNumber::kMantissaMask));
        __ orr(r7, r7, Operand(r5, LSL, kMantissaInHiWordShift));
        __ orr(r7, r7, Operand(r6, LSR, kMantissaInLoWordShift));
        __ orr(r5, r7, Operand(r9, LSL, kBinary32ExponentShift));

        __ bind(&done);
        __ str(r5, MemOperand(r3, r4, LSL, 2));
        // Entry registers are intact, r0 holds the value which is the return
        // value.
        __ Ret();

        // Zero/subnormal (r9 == 0) or NaN/Infinity (r9 == binary32 exponent
        // mask): combine sign, exponent and shifted mantissa bits directly.
        __ bind(&nan_or_infinity_or_zero);
        __ and_(r7, r5, Operand(HeapNumber::kSignMask));
        __ and_(r5, r5, Operand(HeapNumber::kMantissaMask));
        __ orr(r9, r9, r7);
        __ orr(r9, r9, Operand(r5, LSL, kMantissaInHiWordShift));
        __ orr(r5, r9, Operand(r6, LSR, kMantissaInLoWordShift));
        __ b(&done);
      } else {
        // Manual binary64 -> integer truncation via bit manipulation.
        bool is_signed_type = IsElementTypeSigned(array_type);
        int meaningfull_bits = is_signed_type ? (kBitsPerInt - 1) : kBitsPerInt;
        int32_t min_value = is_signed_type ? 0x80000000 : 0x00000000;

        Label done, sign;

        // Test for all special exponent values: zeros, subnormal numbers, NaNs
        // and infinities. All these should be converted to 0.
        __ mov(r7, Operand(HeapNumber::kExponentMask));
        __ and_(r9, r5, Operand(r7), SetCC);
        __ mov(r5, Operand(0, RelocInfo::NONE), LeaveCC, eq);
        __ b(eq, &done);

        // All-ones exponent (NaN/Infinity) also converts to 0, matching the
        // WebGL-motivated policy above.
        __ teq(r9, Operand(r7));
        __ mov(r5, Operand(0, RelocInfo::NONE), LeaveCC, eq);
        __ b(eq, &done);

        // Unbias exponent.
        __ mov(r9, Operand(r9, LSR, HeapNumber::kExponentShift));
        __ sub(r9, r9, Operand(HeapNumber::kExponentBias), SetCC);
        // If exponent is negative then result is 0 (|value| < 1).
        __ mov(r5, Operand(0, RelocInfo::NONE), LeaveCC, mi);
        __ b(mi, &done);

        // If exponent is too big then result is minimal value.
        __ cmp(r9, Operand(meaningfull_bits - 1));
        __ mov(r5, Operand(min_value), LeaveCC, ge);
        __ b(ge, &done);

        // Isolate the sign (r7) and reconstruct the significand in r5 by
        // restoring the implicit leading 1 bit.
        __ and_(r7, r5, Operand(HeapNumber::kSignMask), SetCC);
        __ and_(r5, r5, Operand(HeapNumber::kMantissaMask));
        __ orr(r5, r5, Operand(1u << HeapNumber::kMantissaBitsInTopWord));

        // r9 = kMantissaBitsInTopWord - exponent.  If non-negative (pl), the
        // integer part fits in the high word: shift right and finish.
        __ rsb(r9, r9, Operand(HeapNumber::kMantissaBitsInTopWord), SetCC);
        __ mov(r5, Operand(r5, LSR, r9), LeaveCC, pl);
        __ b(pl, &sign);

        // Otherwise shift the high word left and pull the remaining bits in
        // from the low mantissa word (r6).
        __ rsb(r9, r9, Operand(0, RelocInfo::NONE));
        __ mov(r5, Operand(r5, LSL, r9));
        __ rsb(r9, r9, Operand(meaningfull_bits));
        __ orr(r5, r5, Operand(r6, LSR, r9));

        // Negate the result if the source double was negative.
        __ bind(&sign);
        __ teq(r7, Operand(0, RelocInfo::NONE));
        __ rsb(r5, r5, Operand(0, RelocInfo::NONE), LeaveCC, ne);

        __ bind(&done);
        switch (array_type) {
          case kExternalByteArray:
          case kExternalUnsignedByteArray:
            __ strb(r5, MemOperand(r3, r4, LSL, 0));
            break;
          case kExternalShortArray:
          case kExternalUnsignedShortArray:
            __ strh(r5, MemOperand(r3, r4, LSL, 1));
            break;
          case kExternalIntArray:
          case kExternalUnsignedIntArray:
            __ str(r5, MemOperand(r3, r4, LSL, 2));
            break;
          default:
            UNREACHABLE();
            break;
        }
      }
    }
  }

  // Slow case: call runtime.
  __ bind(&slow);

  // Entry registers are intact.
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- r1     : key
  //  -- r2     : receiver
  //  -- lr     : return address
  // -----------------------------------

  // Push receiver, key and value for runtime call.
  __ Push(r2, r1, r0);

  // kSetProperty takes two extra arguments: attributes and strict-mode flag
  // (extracted from the IC's extra state in |flags|).
  __ mov(r1, Operand(Smi::FromInt(NONE)));  // PropertyAttributes
  __ mov(r0, Operand(Smi::FromInt(
      Code::ExtractExtraICStateFromFlags(flags) & kStrictMode)));
  __ Push(r1, r0);

  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);

  return GetCode(flags);
}
4028
4029
Steve Blocka7e24c12009-10-30 11:49:00 +00004030#undef __
4031
4032} } // namespace v8::internal
Leon Clarkef7060e22010-06-03 12:02:55 +01004033
4034#endif // V8_TARGET_ARCH_ARM